Example #1
    def append_comments(self, submission, origin_post_id, publish_date):
        # Build a +/- N-month window around the submission's publication date,
        # approximating a month as 365/12 days, and convert the bounds to UNIX
        # timestamps so they can be compared against comment.created.
        datetime_object = publish_date
        month_interval = timedelta(self._month_interval * 365 / 12)
        start_date = time.mktime((datetime_object - month_interval).timetuple())
        end_date = time.mktime((datetime_object + month_interval).timetuple())
        publish_date = time.mktime(publish_date.timetuple())

        for comment in submission.comments:
            if isinstance(comment, MoreComments):
                continue
            if comment.created > end_date or comment.created < start_date:
                continue

            created_at = datetime.fromtimestamp(comment.created)
            url = unicode(submission.url + comment.id)
            comment_guid = compute_post_guid(url, comment.id, date_to_str(created_at))

            # Keep non-downvoted comments longer than three tokens, bucketed by
            # whether they were posted before or after the submission.
            if start_date < comment.created <= publish_date:
                if comment.score >= 0 and len(comment.body.split(' ')) > 3:
                    self.convert_comment_to_post(comment, submission)
                    self._post_id_tweets_id_before_dict[origin_post_id].add(comment_guid)
            elif publish_date < comment.created <= end_date:
                if comment.score >= 0 and len(comment.body.split(' ')) > 3:
                    self.convert_comment_to_post(comment, submission)
                    self._post_id_tweets_id_after_dict[origin_post_id].add(comment_guid)
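Every example on this page keys posts, claims, and authors with compute_post_guid and compute_author_guid_by_author_name. Their implementations are not shown here; as a point of reference only, a minimal sketch of deterministic, hash-based helpers with the same call signatures might look like the following (the hashing scheme is an assumption, not the project's actual code):

import hashlib

def compute_author_guid_by_author_name(author_name):
    # Hypothetical: derive a stable author GUID from the author name alone.
    return hashlib.md5(author_name.encode('utf-8')).hexdigest()

def compute_post_guid(source_url, post_identifier, publication_date_str):
    # Hypothetical: combine URL, identifier, and publication date so the same
    # post always maps to the same GUID.
    raw = u'{0}{1}{2}'.format(source_url, post_identifier, publication_date_str)
    return hashlib.md5(raw.encode('utf-8')).hexdigest()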
Example #2
    def _convert_row_to_post(self, row):
        post = Post()

        claim_id = unicode(row['claim_id'])
        title = unicode(row['title'], errors='replace')
        post.content = title

        description = unicode(row['description'], errors='replace')
        post.description = description

        url = unicode(row['url'])
        post.url = url

        publication_date = row['publication_date']
        post.date = date(publication_date)

        post_guid = compute_post_guid(self._social_network_url, claim_id, publication_date)
        post.guid = post_guid
        post.post_id = post_guid
        post.domain = self._domain
        post.author = self._author_name
        author_guid = compute_author_guid_by_author_name(self._author_name)
        post.author_guid = author_guid
        post.post_osn_guid = post_guid

        keywords = unicode(row['keywords'])
        post.tags = keywords

        post_type = unicode(row['post_type'])
        post.post_type = post_type

        return post
Example #3
    def _convert_row_to_claim(self, row):
        claim = Claim()

        claim_id = unicode(row['claim_id'])
        title = unicode(row['title'], errors='replace')
        claim.title = title

        description = unicode(row['description'], errors='replace')
        claim.description = description

        url = unicode(row['url'])
        claim.url = url

        verdict_date = row['publication_date']
        claim.verdict_date = date(verdict_date)

        post_guid = compute_post_guid(self._social_network_url, claim_id, verdict_date)
        claim.claim_id = post_guid

        claim.domain = self._domain

        keywords = unicode(row['keywords'])
        claim.keywords = keywords

        verdict = unicode(row['post_type'])
        claim.verdict = verdict

        return claim
Example #4
    def _add_post(self, author, date, post_osn_id, score=0, upvote_ratio=-1):
        post = Post()
        post.post_osn_id = post_osn_id
        post.author = str(author)
        post.author_guid = compute_author_guid_by_author_name(post.author)
        post.created_at = str_to_date(date, formate="%d/%m/%Y %H:%M")
        post.url = 'https://www.reddit.com{}'.format(
            post.author)  # just for test
        post.guid = compute_post_guid(post.url, post.post_osn_id,
                                      date_to_str(post.created_at))
        post.domain = 'reddit_comment'
        post.post_type = 'reddit_comment'
        post.post_id = post.guid

        reddit_post = RedditPost()
        reddit_post.post_id = post.post_id
        reddit_post.guid = post.guid
        reddit_post.score = score
        if upvote_ratio != -1:
            post.domain = 'reddit_post'
            post.post_type = 'reddit_post'
            reddit_post.upvote_ratio = upvote_ratio
            # Recover ups/downs from score and upvote_ratio:
            #   upvote_ratio = ups / (ups + downs) and score = ups - downs
            #   => ups = upvote_ratio * score / (2 * upvote_ratio - 1),
            # which is undefined at upvote_ratio == 0.5, so fall back to score / 2.
            if reddit_post.upvote_ratio != 0.5:
                reddit_post.ups = int(round(
                    (reddit_post.upvote_ratio * reddit_post.score) /
                    (2 * reddit_post.upvote_ratio - 1)))
            else:
                reddit_post.ups = int(round(reddit_post.score / 2))
            reddit_post.downs = reddit_post.ups - reddit_post.score
        else:
            reddit_post.ups = -1
            reddit_post.downs = -1
            reddit_post.upvote_ratio = -1

        self._db.addPosts([post, reddit_post])
        return post, reddit_post
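As a quick sanity check of that ups/downs recovery (the numbers below are invented, not from the project): with score = 10 and upvote_ratio = 0.75, ups = 0.75 * 10 / (2 * 0.75 - 1) = 15 and downs = 15 - 10 = 5, which indeed give back 15 / (15 + 5) = 0.75 and 15 - 5 = 10.

# Illustrative check of the ups/downs recovery used above (invented numbers).
score, upvote_ratio = 10, 0.75
ups = int(round(upvote_ratio * score / (2 * upvote_ratio - 1)))  # 15
downs = ups - score                                              # 5
assert ups - downs == score
assert abs(float(ups) / (ups + downs) - upvote_ratio) < 1e-9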
Example #5
    def _convert_tweet_to_post(self, tweet, post_type):
        post = Post()

        post.post_osn_id = unicode(tweet.id)
        post_creation_date = tweet.date
        created_at = unicode(date_to_str(post_creation_date))
        post.created_at = created_at

        post.date = post_creation_date
        post.favorite_count = tweet.favorites
        post.retweet_count = tweet.retweets
        post.content = unicode(tweet.text)

        author_name = unicode(tweet.username)
        post.author = author_name
        # post.author_guid = compute_author_guid_by_author_name(author_name)
        post_url = tweet.permalink
        post.url = unicode(post_url)

        post_guid = compute_post_guid(post_url, author_name, created_at)
        post.guid = post_guid
        post.post_id = post_guid
        post.domain = self._domain

        post.post_type = post_type
        return post
Example #6
    def _convert_row_to_claim(self, row):
        claim = Claim()

        # claim_id = unicode(row['claim_id'])
        title = unicode(row['title'], errors='replace')
        claim.title = title

        description = unicode(row['description'], errors='replace')
        claim.description = description

        url = unicode(row['url'])
        claim.url = url

        verdict_date = row['verdict_date']
        claim.verdict_date = date(verdict_date)

        post_guid = compute_post_guid(self._social_network_url, url,
                                      verdict_date)
        claim.claim_id = post_guid

        claim.domain = self._domain

        keywords = unicode(row['keywords'])
        claim.keywords = keywords

        verdict = unicode(row['verdict'])
        claim.verdict = verdict

        claim.category = unicode(row['main_category'])
        claim.sub_category = unicode(row['secondary_category'])

        return claim
Example #7
    def _convert_row_to_post(self, row):
        # [site, social_id, username_hash, comment_time, comment_tokens]
        print("\rInsert post to DataFrame {0}/{1}".format(
            self._current_row, len(self.posts_csv_df)),
              end="")
        self._current_row += 1
        date = datetime.datetime.fromtimestamp(row['comment_time'])
        post = Post()
        claim_id = compute_author_guid_by_author_name(str(row['social_id']))
        post.post_id = str(
            compute_post_guid(row['site'] + str(claim_id),
                              row['username_hash'], date_to_str(date)))
        post.content = str(row['comment_tokens'])
        post.author = str(row['username_hash'])
        post.author_guid = str(row['username_hash'])
        post.domain = str(row['site'])
        post.date = date
        self._posts.append(post)

        claim_tweet_connection = Claim_Tweet_Connection()
        claim_tweet_connection.claim_id = str(claim_id)
        claim_tweet_connection.post_id = str(post.post_id)
        self._claim_tweet_connections.append(claim_tweet_connection)

        if self._current_row % self._max_posts_without_save == 0:
            self._save_posts_and_connections()
 def _json_comment_to_db_comment_converter(self, post, domain="Instagram_comment"):
     rpost = Post()
     rpost.post_osn_id = str(post['id'])
     rpost.created_at = datetime.fromtimestamp(post['created_at'])
     rpost.author = post['owner']['id']
     rpost.author_guid = compute_author_guid_by_author_name(rpost.author)
     rpost.url = str('https://www.instagram.com/p/{}/'.format(post['shortcode']))
     rpost.content = post['text']
     rpost.guid = compute_post_guid(rpost.url, rpost.post_osn_id, date_to_str(rpost.created_at))
     rpost.domain = domain
     rpost.post_type = domain
     rpost.post_id = rpost.guid
     return rpost
 def _json_post_to_db_post_converter(self, post, domain="Instagram_post"):
     rpost = Post()
     rpost.post_osn_id = str(post['id'])
     rpost.created_at = datetime.fromtimestamp(post['taken_at_timestamp'])
     rpost.author = post['owner']['id']
     rpost.author_guid = compute_author_guid_by_author_name(rpost.author)
     rpost.url = str('https://www.instagram.com/p/{}/'.format(post['shortcode']))
     rpost.content = ', '.join(x['node']['text'] for x in post['edge_media_to_caption']['edges'])
     rpost.guid = compute_post_guid(rpost.url, rpost.post_osn_id, date_to_str(rpost.created_at))
     rpost.domain = domain
     rpost.post_type = domain
     rpost.post_id = rpost.guid
     return rpost
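Both Instagram converters above read a node shaped like Instagram's GraphQL media objects. A hypothetical input illustrating only the fields the post converter actually accesses (all values are invented):

# Hypothetical input for _json_post_to_db_post_converter; only the keys read
# by the converter are shown, and the values are made up for illustration.
sample_instagram_post = {
    'id': '1234567890',
    'taken_at_timestamp': 1546300800,
    'owner': {'id': '987654321'},
    'shortcode': 'BsAbCdEfGhI',
    'edge_media_to_caption': {
        'edges': [{'node': {'text': 'example caption'}}],
    },
}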
Example #10
 def convert_comment_to_post(self, comment, submission, domain=u"Reddit"):
     post = Post()
     post.post_osn_id = unicode(comment.id)
     post.created_at = datetime.fromtimestamp(comment.created)
     post.date = datetime.fromtimestamp(comment.created)
     if hasattr(comment, 'author') and comment.author:
         post.author = unicode(comment.author.name)
         self._redditors.append(comment.author)
     else:
         self._deleted_redditors.append(str(post.date))
         post.author = unicode('')
     post.author_guid = compute_author_guid_by_author_name(post.author)
     post.url = unicode('https://www.reddit.com' + '/'.join(getattr(comment, 'permalink', '').split('/')[3:7]))
     post.title = unicode(submission.title)
     post.content = unicode(getattr(comment, 'body', '').encode('utf-8').strip())
     post.guid = compute_post_guid(post.url, post.post_osn_id, date_to_str(post.created_at))
     post.domain = domain
     post.post_type = domain
     post.post_id = post.guid
     post.url = u'https://www.reddit.com{}'.format(comment.permalink)
     return post
Example #11
    def _convert_tweet_dict_to_post(self, tweet_dict):
        post = Post()

        post_osn_id = tweet_dict['id_str']
        post.post_osn_id = post_osn_id

        author_osn_id = tweet_dict['author_osn_id']
        author = self._author_osn_id_author_dict[author_osn_id]
        author_screen_name = author.author_screen_name
        post.author = author_screen_name

        post.author_guid = compute_author_guid_by_author_name(
            author_screen_name)

        created_at = tweet_dict['created_at']
        post.created_at = created_at

        creation_date_str = extract_tweet_publiction_date(created_at)
        creation_date = str_to_date(creation_date_str)
        post.date = creation_date

        post.favorite_count = tweet_dict['favorite_count']
        post.retweet_count = tweet_dict['retweet_count']
        post.reply_count = tweet_dict['reply_count']
        post.content = str(tweet_dict['full_text'])
        post.domain = self._domain
        post.language = str(tweet_dict['lang'])

        post_url = "https://twitter.com/{0}/status/{1}".format(
            author_screen_name, post_osn_id)
        post.url = post_url

        post_guid = compute_post_guid(post_url, author_screen_name,
                                      creation_date_str)
        post.guid = post_guid
        post.post_id = post_guid

        return post
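The tweet_dict consumed above mirrors fields of a Twitter API tweet payload. A hypothetical dictionary covering just the keys this converter reads (all values are invented):

# Hypothetical tweet_dict for _convert_tweet_dict_to_post; only the keys the
# converter reads are included, and the values are made up.
sample_tweet_dict = {
    'id_str': '1100000000000000000',
    'author_osn_id': '12345678',
    'created_at': 'Mon Feb 25 12:34:56 +0000 2019',
    'favorite_count': 3,
    'retweet_count': 1,
    'reply_count': 0,
    'full_text': 'example tweet text',
    'lang': 'en',
}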
Example #12
    def _convert_row_to_claim(self, row):
        claim = Claim()
        claim_id = unicode(row['campaign_id'])
        title = unicode(row['title'], errors='replace')
        claim.title = title
        try:
            verdict_date = datetime.datetime.strftime(
                datetime.datetime.strptime(row['date'], '%d-%m-%y'),
                '%Y-%m-%d %H:%M:%S')
        except Exception:
            # Fall back to the configured default date when the row has no
            # parseable date.
            verdict_date = getConfig().eval(self.__class__.__name__,
                                            'default_date')
        claim.verdict_date = date(verdict_date)
        post_guid = compute_post_guid(self._social_network_url, claim_id,
                                      verdict_date)
        claim.claim_id = post_guid
        claim.domain = self._domain
        keywords = unicode(row['category'])
        claim.keywords = keywords
        verdict = unicode(row['campaign_class'])
        claim.verdict = verdict
        # claim.claim_topic = unicode(row['category'])
        return claim
Example #13
    def _parse_articles_lst_to_articles(self, all_articles_lst_of_dics):
        print("###### 'Entering _parse_articles_lst_to_articles'")
        parsed_articles_lst = []
        claims_lst = []
        posts_lst = []
        articles_lst = []
        article_items_lst = []

        # Parsing articles list of dictionaries data, received using the API.
        for news_articles_dic in all_articles_lst_of_dics:
            print("###### 'PARSING: {}'".format(str(news_articles_dic)))
            parsed_articles_lst += self._parse_news_article(news_articles_dic)

        # For each news article dictionary commit:
        for parsed_news_article in parsed_articles_lst:
            print("###### 'Iterating parsed_articles_lst single item: {}'".
                  format(str(parsed_news_article)))
            # Building: claim & News_Article & News_Article_Item objects.
            claim = Claim()
            post = Post()
            article = News_Article()
            article_item = News_Article_Item()

            # Initializing Claim object with data:
            identifier = compute_post_guid(parsed_news_article['url'],
                                           parsed_news_article['author'],
                                           parsed_news_article['publishedAt'])
            claim.claim_id = post.post_id = post.guid = post.post_osn_guid = article.post_id = article_item.post_id = unicode(
                identifier)

            author_guid = compute_author_guid_by_author_name(
                parsed_news_article['author'])
            post.author_guid = article.author_guid = article_item.author_guid = unicode(
                author_guid)

            post.author = article.author = unicode(
                parsed_news_article['author'])

            post.title = claim.title = article.title = unicode(
                parsed_news_article['title'])

            post.content = article_item.content = unicode(
                parsed_news_article['content'])

            post.description = claim.description = article.description = unicode(
                parsed_news_article['description'])

            post.date = post.created_at = claim.verdict_date = article.published_date = datetime.datetime.strptime(
                parsed_news_article['publishedAt'], '%Y-%m-%d %H:%M:%S')

            article_item.source_newsapi_internal_id = unicode(
                parsed_news_article['source_id'])

            article_item.source_newsapi_internal_name = unicode(
                parsed_news_article['source_name'])

            post.url = claim.url = article.url = unicode(
                parsed_news_article['url'])

            article_item.img_url = unicode(parsed_news_article['urlToImage'])

            # todo: Add constant. We assume all news articles are true.
            post.post_type = claim.verdict = unicode("TRUE")

            # todo: Add constant.
            post.domain = claim.domain = unicode("NewsSite")

            # Update objects lists:
            posts_lst.append(post)
            claims_lst.append(claim)
            articles_lst.append(article)
            article_items_lst.append(article_item)
        print("###### 'EXITING _parse_articles_lst_to_articles'")
        return posts_lst, claims_lst, articles_lst, article_items_lst
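The loop above expects each record produced by _parse_news_article to carry a fixed set of keys. A hypothetical parsed_news_article showing only those keys (all values are invented):

# Hypothetical parsed_news_article record; only the keys used in
# _parse_articles_lst_to_articles are shown, with invented values.
sample_parsed_news_article = {
    'url': 'https://example.com/news/some-article',
    'author': 'Jane Doe',
    'publishedAt': '2019-02-25 12:34:56',
    'title': 'Example headline',
    'content': 'Full article text ...',
    'description': 'Short summary of the article.',
    'source_id': 'example-news',
    'source_name': 'Example News',
    'urlToImage': 'https://example.com/news/image.jpg',
}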