Example No. 1
def main():
    # Get users from the database
    client = MongoClient('localhost', 27017)
    db2 = client['fbapp-DB']
    collection2 = db2['fb-users']
    collection3 = db2['fb-interactions']
    admin_token = ""
    user = collection2.find_one({"name": "NehaDeshmukh"})

    for user in collection2.find():
        if 'vizDone' in user:
            if user['vizDone'] == 0:
                name = user['name']
                Id = user['user id']
                access_token = user['access_token']
                graph = facepy.GraphAPI(access_token)
                admin = facepy.GraphAPI(admin_token)
                nodes = []
                friends = []
                links = []
                # get profile object
                user_name = graph.get('me')['name']
                fb_feed = "https://graph.facebook.com/v2.4/me?fields=feed&access_token=" + access_token
                data = requests.get(fb_feed).json()
                first_interaction = data['feed']['data'][0]
                story1 = first_interaction['story']
                story2 = ""
                date1 = dateparser.parse(
                    first_interaction['created_time']).strftime('%m/%d/%y')
                date2 = ""
                while "next" in data:
                    url = data["next"]
                    data = requests.get(url).json()
                    if "data" in data:
                        last_interaction = data['data'][-1]
                        story2 = last_interaction['story']
                        date2 = dateparser.parse(
                            last_interaction['created_time']).strftime(
                                '%m/%d/%y')
                fb_events = "https://graph.facebook.com/v2.4/me?fields=events&access_token=" + access_token
                events_data = requests.get(fb_events).json()
                no_of_events = len(events_data['events']['data'])
                while "next" in events_data:
                    url = events_data["next"]
                    events_data = requests.get(url).json()
                    if "data" in events_data:
                        no_of_events += len(events_data["data"])
                jsonTemp = {}
                jsonTemp['first'] = [story1, date1]
                jsonTemp['last'] = [story2, date2]
                jsonTemp['events'] = no_of_events
                collection2.update(user, {"$set": {'jsonTemp': jsonTemp}})
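Example No. 1 walks the feed's pagination by hand, following "next" URLs with requests. facepy can do that paging itself when page=True is passed to GraphAPI.get, as several of the later examples do. Below is a minimal sketch of the same first/last-story lookup built on that feature; it is only a sketch under the assumption that the story and created_time fields and the dateparser module behave as in the code above.

import facepy
import dateparser

def first_and_last_story(access_token):
    # Let facepy follow the paging links instead of handling "next" URLs manually.
    graph = facepy.GraphAPI(access_token)
    first = last = None
    for page in graph.get('me/feed', page=True):
        for item in page.get('data', []):
            if 'story' not in item:
                continue
            if first is None:
                first = item
            last = item
    if first is None:
        return None, None
    def fmt(item):
        return item['story'], dateparser.parse(item['created_time']).strftime('%m/%d/%y')
    return fmt(first), fmt(last)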
Example No. 2
def get_application_graph_api():
    kwargs = {
        'client_id': '[[APP_ID]]',
        'client_secret': '[[APP_SECRET]]',
        'grant_type': 'client_credentials',
    }
    access_token_string = facepy.GraphAPI().get('oauth/access_token', **kwargs)
    print access_token_string
    match = re.match(r"^access_token=(?P<access_token>.*)$",
                     access_token_string)
    if match:
        return facepy.GraphAPI(match.groups()[0])
    raise facepy.FacepyError('no access_token in response')
Example No. 3
 def get_access_token(self, app_id, app_secret):
     graph = facepy.GraphAPI()
     result = graph.get('oauth/access_token?client_id=' + app_id +
                        '&client_secret=' + app_secret +
                        '&grant_type=client_credentials')
     access_token = result.replace("access_token=", "")
     return access_token
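Examples No. 2 and 3 both parse the oauth/access_token response as a query string, which is what older Graph API versions returned. Newer versions return JSON, so the same call can be read as a dict. A small sketch under that assumption; the version string is only an example:

import facepy

def get_app_token(app_id, app_secret):
    # Assumes a Graph API version that returns JSON from oauth/access_token.
    result = facepy.GraphAPI(version='2.9').get(
        'oauth/access_token?client_id=' + app_id +
        '&client_secret=' + app_secret +
        '&grant_type=client_credentials')
    # result should look like {"access_token": "...", "token_type": "bearer"}
    return result['access_token']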
Example No. 4
def get_graph(app_id, app_secret):
    auth_token = facepy.utils.get_application_access_token(app_id,
                                                           app_secret,
                                                           api_version='2.6')
    graph = facepy.GraphAPI(auth_token)

    return graph
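A quick usage sketch for get_graph; the app credentials and the page ID below are placeholders, not values from the source:

graph = get_graph('YOUR_APP_ID', 'YOUR_APP_SECRET')
# Read a couple of public fields from a page (standard Graph API page fields).
page = graph.get('some-page-id?fields=name,fan_count')
print(page['name'], page['fan_count'])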
Example No. 5
    def __init__(self, queue, apikey, query, curr_time, num_weeks):

        # Super class
        threading.Thread.__init__(self)

        # Queue object given from outside. Queues are threadsafe
        self.queue = queue

        # Graph object for our call, authenticated with a token
        self.graph = facepy.GraphAPI(apikey)

        # FQL query with specified date range
        self.input_query = query

        # Counters. t-total, p-posts, c-comments
        self.tcounter = Counter()
        self.pcounter = Counter()
        self.ccounter = Counter()
        self.tpcounter = Counter()
        self.tccounter = Counter()
        self.cccounter = Counter()

        # Time range, for logging
        self.time_range = datetime.datetime.fromtimestamp(
            curr_time - num_weeks).strftime('%Y-%m-%d') + "-" + \
                          datetime.datetime.fromtimestamp(curr_time).strftime(
                              '%Y-%m-%d')
Example No. 6
 def graph_get_with_oauth_retry(self, url, page, max_retry_cycles=3):
     """a closure to let the user deal with oauth token expiry"""
     assert max_retry_cycles > 0
     retry_cycle = 0
     while True:
         if retry_cycle >= max_retry_cycles:
             logging.error("Giving up on query {} after {} tries; last exception was {}/{}".format(url,
                                                                                                   retry_cycle,
                                                                                                   type(last_exc),
                                                                                                   last_exc))
             return list()
         retry_cycle += 1
         try:
             return list(self.graph.get(url, page=page))
         except Exception as exc:
             last_exc = exc
             logging.error(exc)
             if "unknown error" not in exc.message:
                 # might be able to recover with a retry or a new token
                 logging.info("Failed with {}/{}: doing simple retry".format(type(exc), exc))
                 try:
                     time.sleep(3)
                     return list(self.graph.get(url, page=page))
                 except facepy.exceptions.OAuthError as exc:
                     logging.error("Retry {} failed; {}/{}".format(retry_cycle, type(exc), exc))
                     logging.info("Update your token; generate a new token by visiting {}".format("https://developers.facebook.com/tools/explorer"))
                     logging.info("Waiting for user to enter new oauth access token...")
                     self.oauth_access_token = raw_input("Enter new oath access token: ")
                     self.oauth_access_token = self.oauth_access_token.strip()
                     self.graph = facepy.GraphAPI(self.oauth_access_token)
Example No. 7
def suck(save_item, handle_error, source):
    auth_token = facepy.utils.get_application_access_token(
        settings.FACEBOOK['app_id'], settings.FACEBOOK['app_secret'])

    graph = facepy.GraphAPI(auth_token)
    last_retrieved = {}
    cur_dir = os.path.dirname(os.path.realpath(__file__))
    local_filename = cur_dir + '/data/facebook_thailand/facebook_pages.csv'

    with open(local_filename, 'rb') as f:
        reader = csv.reader(f)
        # Skip the headers
        reader.next()

        for row in reader:
            username = row[3]
            lr_key = username.replace('.', '|')

            # Don't bother making requests for a page that no longer exists
            if ('lastRetrieved' in source and lr_key in source['lastRetrieved']
                    and source['lastRetrieved'][lr_key] == 'Failing'):
                continue

            got_content = facebook_base.get_content(username,
                                                    graph,
                                                    source,
                                                    save_item,
                                                    admin1='Thailand')

            if not got_content:
                last_retrieved[lr_key] = 'Failing'
            else:
                last_retrieved[lr_key] = datetime.now().strftime("%s")

    return last_retrieved
Example No. 8
def auth():
	""" auth works with a local pickle file containing the access token; if the token has expired, the script prompts the user for a new one on the CLI """
	print 'Authenticating with Facebook...\n'

	try:
		token = pickle.load(open('token.p', 'rb'))
	except IOError:
		token = getToken()

	if token:
		try:
			graph = facepy.GraphAPI(token)
			profile = graph.get('nrc')
		except facepy.exceptions.OAuthError:
			token = getToken()

	return facepy.GraphAPI(token)
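auth() relies on a getToken() helper that is not shown here. A minimal stand-in, matching the docstring's description of prompting on the CLI and caching the token in token.p; this is an assumption, not the original helper:

import pickle

def getToken():
    # Hypothetical helper: ask the user for a token (Python 2, matching the example
    # above) and cache it for the next run.
    token = raw_input('Enter a Facebook access token: ').strip()
    pickle.dump(token, open('token.p', 'wb'))
    return token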
Example No. 9
def get_app_graph():
    """
    Returns the Graph object for the current app
    """
    global app_graph
    try:
        app_graph.get('%s/app_domains' % settings.FACEBOOK_APP_ID)
    except:
        APP_TOKEN = facepy.get_application_access_token(
            settings.FACEBOOK_APP_ID, settings.FACEBOOK_APP_SECRET)
        app_graph = facepy.GraphAPI(APP_TOKEN)
    return app_graph
Example No. 10
def update_token(token):
    log("Updating token", Color.BLUE)
    graph = facepy.GraphAPI(token)
    try:
        graph.get('me/posts')
        props_dict = load_properties()
        props_dict['sublets_oauth_access_token'] = token
        props_dict['access_token_expiration'] = time.time() + 7200  # 2-hour buffer
        save_properties(props_dict)
        log("Token updated, you should now extend it", Color.BLUE)
    except Exception as e:
        log("API error - " + e.message, Color.RED)
Example No. 11
 def post_to_fb(self, title, url, image_url, created_time, updated_time):
     print("Post to facebook called")
     self.check_extended_token()
     fb_graph = facepy.GraphAPI(self.extended_token)
     fb_update = title + '\n\n' + url
     fb_image = image_url
     fb_graph.post(
         'me/feed',
         message=fb_update,
         source=fb_image,
         created_time=created_time,
         updated_time=updated_time,
     )
Example No. 12
    def connect(self, **kw):

        # parse kw's
        app_id = kw.get('app_id', settings.FACEBOOK_APP_ID)
        app_secret = kw.get('app_secret', settings.FACEBOOK_APP_SECRET)
        access_token = kw.get('access_token', None)

        # if no access token, create one
        if not access_token:
            access_token = self._generate_app_access_token(app_id, app_secret)

        # return api
        self.conn = facepy.GraphAPI(access_token)
Example No. 13
def find_photos():
    '''
    Creates a dictionary, with album id as key and a list of images
    in the album as the value.
    '''
    albums = {}
    graph = facepy.GraphAPI(token)
    my_albums = graph.get("%s/albums" % username)
    for album in my_albums['data']:
        albums[album['name']] = {}
        albums[album['name']]['id'] = album['id']
        my_pics = graph.get("%s/photos?limit=100" % album['id'])
        albums[album['name']]['images'] = [pic['source'] for pic in my_pics['data']]
    return albums
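A hedged usage sketch for find_photos, downloading every image URL it returns with requests; token and username are module-level names the example assumes, and the file-naming scheme here is only illustrative:

import requests

for album_name, album in find_photos().items():
    for i, src in enumerate(album['images']):
        # Save each photo under its album name and position.
        resp = requests.get(src)
        with open('%s_%03d.jpg' % (album_name, i), 'wb') as f:
            f.write(resp.content)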
Example No. 14
    def sync_social(self):

        if self.facebook:
            graph = facepy.GraphAPI(self.facebook.token)
            profile = graph.get('me', fields=['name', 'id'])
            self.name = profile['name']
            self.picture_url = u"https://graph.facebook.com/%s/picture?type=normal" % profile[
                'id']
            self.save()

            for page in graph.get('me/friends', page=True):
                friends = page['data']
                for friend in friends:
                    try:
                        acc = SocialAccount.objects.get(provider='facebook',
                                                        uid=friend['id'])
                        profile = Profile.objects.get(user=acc.user)
                    except (SocialAccount.DoesNotExist, Profile.DoesNotExist):
                        continue
                    self.friends.add(profile)

        elif self.twitter:
            api = twitter.Api(consumer_key=self.twitter.app.client_id,
                              consumer_secret=self.twitter.app.secret,
                              access_token_key=self.twitter.token,
                              access_token_secret=self.twitter.token_secret)
            profile = api.VerifyCredentials()
            self.name = profile.screen_name
            self.picture_url = profile.profile_image_url
            if 'default_profile_images' not in self.picture_url:
                self.picture_url = self.picture_url.replace(
                    "_normal", "_80x80")

            try:
                friend_ids = list(
                    api.GetFriendIDs(screen_name=profile.screen_name))
            except twitter.TwitterError:
                friend_ids = []

            self.friend_ids_cache = friend_ids
            self.save()

            friend_user_ids = SocialAccount.objects.filter(
                provider='twitter', uid__in=friend_ids).values_list('user_id',
                                                                    flat=True)

            for p in Profile.objects.filter(user__id__in=friend_user_ids):
                if p.friend_ids_cache and profile.id in p.friend_ids_cache:
                    self.friends.add(p)
Example No. 15
    def get_context_data(self, **kwargs):

        context = super().get_context_data(**kwargs)

        if self.request.user.is_authenticated:

            social_token = SocialToken.objects.filter(
                account__user=self.request.user).last()

            if social_token:
                graph = facepy.GraphAPI(social_token.token)
                context['data'] = graph.get(
                    'me?fields=name,picture.type(large)')

        return context
Example No. 16
def generic_facebook_group(fb_group):
    if not hasattr(settings, "FACEBOOK_USER_TOKEN"):
        print "ERROR: Facebook Group %s disabled, please define FACEBOOK_USER_TOKEN in your agenda settings file" % fb_group
        return

    graph = facepy.GraphAPI(settings.FACEBOOK_USER_TOKEN)

    for page in graph.get('%s/events?since=0' % fb_group, page=True):
        for event in page['data']:
            yield {
                'title': event['name'],
                'url': 'http://www.facebook.com/%s' % event['id'],
                'start': parse(event['start_time']).replace(tzinfo=None),
                'location': event.get('location'),
            }
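A small usage sketch for generic_facebook_group, printing the events it yields; the group name is a placeholder:

for event in generic_facebook_group('some.group.name'):
    # Each yielded dict carries title, url, start and location keys.
    print('%s | %s | %s' % (event['start'], event['title'], event['url']))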
Example No. 17
 def _call_api(self):
     fb = facepy.GraphAPI(self.__token__)
     built_url = self.__url__.format(video_id=self._video_id,
                                     part=self.__part__)
     try:
         answer = fb.get(built_url)
     except FacebookError as fber:
         raise APIError(fber.code, fber.message)
     except:
         raise APIError(500, "Facebook API Error")
     else:
         if "status" in answer:
             self._data = answer
             return True
         else:
             raise APIError(404, 'Facebook video is not available')
Example No. 18
def retrieve_admin_ids(group_id, auth_token):
    # Retrieve the uids via FQL query
    graph = facepy.GraphAPI(auth_token)
    admins_query = \
        "SELECT uid FROM group_member WHERE gid=" + group_id + " AND" + \
        " administrator"
    admins = graph.fql(query=admins_query)

    # Parse out the uids from the response
    admins_list = [admin['uid'] for admin in admins]

    # Update the admin_ids in our properties
    saved_props = load_properties()
    saved_props['admin_ids'] = admins_list
    save_properties(saved_props)

    return admins_list
Example No. 19
    def validate(self, data):
        req = facepy.SignedRequest(data['signed_request'],
                                   settings.FACEBOOK_APP_SECRET_KEY,
                                   settings.FACEBOOK_APP_ID)

        # XXX handle facebook exceptions
        # for example, the user can refuse to share their email address

        graph = facepy.GraphAPI(req.user.oauth_token.token)
        data = graph.get('me?fields=email,first_name,last_name,third_party_id')

        extended_token = facepy.get_extended_access_token(
            req.user.oauth_token.token,
            settings.FACEBOOK_APP_ID, settings.FACEBOOK_APP_SECRET_KEY)

        user = User.objects.get_or_create_facebook_user(data, extended_token)[0]

        return {'user': user}
Example No. 20
def likes(id):
    print("now in get_likes")
    graph = facepy.GraphAPI(id)
    try:
        likes = graph.get('me/likes')
    except facepy.exceptions.OAuthError:
        return "*2*2"
    alike = []
    while (True):
        try:
            for l in likes['data']:
                alike.append(l['name'])
            likes = requests.get(likes['paging']['next']).json()
        except KeyError:
            break
    #print(name)
    print(alike)
    return alike
Example No. 21
    def _generate_app_access_token(self, app_id, app_secret):
        """
        Get an extended OAuth access token.
        :param application_id: An icdsnteger describing the Facebook application's ID.
        :param application_secret_key: A string describing the Facebook application's secret key.
        Returns a tuple with a string describing the extended access token and a datetime instance
        describing when it expires.
        """
        # access tokens
        default_access_token = facepy.get_application_access_token(
            application_id=app_id, application_secret_key=app_secret)
        graph = facepy.GraphAPI(default_access_token)

        response = graph.get(path='oauth/access_token',
                             client_id=app_id,
                             client_secret=app_secret,
                             grant_type='client_credentials')

        return url.get_query_param(response, 'access_token')
Example No. 22
def fetch_group_posts(group_id: str, group_name: str, token: str, fields: str = None, pagination: bool = True):
    group, created = FbGroup.objects.get_or_create(
        group_id=group_id,
        name=group_name
    )
    if created:
        group.save()

    if not fields:
        fields = 'id,from,message,attachments,reactions,created_time,' \
                 'comments{message,from,attachment,reactions,created_time,' \
                 'comments{from,message,attachment,reactions,created_time}}'
    graph = facepy.GraphAPI(token)
    fetched_data = graph.get(group_id + "/feed", fields=fields, page=pagination, retry=3)
    for data in fetched_data:
        for post in data.get('data'):
            logger.info('Scraping post {0} from {1}'.format(post.get('id'), post.get('created_time')))
            manage_post(post, group)

    logger.info('Scraping complete.')
Example No. 23
    def sync_text(self):
        text = ''

        if self.facebook:
            graph = facepy.GraphAPI(self.facebook.token)
            posts = graph.get('me/posts', limit=400)['data']
            text += '\n'.join(post.get('message', '') for post in posts)

        if self.twitter:
            api = twitter.Api(consumer_key=self.twitter.app.client_id,
                              consumer_secret=self.twitter.app.secret,
                              access_token_key=self.twitter.token,
                              access_token_secret=self.twitter.token_secret)
            posts = api.GetUserTimeline(trim_user=True,
                                        include_rts=False,
                                        count=200)
            text += '\n'.join(post.text or '' for post in posts)

        self.text = force_unicode(text)
        self.save()
Example No. 24
    def process_request(self, request):
        oauth_token = None
        if request.path.startswith('/js/') or request.path.startswith('/img/')\
         or request.path.startswith('/css/') or request.path.startswith('/admin/'):
            return

        signed_request = self._get_fb_user_from_cookie(request.COOKIES)
        if not signed_request:
            return

        # If the data for API key are passed then we don't need the FB data to be fetched real-time
        if self.is_token_valid(request, signed_request):
            request.user = None  # to be set by the api key
            return

        try:
            graph = facebook.GraphAPI(signed_request.user.oauth_token.token)
            me = graph.get("me")
        except facebook.FacepyError, e:
            request.user = None
Example No. 25
def dl_fb_files(groupId, accessToken):
    graph = facepy.GraphAPI(accessToken)
    getString = '/v2.2/' + groupId + '/files'
    pages = graph.get(getString, page=True, retry=5, limit=2000)


    try:
        os.makedirs(groupId)
    except OSError as e:
        if e.errno == errno.EEXIST and os.path.isdir(groupId):
            pass
        else: raise

    os.chdir(os.path.join(os.getcwd(), groupId))


    for page in pages:
        for post in page['data']:
            url = post['download_link']
            print('fetching ' + url)
            download_file(url)
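dl_fb_files calls a download_file helper that is not part of the excerpt. A minimal stand-in using requests, saving each file under the last segment of its URL; this is assumed behaviour, not the original helper:

import requests

def download_file(url):
    # Stream the file to disk, named after the last path segment of the URL.
    local_name = url.split('/')[-1].split('?')[0] or 'download'
    resp = requests.get(url, stream=True)
    with open(local_name, 'wb') as f:
        for chunk in resp.iter_content(chunk_size=8192):
            if chunk:
                f.write(chunk)
    return local_name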
Example No. 26
def test():

    # Check to see if we're running on Heroku, skip if we aren't
    if not os.environ.get('MEMCACHEDCLOUD_SERVERS', None):
        return True

    # Load the properties
    saved_props = load_properties()

    # Access token
    sublets_oauth_access_token = saved_props['sublets_oauth_access_token']

    # ID of the FB group
    group_id = saved_props['group_id']

    graph = facepy.GraphAPI(sublets_oauth_access_token)

    obj = graph.post(group_id + "/feed", message="test")
    postid = obj['id']

    try:
        graph.delete(postid)
    except Exception as e:
        print 'ERROR: ' + e.message
        print type(e)
        print 'Failed to delete with GraphAPI'
        return False

    print "Confirming deletion..."
    time.sleep(2)
    try:
        print graph.get(str(postid))
        return False
    except:
        print "Deletion confirmed ✓"
        return True
Example No. 27
	def __init__(self, user, auth_token):
		self.user = user
		self.auth_token = auth_token
		self.graph = facepy.GraphAPI(self.auth_token)
Example No. 28
import urllib
import json
import sys
import os
import facepy
import xlrd
import xlwt
from xlutils.copy import copy

#Miguel Cuellar

# Access token and Graph API version
graph = facepy.GraphAPI('YOUR-ACCESS-TOKEN-HERE', version='2.9')

# Excel workbook to read the source data from
book = xlrd.open_workbook('WORKBOOK-NAME.xlsx')
max_nb_row = 0
wb = xlwt.Workbook()
ws = wb.add_sheet('Datos de posts')
# Iterate over the workbook's sheets
for sheet in book.sheets():
    max_nb_row = max(max_nb_row, sheet.nrows)

# Iterate over the workbook's rows
for row in range(max_nb_row):
    for sheet in book.sheets():
        if row < sheet.nrows:
            # Read the IDs of the posts to analyze
            if str(sheet.cell(row, 1).value) != 'id':
                createdTime = str(sheet.cell(row, 0).value)
                postID = str(sheet.cell(row, 1).value)
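The excerpt above stops after reading each post's creation time and ID from the spreadsheet. A hedged sketch of one way those IDs could then be queried with the graph object created earlier and written to the output sheet; the field list and column layout are assumptions, not part of the source:

def write_post_row(graph, ws, row, createdTime, postID):
    # Hypothetical follow-up: look up one post by ID and write a row with xlwt.
    post = graph.get(postID + '?fields=message,shares')
    ws.write(row, 0, createdTime)
    ws.write(row, 1, postID)
    ws.write(row, 2, post.get('message', ''))
    ws.write(row, 3, post.get('shares', {}).get('count', 0))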
Example No. 29
#Author:Atul Kumar
#Date of creation:19/06/2016
#PACKAGE:facebook,facepy
#ABOUT: This script will comment on and like the posts of a particular FB page or user whose page_id or user_id is provided by the user
#CAUTION: FB may detect that it's a bot and block you, so go easy, folks!

import facebook, facepy

#A client access token can be obtained at https://developers.facebook.com/tools
#Make sure you have checked the checkbox labeled publish_actions
accessToken = ''  #Your access token here

#Query the page or user and fetch the IDs of its posts
graph0 = facepy.GraphAPI(accessToken)
graph1 = facebook.GraphAPI(access_token=accessToken, version='2.6')
victim_id = input("Enter the Victim facebook id: ")
query = victim_id + '/posts?fields=id&limit=50'
r = graph0.get(query)
postid = [x['id'] for x in r['data']]

print("There are %s post in list!" % len(postid))

choice = input("Do you want to post a comment on these posts? Y/N: ")
if choice == 'Y':
    how_many_to_comment = int(input("On how many posts do you want to comment: "))
    if how_many_to_comment <= len(postid):
        comment = input("Enter the comment you want to post: ")
        for i in range(how_many_to_comment):
            tmp = postid[i]
            graph1.put_comment(object_id=tmp,
                               message=comment)  #Comment on post
Example No. 30
    def fetch(self, oauth_access_token, max_pages=None):
        """
        For testing purposes one may limit max_pages.
        """
        self.oauth_access_token = oauth_access_token
        self.graph = facepy.GraphAPI(self.oauth_access_token)

        data = self.graph_get_with_oauth_retry('/v2.6/{}/feed'.format(self.group_id), page=True)
        raw_post_data = []
        page_count = 0
        print("foo")
        for page in data:
            if max_pages and page_count >= max_pages:
                break
            page_count += 1
            try:
                logging.debug("new page")
                if "data" in page:
                    logging.debug("page has %s posts", len(page['data']))
                    raw_post_data += [p for p in page['data']]
                    logging.info("current accumulated posts count: %d, oldest timestamp: %s",
                                 len(raw_post_data),
                                 raw_post_data[-1]["updated_time"])
            except:
                pprint.pprint(page)
                raise

        for post in raw_post_data:
            try:
                post_obj = Post(post)
            except:
                logging.error("Problem with raw post data: %s", pprint.pformat(post))
                raise

            self.add_post(post_obj)

            try:
                logging.info("Fleshing out post {} of {}; {}".format(len(self.posts), len(raw_post_data), self.make_url(post_obj)))
                # TODO sort out this horrible boilerplate

                # Step 0: get post from
                logging.info("Fleshing out post {} of {}; {} -- getting from info".format(len(self.posts), len(raw_post_data), self.make_url(post_obj)))
                post_obj.from_info = self.graph_get_with_oauth_retry('/v2.6/{}?fields=from'.format(post_obj.fb_id), page=True)
                assert len(post_obj.from_info) == 1, post_obj.from_info
                post_obj.poster_id = post_obj.from_info[0]['from']['id']

                # Step 1: extract post reactions
                logging.info("Fleshing out post {} of {}; {} -- getting reactions".format(len(self.posts), len(raw_post_data), self.make_url(post_obj)))
                reactions_pages = list(self.graph_get_with_oauth_retry('/v2.6/{}/reactions'.format(post_obj.fb_id), page=True))
                logging.debug("reactions: %d, %s", len(reactions_pages), pprint.pformat(reactions_pages))

                reactions = []
                try:
                    if reactions_pages and reactions_pages[-1]:
                        for reactions_page in reactions_pages:
                            reactions += reactions_page['data']
                        if 'paging' in reactions_pages[-1]:
                            if 'next' in reactions_pages[-1]['paging']:
                                raise Exception("well that was unexpected")
                except:
                    logging.error("Tripped up on {}".format(pprint.pformat(reactions_pages)))
                    raise

                for reaction_data in reactions:
                    post_obj.add_reaction(Reaction(reaction_data))

                # Step 2: extract post comments, along with their likes
                logging.info("Fleshing out post {} of {}; {} -- getting comments".format(len(self.posts), len(raw_post_data), self.make_url(post_obj)))
                comments_pages = list(self.graph_get_with_oauth_retry('/v2.6/{}/comments?fields=from,created_time,message,id,likes'.format(post_obj.fb_id), page=True))
                logging.debug("comments: %d, %s", len(comments_pages), pprint.pformat(comments_pages))
                comments = []
                try:
                    if comments_pages and comments_pages[-1]:
                        for comments_page in comments_pages:
                            comments += comments_page['data']
                        if 'paging' in comments_pages[-1]:
                            if 'next' in comments_pages[-1]['paging']:
                                raise Exception("well that was unexpected")
                except:
                    logging.error("Tripped up on {}".format(pprint.pformat(comments_pages)))
                    raise

                for comments_data in comments:
                    comment_obj = Comment(comments_data)
                    post_obj.add_comment(comment_obj)

                    # Step 3: extract post comment reactions
                    if 'likes' in comments_data:
                        for like_info in comments_data["likes"]["data"]:
                            comment_obj.add_reaction(Reaction(like_info, is_like=True))

            except Exception:
                logging.warn("Problem fleshing out post data: %s - skipping and continuing", pprint.pformat(post_obj._base_info))
                traceback.print_exc()