def fetch(self, service_id, service_author_id, service_event_id, callback):
    """Refresh the comments/likes for one stored LinkedIn event.

    Loads the cached event JSON and — when the event advertises itself as
    commentable and/or likable — re-fetches its comments and likes from
    LinkedIn, merges them into the event object, and hands the rebuilt
    event to ``callback``.  Events that are neither commentable nor
    likable produce no callback.
    """

    asm = self.get_author_service_map(service_author_id)

    # oauth client authorized as this author
    client = oauth.Client(
        oauth.Consumer(self.oauth_config['key'], self.oauth_config['secret']),
        oauth.Token(asm.access_token, asm.access_token_secret))

    # load the stored event so we can check isCommentable / isLikable
    event_json, = db.Session().query(ServiceEvent.json). \
                               filter(and_(ServiceEvent.author_service_map_id == asm.id,
                                           ServiceEvent.event_id == service_event_id)).one()
    event_obj = json_serializer.load_string(event_json)

    update_obj = None
    if event_obj.get("isCommentable", False):
      comments_url = UPDATE_COMMENTS % (self.oauth_config['endpoint'], service_event_id)
      update_obj = json_serializer.load_string(
          make_request(client, comments_url, {'x-li-format': 'json'}))

    likes_obj = None
    if event_obj.get("isLikable", False):
      likes_url = UPDATE_LIKES % (self.oauth_config['endpoint'], service_event_id)
      likes_obj = json_serializer.load_string(
          make_request(client, likes_url, {'x-li-format': 'json'}))

    # nothing fetched means nothing to merge or emit
    if not (update_obj or likes_obj):
      return

    if update_obj:
      event_obj['updateComments'] = update_obj

    if likes_obj:
      total_likes = likes_obj['_total']
      event_obj['isLiked'] = total_likes > 0
      event_obj['numLikes'] = total_likes
      event_obj['likes'] = likes_obj

    interpreter = LinkedinEventInterpreter(event_obj, asm, self.oauth_config)
    callback(create_linkedin_event(asm.author_id, CURRENT_STATE, service_author_id,
                                   interpreter.get_id(), event_obj))
  def fetch(self, service_author_id, callback):
    """Collect recent LinkedIn updates for an author and emit them via callback.

    Pages through the member's network-update stream (newest first),
    stopping when the stream is exhausted, when LinkedIn reports no more
    results, or when an update older than the lookback window is seen.
    Each supported update that passes screen_event() is wrapped with
    create_linkedin_event() and passed to ``callback``; unsupported types
    that are not explicitly ignored are logged.
    """

    super(LinkedinEventCollector, self).fetch(service_author_id, callback)

    state = self.fetch_begin(service_author_id)
    self.fetch_log_info(state)

    asm = state['asm']

    # if this author has no access_token they are unauthorized and we
    # don't collect LinkedIn events for them
    if not asm.access_token:
      return

    service_author_id = asm.service_author_id

    # updates older than this terminate paging
    min_age = datetime.utcnow() - self.NEW_LOOKBACK_WINDOW

    # setup what we need for oauth
    consumer = oauth.Consumer(self.oauth_config['key'], self.oauth_config['secret'])
    token = oauth.Token(key=asm.access_token, secret=asm.access_token_secret)
    client = oauth.Client(consumer, token)

    args = {'scope': 'self',
            'count': self.PAGE_SIZE}

    # get only events since the last update (with a small overlap) or since
    # the lookback window, depending on whether this is the first collection.
    # LinkedIn expects milliseconds since the epoch.
    if asm.most_recent_event_timestamp:
      after = calendar.timegm((asm.most_recent_event_timestamp -
                               self.MOST_RECENT_OVERLAP).utctimetuple()) * 1000
    else:
      after = calendar.timegm((datetime.utcnow() -
                               self.NEW_LOOKBACK_WINDOW).utctimetuple()) * 1000
    args['after'] = after

    offset = 0
    args['start'] = offset

    url = '%s%s?%s' % (self.oauth_config['endpoint'],
                       UPDATE_RESOURCE,
                       urllib.urlencode(args, True))

    def process(post_obj):
      # Interpret, age-check, screen, and possibly emit a single update.
      # Returns False when the update is older than min_age, signalling
      # the caller to stop paging.
      interpreter = LinkedinEventInterpreter(post_obj, asm, self.oauth_config)
      if interpreter.get_create_time() < min_age:
        return False
      if self.screen_event(interpreter, state):
        callback(create_linkedin_event(asm.author_id, CURRENT_STATE,
                                       service_author_id, interpreter.get_id(),
                                       post_obj))
      return True

    total_count = 0
    while url:

      # request the user's updates
      raw_json = json_serializer.load_string(make_request(client, url, {'x-li-format': 'json'}))

      if raw_json is None or raw_json.get('_total', 0) == 0:
        url = None
        break

      for post in raw_json.get('values', []):

        update_type = post['updateType']

        if update_type not in self.SUPPORTED_TYPES:
          if update_type not in self.IGNORED_TYPES:
            logging.warning('???? skipping linkedIn event: %s' % update_type)
          continue

        if update_type in ('CONN', 'JGRP'):
          # the response can contain multiple connections made / groups
          # joined by the member.  We'll separate them into individual
          # responses
          if post['updateContent']['person']['id'] == service_author_id:
            field = 'connections' if update_type == 'CONN' else 'memberGroups'
            postClone = copy.deepcopy(post)
            for item in post['updateContent']['person'][field]['values']:
              postClone['updateContent']['person'][field] = {"_total": 1, "values": [copy.deepcopy(item)]}
              if not process(postClone):
                url = None
                break

        elif update_type in ('PREC', 'SVPR', 'STAT'):
          if post['updateContent']['person']['id'] == service_author_id:
            if not process(post):
              url = None

        elif update_type == 'SHAR':
          if not process(post):
            url = None

        elif update_type == 'MSFC':
          if post['updateContent']['companyPersonUpdate']['person']['id'] == service_author_id:
            if not process(post):
              url = None

        elif update_type == 'JOBP':
          if post['updateContent']['job']['jobPoster']['id'] == service_author_id:
            if not process(post):
              url = None

        # stop paging once an update fell outside the lookback window
        if not url:
          break

      # if the url is None stop
      if not url:
        break

      # BUG FIX: the original conditional expression bound as
      # ``(total_count + _count) if '_count' in raw_json else _total``,
      # silently discarding the running total whenever '_count' was
      # absent.  Accumulate explicitly instead.
      page_count = raw_json['_count'] if '_count' in raw_json else raw_json['_total']
      total_count += page_count

      # stop once we've consumed everything LinkedIn says is available
      if raw_json['_total'] == total_count:
        url = None
        break

      offset += self.PAGE_SIZE
      args['start'] = offset
      url = '%s%s?%s' % (self.oauth_config['endpoint'], UPDATE_RESOURCE, urllib.urlencode(args, True))

    # replaced a stray debug ``print`` with proper logging
    logging.debug('collected %s linkedIn updates', total_count)

    # terminate the fetch
    self.fetch_end(state)