Esempio n. 1
0
def get_twitter_oauth_handler(channel_or_id=None, callback_url=''):
    '''
    Fetch the OAuth Handler. Encapsulate how we handle this per channel,
    which will determine all the proper keys. The channel account
    type will really drive it.

    Note that we also set the required access token.

    The valid cases for access token arguments:
    1. Both set
    2. Neither set
    '''
    try:

        # Get the auth credentials based on the account type
        (account_consumer_key, account_consumer_secret, account_callback_url,
         access_token_key,
         access_token_secret) = get_twitter_oauth_credentials(channel_or_id)

        auth = tweepy.OAuthHandler(
            account_consumer_key,
            account_consumer_secret,
            account_callback_url + callback_url  #,
            #secure=True
        )

        auth.set_access_token(access_token_key, access_token_secret)

    except KeyError, e:
        raise AppException("App Not correctly configured. %s" % str(e))
Esempio n. 2
0
 def create(self, **kw):
     """Create a group, refusing duplicates of (name, account)."""
     group_name = kw.get('name', '')
     group_account = kw.get('account', '')
     # Reject creation when another group already uses this name on the account.
     duplicates = Group.objects.find(name=group_name, account=group_account)
     if duplicates.count():
         raise AppException('A group with same name exists for this account')
     return super(GroupManager, self).create(**kw)
Esempio n. 3
0
 def save(self, **kw):
     """Save the group, enforcing a unique (name, account) pair among groups."""
     group_name = kw.get('name', self.name)
     group_account = kw.get('account', self.account)
     # Any *other* group with the same name/account combination blocks the save.
     for other in Group.objects.find(name=group_name, account=group_account):
         if self.id != other.id:
             raise AppException('A group with same name exists for this account')
     super(Group, self).save(**kw)
Esempio n. 4
0
    def send_message(self, dry_run, creative, post, user, direct_message=None):
        """Send `creative` in reply to `post`, either as a normal tweet or a DM.

        The explicit `direct_message` flag wins; otherwise the decision is
        inferred from the post's message_type. Dry runs and test mode create
        an outbound post record instead of hitting Twitter.
        """
        # self.sync_contacts(post.user_profile)
        from solariat_bottle.tasks.twitter import tw_normal_reply, tw_direct_reply

        # DM flag: explicit argument takes precedence over the post type.
        if direct_message is not None:
            is_dm = direct_message
        else:
            is_dm = (post.message_type == 'direct')

        if is_dm:
            status = creative
        else:
            # Normal replies must be addressed to the author's handle.
            status = "@%s %s" % (post.user_profile.user_name, creative)

        if len(status) > 140:
            msg = (
                'Sorry, you have exceeded your 140 character limit by %d characters. '
                'Please correct your reply and try again.') % (len(status) - 140)
            raise AppException(msg)

        status_id = post.native_id

        # Update the engagement history
        post.user_profile.update_history(self)

        LOGGER.debug("For current message, direct message flag is %s", is_dm)
        if dry_run or get_var('ON_TEST'):
            # No real dispatch on dry runs or under test.
            create_outbound_post(user, self, creative, post)
        elif is_dm:
            tw_direct_reply.ignore(self,
                                   status=status,
                                   screen_name=post.user_profile.user_name)
        else:
            tw_normal_reply.ignore(self,
                                   status=status,
                                   status_id=status_id,
                                   post=post)

        LOGGER.debug("Sent '%s' to %s using %s", creative,
                     post.user_profile.user_name, self.title)
Esempio n. 5
0
 def create_by_user(self, user, **kw):
     """Allocate an email Post, validating the optional 'email_data' packet.

     Must be invoked through db.post.utils.factory_by_user (safe_create=True).
     """
     if not kw.pop('safe_create', False):
         raise AppException("Use db.post.utils.factory_by_user instead")
     _sync = kw.pop('sync', False)  # consumed so it doesn't reach the parent
     payload = kw.pop('email_data', {})
     assert isinstance(payload, dict), type(payload)
     kw.setdefault("extra_fields", {})
     if payload:
         # Every required email key must be present in the payload.
         required = set(EMAIL_DATA_KEYS)
         provided = set(payload.keys())
         assert required <= provided, "%s !<= %s" % (required, provided)
     kw["extra_fields"].update({"email": payload})
     return super(EmailPostManager, self).create_by_user(user=user,
                                                         safe_create=True,
                                                         **kw)
Esempio n. 6
0
    def _get_user_obj(**kw):
        """
        Given a set of accepted parameters, get the user object based on a number of possible fields.

        :param kw: A dictionary which should contain one of the following keys: user, email, username, user_id
        :return: A user object that matches one of the keys passed in
        :raises AppException: if none of the accepted keys is present
        """
        # Log connection parameters for troubleshooting — but never the password.
        for k, v in kw.iteritems():
            if (k != "password"):
                LOGGER.info("New SMS connection with user params: %s : %s ." %
                            (k, v))

        if 'user' in kw:
            return kw['user']
        elif 'email' in kw:
            return User.objects.get(email=kw['email'])
        elif 'username' in kw:
            # NOTE(review): 'username' is looked up against the email field —
            # presumably usernames are email addresses here; confirm with callers.
            return User.objects.get(email=kw['username'])
        elif 'user_id' in kw:
            return User.objects.get(id=kw['user_id'])
        else:
            # Bug fix: the message previously omitted 'username', which is an
            # accepted key (see branch above).
            raise AppException("user, email, username or user_id should be provided")
Esempio n. 7
0
    def create_by_user(self, user, **kw):
        '''Wrapper of the default post creation behaviour.  Auto-assign the respective NPS
        classifier SmartTagChannel after creation.

        :param user: the user creating the post
        :param kw: post-creation kwargs; must include 'channels' and
            'response_type', and the call must come via db.post.utils.factory_by_user
        :return: the created post
        :raises AppException: when not called through factory_by_user
        :raises RuntimeError: when 'response_type' is missing
        :raises ValueError: when 'response_type' is not an accepted value
        '''
        safe_create = kw.pop('safe_create', False)
        if not safe_create:
            raise AppException("Use db.post.utils.factory_by_user instead")

        channels = Channel.objects.find(id__in=kw['channels'])[:]
        content = kw.get('content', '')
        if content == '':
            kw['content'] = DEFAULT_POST_CONTENT

        LOGGER.debug("Creating VOC Post: {}".format(kw))
        # Idiom fix: `X not in Y` instead of `not X in Y`.
        if "response_type" not in kw:
            raise RuntimeError("No response_type for voc post")

        # handling response_type
        response_type = kw.get('response_type')
        if response_type not in ACCEPTED_RESPONSE_TYPE_VALUES:
            raise ValueError("invalid response_type: Expected %s, got: [%s]" %
                             (ACCEPTED_RESPONSE_TYPE_VALUES, response_type))

        # Classifier tag channels for every VOC channel involved.
        nps_channels = [
            ch.classifiers[response_type] for ch in channels
            if ch.type_name == "VOC"
        ]

        post = super(VOCPostManager, self).create_by_user(user=user,
                                                          safe_create=True,
                                                          **kw)
        # Add the post to the respective NPS Classifier for all channels.
        # Idiom fix: a plain loop — the list comprehension was built purely
        # for its side effects and its result was discarded.
        for nps_ch in nps_channels:
            post.handle_add_tag(user,
                                SmartTagChannel.objects.get(nps_ch),
                                filter_others=False)
        return post
Esempio n. 8
0
 def _predict_fit(self, item):
     """Abstract hook: subclasses must override. Always raises AppException."""
     # Typo fix in the message: "overrided" -> "overridden".
     raise AppException(
         'unimplemented method, to be overridden in a subclass')
Esempio n. 9
0
def more_like_post(post, channel):
    """
    Return a list of posts similar to `post` in the given channel.

    Similarity is determined by the list of topics and intentions of the
    initial post. Note that we are looking for posts that are similar, but
    with opposite status, since we want to re-label.

    :param post: reference post whose speech acts drive the lookup
    :param channel: a Channel (or smart tag); normalized via ensure_channels
    :return: list of matching Post objects (despite the old wording, this is
        a plain list, not a queryset)
    """
    from solariat_bottle.db.post.base    import Post
    from solariat_bottle.db.speech_act   import SpeechActMap
    from solariat_bottle.db.channel.base import Channel
    from solariat_bottle.db.conversation import Conversation

    from solariat.utils.timeslot import Timeslot, DURATION_DAY

    topics        = []
    intention_ids = []
    channel = Channel.objects.ensure_channels([channel])[0]
    assignment = post.get_assignment(channel)
    if channel.is_smart_tag:
        # for smart tags lookup similar posts in parent channel,
        # across every speech-act status
        parent_channel = Channel.objects.get(channel.parent_channel)
        status = [SpeechActMap.POTENTIAL, SpeechActMap.ACTIONABLE, SpeechActMap.ACTUAL, SpeechActMap.REJECTED]
    else:
        parent_channel = channel
        status = [SpeechActMap.POTENTIAL]
        if assignment in SpeechActMap.ASSIGNED:
            ''' Postitive assignment could cause a more precise classification
            of a Potential post and could revert the assignment for Rejected
            posts
            '''
            status.append(SpeechActMap.REJECTED)
        elif assignment in {'rejected', 'discarded'}:
            ''' Conversely, may reject potential posts and may cause a reversion
            of prior allocation for Actionable
            '''
            status.append(SpeechActMap.ACTIONABLE)
        else:
            raise AppException("An internal state is not expected: %s. Please contact support for assistance." % assignment)

    # Collect the topics and intention ids from every speech act of the post.
    for sa in post.speech_acts:
        topics.extend(sa['intention_topics'])
        intention_ids.append(sa['intention_type_id'])

    # The basic post lookup that just searches for the latest objects
    # in a window of [created_at - 1 day, created_at + 1 hour].
    res, more_posts_available = Post.objects.by_time_point(
                                    parent_channel,
                                    ['__ALL__'],
                                    from_ts   = Timeslot(post.created_at-DURATION_DAY),
                                    to_ts     = Timeslot(post.created_at+timedelta(hours=1)),
                                    status    = status,
                                    intention = intention_ids,
                                    languages = [post.language],
                                    limit     = 10)
    res = set(res)

    if (channel.is_smart_tag):
        # Part of new re-labeling. If tag for a post is rejected, we should
        # go through all posts from the post conversation and through first
        # RESPONSE_DEPTH_FACTOR responses containing the tag
        service_channel = get_service_channel(channel)
        if service_channel:
            conversations = Conversation.objects.lookup_conversations(service_channel, [post])

            if len(conversations) == 1:
                # First extend with all other posts from this conversation that have that tag
                # assigned to them
                res |= set([p for p in Post.objects(id__in=list(conversations[0].posts))
                              if (str(p.id) != str(post.id) and str(channel.id) in p.tag_assignments)])
        # Now go through the first RESPONSE_DEPTH_FACTOR responses which have that tag assigned

    elif (not channel.is_smart_tag and
            SpeechActMap.STATUS_MAP[post.get_assignment(channel)] in [SpeechActMap.ACTIONABLE, SpeechActMap.REJECTED]):
        # In case we reject a post, go through all the posts for the first RESPONSE_DEPTH_FACTOR responses from
        # the same service channel
        channels = [channel]
        if channel.parent_channel is not None:
            service_channel   = Channel.objects.get(id=channel.parent_channel)
            channels.append(service_channel)
        channel_filter = [ c.id for c in channels ]
        channel_filter_refs = [DBRef('Channel', ch) for ch in channel_filter]
        # NOTE(review): channel_filter_refs and target_status are computed but
        # never used below — the follow-up query appears to have been removed
        # or left unfinished; confirm before deleting this branch's tail.
        if SpeechActMap.STATUS_MAP[post.get_assignment(channel)] == SpeechActMap.REJECTED:
            target_status = [SpeechActMap.POTENTIAL, SpeechActMap.ACTIONABLE]
        else:
            target_status = [SpeechActMap.POTENTIAL, SpeechActMap.REJECTED]
    return list(res)
Esempio n. 10
0
    def create_by_user(self, user, **kw):
        '''
        This method does the lions share of the processing for a new post.

        The keyword arguments we accommodate for twitter are:
        * user_profile - the platform specific user profile
        * url - the url of the post in original location
        * twitter - a packet of extra data for tweets

        The main elements are:
        1. Extract Intentions
        2. Extract Parameters for creation
        3. Allocate the Post object
        4. Link to Conversation
        5. Compute Tag and Channel Relations (filtering)
        6. Update Statistics
        7. Optionally generate a Response Object for Inbox

        :raises AppException: when not called through db.post.utils.factory_by_user
        '''
        safe_create = kw.pop('safe_create', False)

        if not safe_create:
            raise AppException("Use db.post.utils.factory_by_user instead")
        add_to_queue = kw.pop('add_to_queue', True)
        sync = kw.pop('sync', False)

        profile = kw.get('user_profile', None)

        # specific twitter additions
        url = kw.pop('url', None)
        twitter_data = kw.pop('twitter', None)
        extra_fields = kw.pop('extra_fields', {})

        # If only a twitter URL was supplied, recover the native status id from it.
        if not twitter_data and url and '//twitter' in url:
            twitter_data = {'id': get_status_from_url(url)}

        if twitter_data:
            extra_fields.update({'twitter': twitter_data})
        kw['extra_fields'] = extra_fields
        kw['force_create'] = True

        kw = TwitterPost.patch_post_kw(kw, native_data=twitter_data)
        native_id = None
        if twitter_data and twitter_data.get('id', False):
            native_id = twitter_data.get('id')
        post_data = self._prepare_post_checking_duplicates(TwitterPostManager,
                                                           native_id=native_id,
                                                           **kw)
        post, should_skip = post_data
        if should_skip:
            # Duplicate post: return the existing object without postprocessing.
            return post

        post.set_engage_stats(to_save=False)
        post.set_url(profile, twitter_data)  # also saves the post

        # postprocess the post
        if get_var('DEBUG_SKIP_POSTPROCESSING'):
            return post

        if sync or get_var('PROFILING'):
            # running synchronously when profiling
            postprocess_new_post.sync(user, post, add_to_queue)
            post.reload()  # make sure the post has updated stats
        elif get_var('ON_TEST'):
            # We still have use-cases where we need to force a sync run:
            # running asynchronously even when testing
            # (to maximally model the production environment)
            # Bug fix: the original read `postprocess_new_post. async (...)`,
            # which is a SyntaxError on Python 3 where `async` is a keyword;
            # getattr keeps the identical attribute call and parses everywhere.
            pp_task = getattr(postprocess_new_post, 'async')(user, post, add_to_queue)
            # when testing it is important to check for any exceptions
            pp_task.result()
            post.reload()  # make sure the post has updated stats
        else:
            # running asynchronously not waiting for results
            postprocess_new_post.ignore(user, post, add_to_queue)

        return post
Esempio n. 11
0
 def rejected_items(self):
     """Abstract hook: subclasses must override. Always raises AppException."""
     # Typo fix in the message: "overrided" -> "overridden".
     raise AppException(
         'unimplemented method, to be overridden in a subclass')
Esempio n. 12
0
    def create_by_user(self, user, name, description, members, roles, channels, smart_tags=None,
                        journey_types=None, journey_tags=None, funnels=None, predictors=None):
        """
        Create a group and grant it access to the referenced objects.

        :param user: acting user; must be staff or admin
        :param name: group name
        :param description: group description
        :param members: iterable of user ids to add as members
        :param roles: iterable of role ids (coerced to int); users on the
            account holding any of these roles are also added to the group
        :param channels: channel ids the group is granted access to
        :param smart_tags: optional smart tag ids to share with the group
        :param journey_types: optional journey type ids to share
        :param journey_tags: optional journey tag ids to share
        :param funnels: optional funnel ids to share
        :param predictors: optional predictor ids to share
        :return: the created group
        :raises RuntimeError: if `user` is neither staff nor admin
        :raises AppException: if the user has no current account
        """
        from solariat_bottle.db.channel.base import Channel
        from solariat_bottle.db.journeys.journey_type import JourneyType
        from solariat_bottle.db.journeys.journey_tag import JourneyTag
        from solariat_bottle.db.funnel import Funnel
        from solariat_bottle.db.predictors.base_predictor import BasePredictor

        if not (user.is_staff or user.is_admin):
            raise RuntimeError("Only admin and staff users are allowed to create groups.")
        roles = [int(role) for role in roles] if roles is not None else []
        if not user.current_account:
            LOGGER.error(
                "No account could be found for user {}. Aborting group creation.".format(
                    user.email
                ))
            # Bug fix: the concatenated message previously lacked a space and
            # rendered as "...your account.Please try later...".
            raise AppException("Error accessing database, we could not load your account. " +
                               "Please try later. If this keeps reproducing please contact a Staff member.")

        if smart_tags is None:
            smart_tags = []
        if journey_types is None:
            journey_types = []
        if journey_tags is None:
            journey_tags = []
        if funnels is None:
            funnels = []
        if predictors is None:
            predictors = []

        group = super(GroupManager, self).create(name=name,
                                                 description=description,
                                                 members=list(set(members)),
                                                 channels=channels,
                                                 account=user.current_account,
                                                 smart_tags=smart_tags,
                                                 roles=roles,
                                                 journey_types=journey_types,
                                                 journey_tags=journey_tags,
                                                 funnels=funnels,
                                                 predictors=predictors)
        # Update acl for objects which this group was given access to.
        # Inbound channels are mapped to their service channel when one exists.
        for channel in Channel.objects.find(id__in=[ObjectId(c_id) for c_id in channels]):
            if channel.is_inbound:
                channel = get_service_channel(channel) or channel
            channel.add_perm(user, group=group, to_save=True)

        for tag in Channel.objects.find(id__in=[ObjectId(c_id) for c_id in smart_tags]):
            tag.add_perm(user, group=group, to_save=True)

        for jty in JourneyType.objects.find(id__in=journey_types):
            jty.add_perm(user, group=group, to_save=True)

        for jtg in JourneyTag.objects.find(id__in=journey_tags):
            jtg.add_perm(user, group=group, to_save=True)

        for fnl in Funnel.objects.find(id__in=funnels):
            fnl.add_perm(user, group=group, to_save=True)

        for prd in BasePredictor.objects.find(id__in=predictors):
            prd.add_perm(user, group=group, to_save=True)

        # Update members which are part of this group.
        user_ids = [user.objects.get(u_id).id for u_id in members]
        if roles:
            # There are roles added to this group, we need to add all users which
            # have any of those associated roles to the group
            valid_users = user.objects.find(account=user.current_account, user_roles__in=roles)[:]
            user_ids.extend([u.id for u in valid_users])
        # Single bulk update: add the group id to every member's `groups` set.
        user.objects.coll.update({'_id': {'$in': user_ids}},
                                 {'$addToSet': {user.__class__.groups.db_field: group.id}},
                                 multi=True)
        return group
Esempio n. 13
0
def fetch_posts(channels,
                start_ts,
                end_ts,
                topics,
                statuses,
                intentions,
                min_conf,
                agents,
                sort_by='time',
                limit=100,
                message_type=None,
                create_date_limit=None,
                languages=None):
    """
    Fetch posts by aggregating over SpeechActMap documents.

    Builds a mongo aggregation pipeline constrained by packed speech-act-map
    id ranges (one range per channel x status), then optional intention /
    agent / message-type / language / topic filters, and resolves the
    resulting speech acts back to Post objects.

    :param channels: channel ids or Channel objects; all must share a single
        platform or AppException is raised
    :param start_ts: start timeslot of the id-range scan
    :param end_ts: end timeslot of the id-range scan (must be >= start_ts)
    :param topics: topic strings or dicts {'topic': ..., 'topic_type': ...}
    :param statuses: speech-act statuses to include
    :param intentions: intention ids to filter on (optional)
    :param min_conf: minimum intention confidence
    :param agents: agent ids to filter on (optional)
    :param sort_by: accepted but unused in this body — TODO confirm callers
    :param limit: maximum number of posts to return
    :param create_date_limit: keep only posts created before this datetime
    :param message_type: message types to filter on (optional)
    :param languages: languages to filter on (optional)
    :return: list of Post objects sorted by created_at descending
    :raises AppException: if the channels span more than one platform
    """
    from solariat_bottle.db.post.utils import get_platform_class
    from solariat_bottle.db.channel.base import Channel
    from solariat.db.fields import BytesField

    # --- Preliminary range query for the core matching elements ---
    # Normalize bare topic strings into {'topic', 'topic_type'} dicts.
    topics = [
        t if isinstance(t, dict) else dict(topic=t, topic_type='leaf')
        for t in topics
    ]

    to_binary = BytesField().to_mongo
    match_query_base = []

    for channel in channels:
        for status in statuses:
            # compute id bounds for all posts for this slot
            id_lower_bound = pack_speech_act_map_id(channel, status, start_ts,
                                                    0)
            id_upper_bound = pack_speech_act_map_id(channel, status, end_ts,
                                                    BIGGEST_POST_VALUE)

            # add an id-constraining query
            assert start_ts <= end_ts
            assert id_upper_bound >= id_lower_bound

            match_query_base.append({
                '_id': {
                    "$gte": to_binary(id_lower_bound),
                    "$lte": to_binary(id_upper_bound)
                }
            })

    primary_filter = {"$or": match_query_base}

    # Add intention restrictions, which operate in the main fields
    primary_filter["ic"] = {"$gte": min_conf}
    if intentions:
        primary_filter["ii"] = {"$in": intentions}

    if message_type is not None:
        primary_filter["mtp"] = {"$in": message_type}

    # Constrain for agents, again, at the primary level
    if agents:
        primary_filter["at"] = {"$in": agents}

    if languages:
        from solariat_bottle.db.channel_trends import make_lang_query

        primary_filter = {
            "$and": [
                primary_filter,
                make_lang_query(languages, SpeechActMap.language.db_field)
            ]
        }

    pipeline = [{"$match": primary_filter}]

    # Generate Secondary Filter only if we have topic constraints.
    topics_match_query = []
    for topic in topics:
        if topic['topic'] != ALL_TOPICS:
            topics_match_query.append({
                'tt.l': topic['topic_type'] == 'leaf',
                'tt.t': topic['topic']
            })

    if topics_match_query:
        pipeline.append({"$unwind": "$tt"})
        if len(topics_match_query) == 1:
            pipeline.append({"$match": topics_match_query[0]})
        else:
            pipeline.append({"$match": {"$or": topics_match_query}})

    # First impose a limit because we cannot spend all day fetching data, and in the worst
    # case, the data could be huge. So this limit is selected as a reasonable case for searching
    # posts. We also allow the input param to over-ride this value if it exceeds it.
    pipeline.append({"$limit": max(10000, limit)})

    # We want the data in sorted order in general.
    pipeline.append({"$sort": {"ca": -1}})

    # Now throttle the results to a workable page, where specified

    # Resolve the single platform shared by all channels (mixed platforms
    # are rejected because the Post class differs per platform).
    platform = None
    for ch in channels:
        if not isinstance(ch, Channel):
            ch = Channel.objects.get(ch)
        channel_platform = ch.platform
        if platform and platform != channel_platform:
            # TODO: Is this the correct approach or should we just
            # return a bunch of base posts objects in this case ?
            raise AppException(
                "Trying to fetch posts over multiple platforms!")
        else:
            platform = channel_platform

    # Use the correct class depending on the platform we are searching for
    Post = get_platform_class(platform)

    are_more_speech_acts_fetched = True
    len_res_result = 0
    # we start with such limit because there are
    # ~2 speech acts per post on average
    sa_limit = 2 * limit
    posts = set([])

    # posts are created from speech acts (SA)
    # there may be several SAs for one post
    # we keep increasing `sa_limit` for the SA query until n=limit posts are fetched
    # or until no more SA are fetched
    while len(posts) < limit and are_more_speech_acts_fetched:

        pipeline.append({"$limit": sa_limit})
        res = SpeechActMap.objects.coll.aggregate(pipeline)
        new_posts = Post.objects(
            id__in=list(set([r['pt'] for r in res['result']])))
        if create_date_limit:
            new_posts = [
                p for p in new_posts if p.created_at < create_date_limit
            ]
        posts.update(set(new_posts))
        # Double the SA limit while each round still yields more speech acts;
        # stop once a round returns no more than the previous one did.
        if len_res_result < len(res['result']):
            len_res_result = len(res['result'])
            sa_limit = 2 * sa_limit
        else:
            are_more_speech_acts_fetched = False

        # we add new limit to the pipeline in the beginning of the while loop
        del pipeline[-1]

    posts = list(posts)
    posts.sort(key=lambda p: p.created_at, reverse=True)

    # start_time = datetime.now()
    #LOGGER.debug("PostManager.by_time_point Aggregated and retrieved in %s sec. Result=%d",
    #                 datetime.now()-start_time,
    #                 len(posts))
    #LOGGER.debug("PostManager.by_time_point Pipeline=\n%s", pprint.pformat(pipeline))

    return posts
Esempio n. 14
0
    def create_by_user(self, user, **kw):
        '''
        Heavy-lifting entry point for creating a new Facebook post.

        The keyword arguments we accommodate are:
        * user_profile - the platform specific user profile
        * url - the url of the post in original location
        * facebook - a packet of extra data

        Steps: extract parameters, allocate the Post object (skipping
        duplicates), set engagement stats / url / root target, then run
        postprocessing either inline or as a fire-and-forget task.
        '''
        if not kw.pop('safe_create', False):
            raise AppException("Use db.post.utils.factory_by_user instead")
        queue_post = kw.pop('add_to_queue', True)
        run_sync = kw.pop('sync', False)

        # platform-specific inputs
        post_url = kw.pop('url', None)
        fb_data = kw.pop('facebook', None)
        extras = kw.pop('extra_fields', {})
        if fb_data:
            extras['facebook'] = fb_data

        kw['extra_fields'] = extras
        kw['force_create'] = True
        kw = FacebookPost.patch_post_kw(kw, native_data=fb_data)

        native_id = None
        if fb_data:
            # falsy ids are treated the same as missing ones
            native_id = fb_data.get('facebook_post_id') or None
        post, should_skip = self._prepare_post_checking_duplicates(
            PostManager, native_id=native_id, **kw)
        if should_skip:
            return post

        post.set_engage_stats(to_save=False)
        post.set_url(post_url)  # also saves the post
        post.set_root_target()

        # postprocess the post
        if get_var('DEBUG_SKIP_POSTPROCESSING'):
            return post

        if run_sync or get_var('ON_TEST') or get_var('PROFILING'):
            # run inline so exceptions surface (important under test)
            postprocess_new_post.sync(user, post, queue_post)
            post.reload()  # pick up the stats postprocessing just wrote
        else:
            # fire-and-forget in production
            postprocess_new_post.ignore(user, post, queue_post)

        return post
Esempio n. 15
0
def _set_channel_and_tag_assignments(post):  #{
    '''
    Computes and sets the assignment updates for channels and tags on creation of
    a new post. If a post is from an outbound channel, and this post is in reply
    to another, then there is also an implication for that post. The speech act mapping
    must be updated to enable correct searching of posts from a faceted
    analytics view.

    Keep in mind that we have already computed a first pass of channels
    and tags just by looking at the candidate set.

    :param post: the newly created post; mutated in place and saved at the end
    '''
    from solariat_bottle.db.speech_act import (SpeechActMap,
                                               reset_speech_act_keys)

    channels = list(post._get_channels())  # copy

    # Also check for smart tags. If we find any, then we want to include them in set
    # of channels to reset
    # LOGGER.info("NOTIFICATION DEBUG: %s", [x.title for x in post.active_smart_tags])
    for tag in post.active_smart_tags:
        assignment, _ = tag.apply_filter(post)
        post.set_assignment(tag, assignment)
        if SpeechActMap.STATUS_MAP[assignment] == SpeechActMap.ACTIONABLE:
            # Actionable tag hit: bump the alert counter, and email watchers
            # once alert_can_be_sent allows it (counter resets after sending).
            tag.increment_alert_posts_count()
            # LOGGER.info("NOTIFICATION DEBUG: %s; %s, %s", tag.alert_can_be_sent, tag.alert_posts_count, tag.title)
            if tag.alert_can_be_sent:
                send_tag_alert_emails(tag)
                tag.alert_posts_count = 0
            tag.save()
            channels.append(tag)

    # Figure out what the assignment should be for channels.
    # For that we use channel filters.
    # pretty simple, but really they are magic little black boxes :-)
    for channel in channels:
        assignment, _ = channel.apply_filter(post)
        post.set_assignment(channel, assignment)

    # Map agent id -> outbound channels the agent replied on.
    agent_channel_map = {}
    for channel in channels:
        # get the service channel for this if you have one
        sc = post.channels_map.get(
            channel.parent_channel if channel.is_smart_tag else channel.id)

        # Get agent data for this channel and process for them
        agent_data = post.get_agent_data(channel)

        # If no agent data, nothing to do. Note that we will only have agent data
        # If it is a service channel. So after this we know it is
        if not agent_data:
            continue

        # Sanity check: agent data implies this channel is the service
        # channel's outbound channel (or a smart tag attached to it).
        if sc.outbound != channel.id and not (channel.is_smart_tag
                                              and sc.outbound
                                              == channel.parent_channel):
            raise AppException(
                "Ouch! Something went wrong with the data. Please contact support for details."
            )

        # Update agent channel_map
        post.set_reply_context(channel, agent_data)
        agent_id = agent_data['agent']
        agent_channel_map.setdefault(agent_id, []).append(channel)

    # Reset outbound keys and remove from update list
    for agent_id, outbound_channels in agent_channel_map.items():
        reset_speech_act_keys(post,
                              channels=outbound_channels,
                              agent=agent_id,
                              reset_outbound=True)
        for ch in outbound_channels:
            channels.remove(ch)

    # reset SAMs for remaining inbound channels and tags
    if channels:
        reset_speech_act_keys(post, channels=channels)

    post.save()
Esempio n. 16
0
    def reset(cls,
              post,
              channels,
              agent=None,
              reset_outbound=False,
              action='update'):
        '''
        Clears and sets keys for all the given channels. Used when assignment between post
        and channel changes, or when assignment between post and agent changes. First
        removes all the old keys, and then generates new ones. We do not bother updating
        existing documents.

        :param post: the post whose speech-act keys are being regenerated
        :param channels: non-empty list of channels to synchronize
        :param agent: agent to record on the new keys; falls back to the agent
            found on the old keys when not given
        :param reset_outbound: accepted but not read in this body — TODO confirm
        :param action: 'update' regenerates keys; 'remove' only deletes them
        :return: the list of newly saved SpeechActMap objects ([] for 'remove')
        :raises AppException: when `channels` is empty or None
        '''

        if channels == [] or channels == None:
            raise AppException(
                "Oh no! There are no channels provided for synchronizing keys. "
                "This should never happen. Please ask support to have a look at your data."
            )
        # Remove Old Speech Act Keys
        sams = []
        agents_by_channel = {}
        for chan in channels:
            # Initialize agent mapping
            agents_by_channel[get_channel_id(chan)] = cls.ANY_AGENT

            # Now generate all possible ids for all status values
            for status in set(cls.STATUS_MAP.values()):
                sams.extend(make_objects(chan, post, post.speech_acts, status))

        # Now, retrieve the speech act data for agent wherever it exists so we do not
        # lose it.
        for sam in cls.objects(id__in=[sam.id for sam in sams]):
            # Retrieve actual setting if available
            agents_by_channel[get_channel_id(sam.channel)] = sam.agent

        # Nuke the old values. We reset them. Shard key must be immutable so cannot just
        # change the status value.
        cls.objects.remove(id__in=[sam.id for sam in sams])

        if action == 'remove':
            return []

        # Generate New Speech Act Keys
        sams = []
        for chan in channels:
            # Skip regeneration of keys if this is for a smart tag and it is no longer
            # accepted.....
            if chan.is_smart_tag and chan not in post.accepted_smart_tags:
                continue

            status = cls.STATUS_MAP[post.get_assignment(chan)]
            old_agent = agents_by_channel[get_channel_id(chan)]

            # Prefer the explicitly supplied agent, else the one recovered
            # from the old keys above.
            sams.extend(
                make_objects(chan, post, post.speech_acts, status, agent
                             or old_agent))

        for sam in sams:
            try:
                sam.save()
            except DuplicateKeyError:
                # Racing writer already created this key; log and keep going.
                LOGGER.error(
                    "There is already an speech act with the same ID = %s.",
                    sam.id)
        return sams
Esempio n. 17
0
        resp.posted_matchable = posted_matchable
        resp.dispatched_text = "%s %s" % (addressee, creative)
        resp.save_by_user(user)
        post.reply(dry_run, creative, user, resp.channel, response_type)

        # Update the ranking model with feedback
        matchable.update_ranking_model(post)
    except AppException, exc:
        # Things break. Good to know as much as possible
        logger.error(exc)
        resp.restore()
        raise exc
    except Exception, exc:
        logger.error(exc, exc_info=True)
        resp.restore()
        raise AppException("%s. Reverting to pending state." % str(exc))


@io_pool.task(result='ignore')
def handle_post_reply(user,
                      creative,
                      outbound_channel_id,
                      post_id,
                      dry_run=False,
                      prefix='',
                      suffix='',
                      response_type=None):
    """Reply is different from Response that it is arbitrary text posted to arbitrary channel
    in reply to some post.

    Runs as an asynchronous io_pool task; the return value is discarded
    (result='ignore').

    :param user: the user on whose behalf the reply is dispatched
    :param creative: the reply text to post
    :param outbound_channel_id: id of the channel the reply is posted through
    :param post_id: id of the post being replied to
    :param dry_run: when True, presumably skip the actual dispatch -- TODO confirm
        (the body handling this parameter is not visible in this chunk)
    :param prefix: text prepended to the creative -- presumably; verify in full body
    :param suffix: text appended to the creative -- presumably; verify in full body
    :param response_type: optional response type forwarded to the dispatch logic
    """
    # Imported lazily inside the task to avoid import cycles at module load time.
    from solariat_bottle.db.channel.base import Channel
Esempio n. 18
0
 def handle_reject(self, item):
     """Handle rejection of *item*.

     Abstract hook: this base implementation always fails and must be
     overridden by a subclass.

     :param item: the item whose rejection should be handled
     :raises AppException: always, since the base class provides no behavior
     """
     # Fixed error-message grammar ("overrided" -> "overridden").
     raise AppException(
         'unimplemented method, to be overridden in a subclass')
Esempio n. 19
0
def sf_create_case(conversation, sf_access_token, instance_url):
    """
    Create case on Salesforce that will correspond to conversation using input sf_access_token.

    :param conversation: a Conversation object that does not have Case created for it (no external_id).
    :param sf_access_token: the access token that will be used to post the results.
    :param instance_url: base URL of the Salesforce instance the REST call goes to.
    :returns: True after the Case is created, the conversation's external id is
        set, and the conversation is synced to Salesforce.
    :raises SalesforceException: when the connection fails or Salesforce returns
        an error list (e.g. duplicate).
    :raises AccessTokenExpired: when Salesforce answers 401.
    :raises AppException: when the response body reports success=False.
    """
    from solariat_bottle.db.account import AccessTokenExpired

    # NOTE(review): missing credentials are only logged here; execution still
    # falls through to the POST below, which will then fail against Salesforce.
    # Confirm whether an early return/raise was intended.
    if not instance_url or not sf_access_token:
        logger.error("SFDC account is not authorized. Instance url is: %s and access token is %s" % (
                                                                str(instance_url), str(sf_access_token)))
    headers = { 'X-PrettyPrint' : '1',
                'Authorization' : 'Bearer %s' % sf_access_token,
                "Content-Type": "application/json"
                }
    case_resource = '/services/data/v28.0/sobjects/Case/'
    case_url = instance_url + case_resource
    # The Origin for a Case should be the platform for the service channel for the conversation,
    # and the Subject can be the content of the first Post from the conversation.
    first_post = conversation.POST_CLASS.objects.get(id=conversation.posts[0])
    # subject either a string of topics or, if empty, the first post content
    # NOTE(review): the comment above disagrees with the code -- Subject is set
    # from the post content and Description from the topics. Confirm which is
    # the intended mapping.
    subject = first_post.plaintext_content

    # truncate if necessary (Salesforce field length limits)
    solariat_id = truncate(str(conversation.id), 'text', 32)
    subject = truncate(subject, 'text', 255)
    description = truncate(' '.join(first_post.topics).strip(), 'text', 32000)

    sfdc_prefix = get_var('SFDC_MANAGE_PACKAGE_PREFIX')

    # Payload for the Case sobject; SolariatId__c is a managed-package custom
    # field used to correlate the Case back to this conversation.
    data = {
        "Origin" : conversation.service_channel.platform,
        sfdc_prefix + "SolariatId__c" : solariat_id,
        "Subject" : subject,
        "Description" : description,
    }

    try:
        resp = requests.post(case_url, data=json.dumps(data), headers=headers)
    except requests.exceptions.ConnectionError:
        raise SalesforceException("Error while connecting to Salesforce")

    # Salesforce answers 201 on successful creation.
    if resp.status_code != 201:
        if resp.status_code == 401:
            raise AccessTokenExpired("Invalid or expired token.")
        # NOTE(review): for non-401 failures this only logs and then still
        # tries to parse the body below -- the list/dict checks that follow
        # handle the error payload, but confirm this fall-through is deliberate.
        logger.error(resp.json()[0].get('message', "Sync to salesforce failed for unknown reason."))
    sf_response = resp.json()
    if isinstance(sf_response, list):
        # sf_response is a dict within a list when this is a duplicate post
        message = sf_response[0].get('message', '')
        message = 'Error occured while posting to Salesforce. {}'.format(message)
        logger.error(message)
        raise SalesforceException(message)
    if not sf_response.get('success', False):
        raise AppException(str(sf_response.get('errors', 'An error occured while posting to Salesforce.')))

    # Record the Salesforce Case id on the conversation, then push the rest of
    # the conversation's posts to the new Case.
    case_id = sf_response['id']
    conversation.set_external_id(case_id)
    sf_sync_conversation(conversation, sf_access_token, instance_url)
    return True
Esempio n. 20
0
    def create_by_user(self, user, **kw):
        """Create a chat post on behalf of *user*.

        Must only be reached via db.post.utils.factory_by_user, which passes
        safe_create=True; direct calls are rejected. Resolves/generates the
        chat session id, attaches a user profile, packs the event id from the
        actor number and creation time, then creates and post-processes the
        post.

        :param user: the user creating the post (its account supplies the
            customer/agent profile classes)
        :param kw: post construction kwargs; must contain 'actor_id' and
            'is_inbound', may contain 'chat_data', 'session_id', 'lang', etc.
        :returns: the created post
        :raises AppException: when called without safe_create=True
        """
        safe_create = kw.pop('safe_create', False)
        if not safe_create:
            raise AppException("Use db.post.utils.factory_by_user instead")
        add_to_queue = kw.pop('add_to_queue', False)
        sync = kw.pop('sync',
                      False)  # We might consider dropping this entirely
        # Let the document class normalize/augment the raw kwargs in place.
        self.doc_class.patch_post_kw(kw)
        # handling extra_fields: chat_data, when given, is stored under
        # extra_fields['chat']; otherwise an empty chat dict is ensured.
        chat_data = kw.pop('chat_data', None)
        kw.setdefault("extra_fields", {})
        if chat_data:
            kw["extra_fields"].update({"chat": chat_data})
        kw["extra_fields"].setdefault("chat", {})

        # Session id precedence: explicit kwarg, then chat payload, then a
        # freshly generated one.
        session_id = kw.get(
            "session_id", None) or kw["extra_fields"]["chat"].get("session_id")
        if not session_id:
            session_id = self.gen_session_id()
        kw["session_id"] = session_id
        # NOTE(review): the else-branch sentinel is dict() rather than None;
        # harmless since an empty dict is falsy for the check below, but None
        # was probably intended.
        chat_created_at = chat_data.get('created_at',
                                        None) if chat_data else dict()
        if chat_created_at:
            # Honor the chat platform's timestamp (normalized to UTC) instead
            # of "now".
            kw['_created'] = utc(parse_datetime(chat_created_at))

        assert 'actor_id' in kw, "No 'actor_id' provided with chat message, could not infer it based on " + str(
            kw)
        assert 'is_inbound' in kw, "No 'is_inbound' provided with chat message, could not infer it based on " + str(
            kw)

        CustomerProfile = user.account.get_customer_profile_class()
        AgentProfile = user.account.get_agent_profile_class()
        if 'user_profile' not in kw:  # If we have customer id but no specific profile, try to find it in our system
            # Inbound posts come from customers, outbound from agents.
            if kw['is_inbound']:
                customer_or_agent = CustomerProfile.objects.get(kw['actor_id'])
            else:
                customer_or_agent = AgentProfile.objects.get(kw['actor_id'])
            profile = customer_or_agent.get_profile_of_type(ChatProfile)
            if not profile:
                profile = ChatProfile.anonymous_profile(platform='Chat')
            kw['user_profile'] = profile

        if not kw['is_inbound']:
            # We know it's outbound post, we need to figure out actor id based on parent from chat session
            try:
                # Latest inbound post of the same session is the parent.
                parent = self.doc_class.objects.find(
                    session_id=session_id,
                    is_inbound=True).sort(_created=-1).limit(1)[:][0]
            # if we can't figure it out, let's put untracked post as a parent
            except IndexError:
                parent = UntrackedChatPost()
            # Event ids encode (actor_num, created time) into a single key.
            kw['_id'] = pack_event_id(parent.actor.actor_num, kw['_created'])
        else:
            actor_num = self.doc_class.get_actor(True,
                                                 kw['actor_id']).actor_num
            kw['_id'] = pack_event_id(actor_num, kw['_created'])
            # We know that it's inbound post, but may be the first post in conversation was outbound.
            # If that's the case, then this outbound post was fired by UntrackedProfile
            # Now we can encode id using current CustomerProfile instead of UntrackedProfile
            outbount_events = self.doc_class.objects.find(
                session_id=session_id, is_inbound=False)[:]
            for e in outbount_events:
                parent_actor_num, dt = unpack_event_id(e.id)
                # actor_num 0 marks the untracked placeholder profile; rewrite
                # the event id (id is part of the key, so delete + re-save).
                if parent_actor_num == 0:
                    e.delete()
                    e.id = pack_event_id(actor_num, dt)
                    e.save()
        kw['force_create'] = True

        # Default language is English with full confidence when not supplied.
        lang_data = kw.pop('lang', Language(('en', 1)))
        # creation
        post = self.create(**kw)
        # postprocess_new_post(user, post) - failing for now, something with tag assignments
        assert post.session_id, "ChatPost should have chat session_id"
        self._set_post_lang(post, lang_data)
        postprocess_new_post(user, post, add_to_queue)
        # Notify the service channel that a new post arrived.
        get_service_channel(post.channel).post_received(post)
        return post
Esempio n. 21
0
 def _query(self):
     """Build/execute the underlying query for this object.

     Abstract hook: this base implementation always fails and must be
     overridden by a subclass.

     :raises AppException: always, since the base class provides no behavior
     """
     # Fixed error-message grammar ("overrided" -> "overridden").
     raise AppException(
         'unimplemented method, to be overridden in a subclass')