Example #1
    def test_extra_entries(self):
        """Add some entries into PostFilterEntry collection
        that have no correspondent channels.
        Run script and check those entries were removed.
        """
        pf = PostFilterEntry.objects.get()
        pf.entry = 'new__not_in_channel_%s' % pf.entry
        pf.id = None
        pf.save()

        with LoggerInterceptor() as messages:
            run_script(dry_run=True)
            self.assertTrue(messages, msg=messages)
            self.assertEqual(messages[0].message,
                             "Found 1 extra PostFilterEntries")

        self.assert_consistent(False)

        with LoggerInterceptor() as messages:
            run_script(dry_run=False)
            self.assertEqual(messages[0].message,
                             "Found 1 extra PostFilterEntries")
            self.assertTrue(messages[1].message.startswith(
                "Untracking PostFilterEntry: USER_NAME %s" % pf.entry))
        self.assert_consistent()
Example #2
    def test_stat(self):
        stat = ReconnectStatForTest()

        # check 10-minute errors
        stat.add('mike').add('mike')
        stat.add('jessy').add('jessy')
        time.sleep(0.1)

        with LoggerInterceptor() as logs:
            stat.log_frequent_reconnects(1)

            self.assertEqual(self._10min_warning_in_logs(logs), 2)
            self.assertFalse(self._1h_warning_in_logs(logs))
            self.assertFalse(self.total_warning_in_logs(logs))

        # check 1-hour errors, raise count up to THRESHOLD_1H
        for _ in xrange(stat.THRESHOLD_1H - 1):
            stat.add('mike').add('mike')
            stat.add('jessy').add('jessy')
            time.sleep(0.1)
            stat.log_frequent_reconnects(1)

        time.sleep(1)
        with LoggerInterceptor() as logs:
            # emulate total reconnects count lower than threshold
            stat.log_frequent_reconnects(11)

            self.assertEqual(self._1h_warning_in_logs(logs), 2)
            self.assertFalse(self.total_warning_in_logs(logs))

        # check reset every hour
        self.assertTrue(len(stat._10min_stat) == 0)
        self.assertTrue(len(stat._1h_stat) == 0)
        self.assertTrue(len(stat._1h_errors) == 0)

        # check total threshold overrun
        for _ in xrange(stat.THRESHOLD_1H):
            stat.add('mike').add('mike')
            stat.add('jessy').add('jessy', ex=Exception('FakeException'))
            time.sleep(0.1)
            stat.log_frequent_reconnects(1)

        time.sleep(1)
        with LoggerInterceptor() as logs:
            # emulate total threshold overrun
            stat.log_frequent_reconnects(10)

            self.assertEqual(self._1h_warning_in_logs(logs), 2)
            self.assertTrue(self.total_warning_in_logs(logs))
            self.assertTrue(self.ex_output_in_logs(logs))
Example #3
    def test_missing_entries(self):
        """Add keywords/usernames to service channel directly in db.
        Run script and check those entries are tracked.
        """
        new_keywords = ['new_kwd1', 'en__english_kwd1', 'es__spain_kwd2']
        new_skipwords = ['new_skipword1']
        new_usernames = ['@new_uname1', '@new_user2']

        self.sc.inbound_channel.update(pushAll__keywords=new_keywords,
                                       pushAll__skipwords=new_skipwords)

        self.sc.outbound_channel.update(pushAll__usernames=new_usernames)

        self.assert_consistent(False)

        with LoggerInterceptor() as messages:
            run_script(dry_run=True)
            self.assertEqual(len(messages), 6, msg=messages)

        self.assert_consistent(False)

        with LoggerInterceptor() as messages:
            run_script(dry_run=False)
            self.assertEqual(len(messages), 6, msg=messages)

        self.assert_consistent()

        with LoggerInterceptor() as messages:
            run_script(dry_run=False)
            self.assertFalse(messages)

        self.assert_consistent()

        # double check for lang-prefixed keywords
        self.assertTrue(
            PostFilterEntry.objects(channels=self.sc.inbound_channel,
                                    lang='es',
                                    entry='spain_kwd2').count() == 1)
        self.assertTrue(
            PostFilterEntry.objects(channels=self.sc.inbound_channel,
                                    lang='en',
                                    entry='english_kwd1').count() == 1)
        self.assertTrue(
            PostFilterEntry.objects(channels=self.sc.inbound_channel,
                                    entry='es__spain_kwd2').count() == 0)
        self.assertTrue(
            PostFilterEntry.objects(channels=self.sc.inbound_channel,
                                    entry='en__english_kwd1').count() == 0)
Example #4
    def test_get_channel_user(self):
        token = self.get_token()
        post_data = dict(token=token, channel=str(self.channel.id))
        try:
            UserProfile.objects.get(user_name='solariatc')
            self.fail("Should be no user profile at this point.")
        except UserProfile.DoesNotExist:
            pass
        with LoggerInterceptor() as logs:
            u_p = self.rpc('commands/twitter/channel_user', **post_data)
            self.assertEqual(
                self._count_message(
                    'Did not find channel user for channel_id=', logs), 1)
            self.assertEqual(u_p["screen_name"], "solariatc")
            try:
                UserProfile.objects.get(user_name='solariatc')
            except UserProfile.DoesNotExist:
                self.fail(
                    "User profile should have been cached by the channel_user call."
                )

            new_u_p = self.rpc('commands/twitter/channel_user', **post_data)
            self.assertEqual(
                self._count_message(
                    'Did not find channel user for channel_id=', logs), 1)

            self.assertDictEqual(new_u_p, u_p)
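Examples #4 and #11 also rely on a _count_message helper that is not shown here. A plausible sketch, assuming it simply counts the captured records whose message contains the given fragment:

    def _count_message(self, text, logs):
        # number of captured log records whose message contains `text`
        return len([log for log in logs if text in log.message])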
Example #5
    def test_add_missing_parents_on_pull(self):
        root_post_data, _ = self.posts_native_data[0]
        post_data, _ = self.posts_native_data[1]
        fbs = FacebookServiceChannel.objects.create_by_user(self.user, title='FBS',
                                                            posts_tracking_enabled=True)

        root_post = self._create_db_post(root_post_data['_wrapped_data']['message'], channel=fbs,
                                         facebook=root_post_data)
        conv = Conversation.objects.get()
        conv.delete()
        root_post.reload()
        self.assertEqual(Conversation.objects(posts=root_post.id).count(), 0)

        post = self._create_db_post(post_data['_wrapped_data']['message'], channel=fbs,
                                    facebook=post_data)
        self.login(user=self.user)

        requests = mock.MagicMock()
        params = {'channel': str(fbs.id),
                  'limit': 10,
                  'reserve_time': 30,
                  'mode': 'conversation',
                  'token': self.auth_token}

        from solariat.tests.base import LoggerInterceptor
        with mock.patch.dict('sys.modules', {'requests': requests}), LoggerInterceptor() as logs:
            response = self.client.get(get_api_url('queue/fetch'),
                                       data=json.dumps(params),
                                       content_type='application/json',
                                       base_url='https://localhost')
            # root post was in database,
            # so it should be added without marking conversation as corrupted
            requests.get.assert_not_called()
            found_parent_msgs = [log.message for log in logs if 'Found parent post' in log.message]
            assert len(found_parent_msgs) == 1
            conv.reload()
            self.assertFalse(conv.is_corrupted)

        # clean the database and create a conversation from a post whose parent is missing
        Conversation.objects.coll.remove()
        FacebookPost.objects.coll.remove()
        QueueMessage.objects.coll.remove()

        post = self._create_db_post(post_data['_wrapped_data']['message'], channel=fbs,
                                    facebook=post_data)
        with mock.patch.dict('sys.modules', {'requests': requests}):
            response = self.client.get(get_api_url('queue/fetch'),
                                       data=json.dumps(params),
                                       content_type='application/json',
                                       base_url='https://localhost')
            self.assertEqual(requests.request.call_count, 1)

        conv.reload()
        self.assertTrue(conv.is_corrupted)

        # simulate recovery of root post
        post = self._create_db_post(root_post_data['_wrapped_data']['message'], channel=fbs,
                                    facebook=root_post_data)
        conv.reload()
        self.assertFalse(conv.is_corrupted)
Example #6
    def test_support(self):
        from solariat.tests.base import LoggerInterceptor
        with LoggerInterceptor() as logs:
            from solariat.utils.lang.helper import build_lang_map
            lang_map = build_lang_map(
                [lang['code'] for lang in self.TWITTER_LANGUAGES_RESPONSE])
            self.assertFalse(logs)
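Note: all of the snippets on this page use LoggerInterceptor from solariat.tests.base as a context manager that yields the log records captured while it is active; the assertions then inspect each record's .message, .levelname and .exc_info attributes. A minimal sketch of such a helper built on the standard logging module (an illustration under that assumption, not the real implementation):

import logging

class LoggerInterceptor(object):
    """Collect log records emitted anywhere while the context is active."""

    class _Handler(logging.Handler):
        def __init__(self, records):
            logging.Handler.__init__(self)
            self.records = records

        def emit(self, record):
            # .message is normally set during formatting; set it here so
            # the tests can assert on record.message directly
            record.message = record.getMessage()
            self.records.append(record)

    def __enter__(self):
        self.records = []
        self._handler = self._Handler(self.records)
        logging.getLogger().addHandler(self._handler)
        return self.records

    def __exit__(self, exc_type, exc_value, traceback):
        logging.getLogger().removeHandler(self._handler)
        return False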
Example #7
    def test_non_active_channels(self):
        """Deactivate/Remove channel directly in db.
        Run script and check the correspondent postfilter entries were removed
        """
        self.sc.update(set__status='Suspended')
        self.sc.inbound_channel.update(set__status='Suspended')
        self.sc.outbound_channel.update(set__status='Suspended')

        self.assert_consistent(False)

        with LoggerInterceptor() as messages:
            run_script(dry_run=True)
            self.assertEqual(len(messages), 1)
            self.assertEqual(
                messages[0].message,
                "Found 6 PostFilterEntries for non-active channels")
        self.assert_consistent(False)

        run_script(dry_run=False)
        self.assert_consistent()

        self.etc.update(set__status='Archived')
        self.assert_consistent(False)
        run_script(dry_run=False)
        self.assert_consistent()

        self.assertFalse(self.get_filter_entries() +
                         self.get_channel_entries())
Example #8
    def test_missing_parent_for_comment__deleted_channel(self):
        root_post_data = self.public_conversation_data[0]
        comment_data = self.public_conversation_data[1]  # first comment
        facebook_handle_id = self.lisa['id']
        facebook_page_ids = ["998309733582959", "297414983930888"]

        fbs1 = FacebookServiceChannel.objects.create_by_user(self.user, title='FBS',
                                                             facebook_handle_id=facebook_handle_id,
                                                             facebook_page_ids=facebook_page_ids,
                                                             posts_tracking_enabled=True)

        root_post = self._create_db_post(root_post_data['_wrapped_data']['message'],
                                         channel=fbs1,
                                         facebook=root_post_data,
                                         user_profile=self.will)
        fbs1.archive()
        conv1 = Conversation.objects.get(posts=root_post.id)
        self.assertRaises(Channel.DoesNotExist, lambda: conv1.service_channel)

        fbs2 = FacebookServiceChannel.objects.create_by_user(self.user, title='FBS',
                                                             facebook_handle_id=facebook_handle_id,
                                                             facebook_page_ids=facebook_page_ids,
                                                             posts_tracking_enabled=True)
        self.login(user=self.user)

        requests = mock.MagicMock()
        params = {'channel': str(fbs2.id),
                  'limit': 10,
                  'reserve_time': 30,
                  'mode': 'root_included',
                  'token': self.auth_token}

        from solariat.tests.base import LoggerInterceptor
        with mock.patch.dict('sys.modules', {'requests': requests}), LoggerInterceptor() as logs:
            # adding comment to another channel
            comment = self._create_db_post(comment_data['_wrapped_data']['message'], channel=fbs2,
                                           facebook=comment_data,
                                           user_profile=self.lisa)
            conv2 = Conversation.objects.get(posts=comment.id)

            self.assertNotEqual(conv1.id, conv2.id)
            self.assertNotEqual(conv1.channels, conv2.channels)

            response = self.client.get(get_api_url('queue/fetch'),
                                       data=json.dumps(params),
                                       content_type='application/json',
                                       base_url='https://localhost')
            # root post was in database,
            # so it should be added without marking conversation as corrupted
            requests.get.assert_not_called()
            found_parent_msgs = [log.message for log in logs if 'Found parent post' in log.message]
            assert len(found_parent_msgs) == 1
            conv2.reload()
            self.assertFalse(conv2.is_corrupted)
            data = json.loads(response.data)
            self.assertEqual([p['id'] for p in data['data'][0]['post_data']], [root_post.id, comment.id])
Example #9
    def test_normal(self):
        """With proper basic channel setup script
        should not add or delete any PostFilterEntry docs
        """
        self.assert_consistent()

        with LoggerInterceptor() as messages:
            run_script()
            self.assertFalse(messages, msg=messages)

        self.assert_consistent()
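The run_script examples on this page share a dry-run convention: with dry_run=True the script only logs what it would change, and with dry_run=False it also applies the changes. A hypothetical skeleton of that convention, with find_extra_entries standing in for the real database lookup:

import logging

log = logging.getLogger(__name__)

def run_script(dry_run=True, find_extra_entries=lambda: []):
    # log findings first, so a dry run produces the same messages
    extra = find_extra_entries()
    if extra:
        log.info("Found %d extra PostFilterEntries" % len(extra))
        if not dry_run:
            for entry in extra:
                log.info("Untracking PostFilterEntry: %s" % entry)
                entry.delete()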
Example #10
    def test_language_set_changed(self):
        """Change the channel.languages list directly in the db.
        Run the script and check that the PostFilters are updated.
        """
        self.sc.inbound_channel.update(pushAll__langs=['es'])
        self.assert_consistent(False)
        with LoggerInterceptor() as messages:
            run_script(dry_run=True)
            self.assertEqual(len(messages), 4, msg=messages)
            for m in messages:
                self.assertTrue(
                    m.message.startswith('Missing PostFilterEntries'))
        run_script(dry_run=False)
        self.assert_consistent()
Example #11
    def test_get_user_info(self):
        token = self.get_token()
        post_data = dict(token=token,
                         channel=str(self.channel.id),
                         user_id='1411081099')  # This is id for user2_solariat
        try:
            UserProfile.objects.get(user_name='user2_solariat')
            UserProfile.objects.get(id='1411081099:0')
            self.fail("Should be no user profile at this point.")
        except UserProfile.DoesNotExist:
            pass

        with LoggerInterceptor() as logs:
            u_p = self.rpc('commands/twitter/user_info', **post_data)
            self.assertEqual(
                self._count_message(' in db. Fetching from twitter', logs), 1)
            self.assertEqual(u_p["screen_name"], "user2_solariat")
            try:
                UserProfile.objects.get(user_name='user2_solariat')
                UserProfile.objects.get(id='1411081099:0')
            except UserProfile.DoesNotExist:
                self.fail(
                    "User profile should have been cached by the user_info call."
                )

            new_u_p = self.rpc('commands/twitter/user_info', **post_data)
            self.assertDictEqual(new_u_p, u_p)
            self.assertTrue(
                self._count_message(' in db. Fetching from twitter', logs) >= 1
            )

            post_data = dict(
                token=token,
                channel=str(self.channel.id),
                user_id='1411050992')  # This is id for user1_solariat
            self.rpc('commands/twitter/user_info', **post_data)
            try:
                UserProfile.objects.get(user_name='user1_solariat')
                UserProfile.objects.get(id='1411050992:0')
            except UserProfile.DoesNotExist:
                self.fail(
                    "User profile should have been cached by the user_info call."
                )
            self.assertTrue(
                self._count_message(' in db. Fetching from twitter', logs) >= 2
            )
Example #12
    def test_group_smart_tag_deleted(self):
        """Test the unsafely removed smart tag channel
        is pulled from group smart tags list. Issue #3632, #3583
        """
        test_channel = TwitterServiceChannel.objects.create_by_user(
            self.user, title="Test Service Channel")
        other_user = self._create_db_user(email='*****@*****.**',
                                          roles=[AGENT])
        smart_tag = STC.objects.create_by_user(self.user,
                                               title="Tag",
                                               parent_channel=test_channel.id,
                                               account=test_channel.account)
        payload = {
            'description': 'Test',
            'roles': [AGENT, ANALYST],
            'channels': [str(test_channel.id)],
            'members': [str(other_user.id)],
            'smart_tags': [str(smart_tag.id)],
            'name': 'Test Groups'
        }
        self.client.post('/groups/json',
                         data=json.dumps(payload),
                         content_type='application/json')
        groups = self._get_groups()
        self.assertEqual(len(groups), 1)
        self.assertEqual(groups[0]['smart_tags'], [str(smart_tag.id)])

        smart_tag.reload()
        other_user.reload()
        self.assertTrue(smart_tag.has_perm(other_user))
        self.assertTrue(str(groups[0]['id']) in smart_tag.acl)

        smart_tag.delete()

        with LoggerInterceptor() as logs:
            groups = self._get_groups()
            group = Group.objects.get(groups[0]['id'])
            self.assertTrue(
                logs[0].message.startswith("DBRefs pulled from %s.%s:" %
                                           (group, Group.smart_tags.db_field)))
            self.assertTrue(str(smart_tag.id) in logs[0].message)

        self.assertEqual(groups[0]['smart_tags'], [])
Example #13
    def test_add_missing_parents_on_pull__deleted_channel(self):
        root_post_data, _ = self.posts_native_data[0]
        post_data, _ = self.posts_native_data[1]
        fbs1 = FacebookServiceChannel.objects.create_by_user(self.user, title='FBS',
                                                             posts_tracking_enabled=True)

        root_post = self._create_db_post(root_post_data['_wrapped_data']['message'], channel=fbs1,
                                         facebook=root_post_data)
        fbs1.archive()
        conv1 = Conversation.objects.get(posts=root_post.id)
        self.assertRaises(Channel.DoesNotExist, lambda: conv1.service_channel)

        fbs2 = FacebookServiceChannel.objects.create_by_user(self.user, title='FBS',
                                                             posts_tracking_enabled=True)
        post = self._create_db_post(post_data['_wrapped_data']['message'], channel=fbs2,
                                    facebook=post_data)
        conv2 = Conversation.objects.get(posts=post.id)

        self.assertNotEqual(conv1.id, conv2.id)
        self.assertNotEqual(conv1.channels, conv2.channels)

        self.login(user=self.user)

        requests = mock.MagicMock()
        params = {'channel': str(fbs2.id),
                  'limit': 10,
                  'reserve_time': 30,
                  'mode': 'conversation',
                  'token': self.auth_token}

        from solariat.tests.base import LoggerInterceptor
        with mock.patch.dict('sys.modules', {'requests': requests}), LoggerInterceptor() as logs:
            response = self.client.get(get_api_url('queue/fetch'),
                                       data=json.dumps(params),
                                       content_type='application/json',
                                       base_url='https://localhost')
            # root post was in database,
            # so it should be added without marking conversation as corrupted
            requests.get.assert_not_called()
            found_parent_msgs = [log.message for log in logs if 'Found parent post' in log.message]
            assert len(found_parent_msgs) == 1
            conv2.reload()
            self.assertFalse(conv2.is_corrupted)
Example #14
    def test_streaming(self):
        """Load tweets from sample file"""
        sample_count = 10

        ch = self.setup_channel(
            keywords=['Directioners4Music', '#Directioners4Music'])
        ch.skip_retweets = False
        ch.save()
        self.start_bot()
        try:
            with LoggerInterceptor() as logs:
                for data in test_utils.filestream():
                    self.send_event('on_data', data)

                self.assert_no_errors_in_logs(logs)

            self.wait_bot(timeout=1 * sample_count)
            self.assertEqual(TwitterPost.objects.count(), sample_count)
        finally:
            self.stop_bot()
Example #15
        def run_fetch_queue_test(mode='conversation'):
            Conversation.objects.coll.remove()
            FacebookPost.objects.coll.remove()
            QueueMessage.objects.coll.remove()

            root_post = self._create_db_post(root_post_data['_wrapped_data']['message'], channel=fbs,
                                             facebook=root_post_data)
            conv = Conversation.objects.get()
            self.assertEqual([int(p) for p in conv.posts], [root_post.id])

            QueueMessage.objects.coll.remove()  # purge queue

            post = self._create_db_post(post_data['_wrapped_data']['message'], channel=fbs,
                                        facebook=post_data)
            self.login(user=self.user)

            requests = mock.MagicMock()
            params = {'channel': str(fbs.id),
                      'limit': 10,
                      'reserve_time': 30,
                      'mode': mode,
                      'token': self.auth_token}

            from solariat.tests.base import LoggerInterceptor
            with mock.patch.dict('sys.modules', {'requests': requests}), LoggerInterceptor() as logs:
                response = self.client.post(
                    get_api_url('queue/fetch'),
                    data=json.dumps(params),
                    content_type='application/json',
                    base_url='https://localhost')
                data = json.loads(response.data)

                self.assertEqual(len(data['data']), 1)
                if 'post_ids' in data['data'][0]:
                    self.assertEqual(data['data'][0]['post_ids'],
                                     [QueueMessage.objects.make_id(fbs.id, post.id)])
                self.assertEqual(len(data['data'][0]['post_data']), 1)
                self.assertEqual(str(data['data'][0]['post_data'][0]['id']), str(post.id))
Example #16
    def test_consumer(self):
        manager = JobsManager(jobs_config)
        job = manager.job

        # test consumer/executor success
        @job('pytest')
        def task1(arg):
            return 1

        class Consumer(Thread):
            def run(self):
                JobsConsumer(['pytest'], manager=manager).run()

        Consumer().start()
        with LoggerInterceptor() as logs:
            task1(None)
            time.sleep(0.1)
            success = [
                1 for log in logs
                if 'execution successful\nresult: 1' in log.message
            ]
            self.assertTrue(success)

        original_send_message = manager.producer.send_message
        send_message = MagicMock()
        manager.producer.send_message = send_message

        # test NoJobStatusFound
        send_message.side_effect = lambda topic, message: original_send_message(
            topic, None)

        Consumer().start()
        with LoggerInterceptor() as logs:
            task1(None)
            time.sleep(0.1)
            fail_nojob = [
                1 for log in logs
                if 'Error creating JobExecutor' in log.message
                and log.levelname == 'ERROR'
            ]
            self.assertTrue(fail_nojob)

        # test NoPendingJobsFound
        def _create_running_job(topic, message):
            message.update(status=JobModel.RUNNING)
            return original_send_message(topic, message)

        send_message.side_effect = _create_running_job

        Consumer().start()
        with LoggerInterceptor() as logs:
            task1(None)
            time.sleep(0.1)
            fail_nojob = [
                1 for log in logs
                if 'Another consumer may handle it' in log.message
                and log.levelname == 'ERROR'
            ]
            self.assertTrue(fail_nojob)

        # test execution failure (different git commit)
        newer_git_commit = 'newer'

        def _commit_hash_mismatch(topic, message):
            message.metadata = {'git_commit': newer_git_commit}
            return original_send_message(topic, message)

        send_message.side_effect = _commit_hash_mismatch

        @job('pytest')
        def fail_task():
            raise Exception('FakeInternalTaskError')

        Consumer().start()
        with LoggerInterceptor() as logs:
            fail_task()
            time.sleep(0.1)
            err = 'has git_commit: %s' % newer_git_commit
            fail = [
                1 for log in logs
                if err in log.message and log.levelname == 'ERROR'
            ]
            self.assertTrue(fail)
Example #17
    def __set_up_data(self):
        from solariat_bottle.scripts.data_load.gforce.customers import (
            setup_customer_schema, setup_agent_schema, generate_customers,
            generate_agents)

        setup_customer_schema(self.user)
        setup_agent_schema(self.user)

        paths = [
            # Purchasing
            ('Purchasing', [('twitter', 1, 'Research'),
                            ('twitter', 1, 'Select Product')], ('', )),
            ('Purchasing', [('twitter', 1, 'Research'),
                            ('twitter', 1, 'Select Product'),
                            ('twitter', 1, 'Purchase'),
                            ('nps', 1, 'Purchase', 'detractor')], ('', )),
            # Tech Support
            ('Tech Support', [('twitter', 1, 'Report Issue'),
                              ('twitter', 1, 'Consult'),
                              ('twitter', 1, 'Abandon'),
                              ('nps', 1, 'Abandon', 'passive')], ('', )),
            ('Tech Support', [('twitter', 1, 'Report Issue'),
                              ('twitter', 1, 'Consult'),
                              ('twitter', 1, 'Abandon'),
                              ('nps', 1, 'Abandon', 'passive')], ('', )),
            ('Tech Support', [('twitter', 1, 'Report Issue'),
                              ('twitter', 1, 'Consult'),
                              ('twitter', 1, 'Resolve'),
                              ('nps', 1, 'Resolve', 'promoter')], ('', )),
            # Billing
            ('Billing', [('twitter', 1, 'Submit Request'),
                         ('twitter', 1, 'Consult'), ('twitter', 1, 'Abandon'),
                         ('nps', 1, 'Abandon', 'promoter')], ('', )),
            ('Billing', [('twitter', 1, 'Submit Request'),
                         ('twitter', 1, 'Consult')], ('', )),
            ('Billing', [('twitter', 1, 'Submit Request'),
                         ('twitter', 1, 'Consult')], ('', )),
            ('Billing', [('twitter', 1, 'Submit Request'),
                         ('twitter', 1, 'Consult'), ('twitter', 1, 'Resolve'),
                         ('nps', 1, 'Resolve', 'detractor')], ('', )),
            ('Billing', [('twitter', 1, 'Submit Request'),
                         ('twitter', 1, 'Consult'), ('twitter', 1, 'Resolve'),
                         ('nps', 1, 'Resolve', 'passive')], ('', )),
        ]

        n_customers = len(paths)
        customers = generate_customers(self.account.id,
                                       n_customers=n_customers,
                                       status='REGULAR')
        CustomerProfile = self.account.get_customer_profile_class()
        self.assertEqual(CustomerProfile.objects.count(), n_customers)
        customer_journey_counts = [(customer, 1) for customer in customers]

        with LoggerInterceptor() as logs:
            self._create_journeys_with_paths(paths,
                                             customer_journey_counts,
                                             stick_to_paths=True)

        messages = [log.message for log in logs if 'No actor' in log.message]
        self.assertFalse(messages,
                         msg=u'Got "No actor " warnings:\n%s' %
                         '\n'.join(messages))
        self.assertEqual(CustomerProfile.objects.count(), n_customers)

        messages = [
            log.message for log in logs
            if 'has non-existing customer id' in log.message
        ]
        self.assertFalse(
            messages,
            msg=u'Got errors in event.compute_journey_information\n%s' %
            '\n'.join(messages))

        self.assertEqual(CustomerJourney.objects.count(), len(paths))
        self.assertEqual(JourneyType.objects().count(), 3)
        self.assertEqual(JourneyStageType.objects().count(), 15)
Example #18
    def test_api(self):
        from solariat_bottle.db.events.event import Event

        acc = self.user.account
        CustomerProfile = acc.get_customer_profile_class()
        channel_type = ChannelType.objects.create_by_user(self.user,
                                                          name='APIChannelType',
                                                          account=self.user.account)
        ChClass = channel_type.get_channel_class()
        channel = ChClass.objects.create_by_user(self.user,
                                                 title='API Channel',
                                                 channel_type_id=channel_type.id)

        click_et = acc.event_types.create(self.user, channel_type, 'Click')
        CLICK_TYPE_SCHEMA = [
            {KEY_NAME: 'url', KEY_TYPE: TYPE_STRING},
            {KEY_NAME: 'date', KEY_TYPE: TYPE_TIMESTAMP, KEY_CREATED_AT: True},
            {KEY_NAME: 'session_id', KEY_TYPE: TYPE_STRING, KEY_IS_NATIVE_ID: True},
        ]

        call_et = acc.event_types.create(self.user, channel_type, 'Call')
        CALL_TYPE_SCHEMA = [
            {KEY_NAME: 'phone', KEY_TYPE: TYPE_STRING},
            # TODO: change later, check type is changed
            {KEY_NAME: 'duration', KEY_TYPE: TYPE_STRING},
            {KEY_NAME: 'date', KEY_TYPE: TYPE_TIMESTAMP, KEY_CREATED_AT: True},
            {KEY_NAME: 'agent_id', KEY_TYPE: TYPE_STRING},
        ]

        def gen_data(et, **kw):
            # dt = now() - timedelta(minutes=random.randint(1, 10))
            data = {
                'channel': str(channel.id),
                'actor_id': 1,
                'content': '',
                'token': self.get_token(),

                'event_type': et.name,
                # 'date': datetime_to_timestamp(dt),
            }
            data.update(kw)
            # return dt, data
            return data

        # click_dt, click_data = gen_data(click_et, **{
        click_data = gen_data(click_et, **{
            'url': 'http://site.com/page1/link1',
            'session_id': str(ObjectId()),
        })

        # call_dt, call_data = gen_data(call_et, **{
        call_data = gen_data(call_et, **{
            'phone': '123',
            'duration': str(random.randint(20, 300)),
            'agent_id': str(ObjectId()),
        })

        # post click (no schema)
        with LoggerInterceptor() as logs:
            resp = self.client.post('/api/v2.0/posts',
                data=json.dumps(click_data),
                content_type='application/json',
                base_url='https://localhost')

            errors = [1 for log in logs if 'is not in the field set' in log.message]
            self.assertTrue(errors)
            self.assertEqual(resp.status_code, 500)

        # create schema
        click_et.update(schema=CLICK_TYPE_SCHEMA)
        call_et.update(schema=CALL_TYPE_SCHEMA)

        start = now()

        resp = self.client.post('/api/v2.0/posts',
            data=json.dumps(click_data),
            content_type='application/json',
            base_url='https://localhost')

        click_resp = json.loads(resp.data)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(click_resp['ok'])

        resp = self.client.post(
            '/api/v2.0/posts',
            data=json.dumps(call_data),
            content_type='application/json',
            base_url='https://localhost')

        call_resp = json.loads(resp.data)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(call_resp['ok'])

        self.assertEqual(Event.objects.count(), 2)
        call_event = Event.objects.get(call_resp['item']['id'])
        self.assertIsInstance(call_event.duration, basestring)
        customer = CustomerProfile.objects.get(1)
        events_by_customer = Event.objects.range_query_count(start, now(), customer)
        self.assertEqual(events_by_customer, 2)

        # check basic dynamic functionality: change type of field,
        # sync, check it is changed in db
        for col in CALL_TYPE_SCHEMA:
            if col[KEY_NAME] == 'duration':
                col[KEY_TYPE] = TYPE_INTEGER
        call_et.update_schema(CALL_TYPE_SCHEMA)
        call_et.apply_sync()
        call_et.accept_sync()
        # check data in db has changed
        call_event.reload()
        self.assertIsInstance(call_event.duration, (int, long, float))

        # add event by another customer
        click2_data = gen_data(click_et, **{
            'url': 'http://site.com/page1/link2',
            'session_id': str(ObjectId()),
        })
        resp = self.client.post(
            '/api/v2.0/posts',
            data=json.dumps(click2_data),
            content_type='application/json',
            base_url='https://localhost')

        # check how counters are changed
        self.assertEqual(Event.objects.count(), 3)
        self.assertEqual(events_by_customer, 2)
        self.assertEqual(click_et.get_data_class().objects.count(), 2)
Example #19
    def test_skipwords_and_skip_retweets(self):
        """Tests tweets with skipwords and retweets
        are skipped according to channel configuration"""
        import random
        up = UserProfile.objects.upsert('Twitter',
                                        profile_data={'screen_name': 'test'})
        ta = self.setup_outbound_channel('OC', twitter_handle=up.user_name)
        ch = self.setup_channel(
            keywords=['Directioners4Music', '#Directioners4Music'],
            usernames=[])
        skipwords = ['test123123', 'skip234345']
        languages = ['en', 'es', 'it', 'fr', 'und']
        for skipword in skipwords:
            ch.add_skipword(skipword)
        ch.set_allowed_langs(languages)
        ch.skip_retweets = True
        ch.save()
        self.sync_bot()

        def is_retweet(data):
            json_data = json.loads(data)
            return 'retweeted_status' in json_data

        def patch_content_with_skipword(data):
            json_data = json.loads(data)
            json_data['text'] = u"%s %s" % (json_data['text'],
                                            random.choice(skipwords))
            return json.dumps(json_data)

        sample_size = 10
        retweets_count = 0
        skipped_count = 0
        max_skipped = 5
        expected_tweets = []
        self.assertEqual(TwitterPost.objects.count(), 0)
        with LoggerInterceptor() as logs:
            for data in test_utils.filestream(languages=languages,
                                              sample_count=sample_size):
                if is_retweet(data):
                    retweets_count += 1
                else:
                    if skipped_count < max_skipped:
                        data = patch_content_with_skipword(data)
                        skipped_count += 1
                    else:
                        expected_tweets.append(json.loads(data)['text'])
                self.send_event('on_data', data)
            self.assert_no_errors_in_logs(
                logs, allow_levels={'DEBUG', 'INFO', 'WARNING'})

        expected_posts_count = sample_size - retweets_count - skipped_count
        self.wait_bot(timeout=1 * expected_posts_count)
        for post in TwitterPost.objects():
            self.assertIn(post.plaintext_content, expected_tweets)
            expected_tweets.remove(post.plaintext_content)
        self.assertFalse(expected_tweets, msg=expected_tweets)
        self.assertEqual(TwitterPost.objects.count(), expected_posts_count)

        from solariat_bottle.daemons.twitter.stream.db import StreamRef
        self.assertTrue(StreamRef.objects().count() > 0)
        self.assertEqual(
            StreamRef.objects(status=StreamRef.RUNNING).count(), 1)
        ref = StreamRef.objects(status=StreamRef.RUNNING).sort(id=-1)[0]
        self.assertEqual(len(ref.languages), len(languages))
        self.assertEqual(set(ref.languages), set(languages))
Example #20
    def test_follow_unfollow_events(self):
        """Tests UserProfile's followed_by_brands and follows_brands lists
        are being updated on follow/unfollow events from twitter user stream
        """
        from_user = gen_twitter_user('fake_user1')
        to_user = gen_twitter_user('fake_user2')
        etc = EnterpriseTwitterChannel.objects.create(
            status='Active',
            title='ETC',
            access_token_key='dummy_key',
            access_token_secret='dummy_secret',
            twitter_handle=from_user['screen_name'])

        # class StreamData(object):
        #     """Subset of twitter_bot_dm.UserStream class:
        #     `me` and `channel` attributes are used in
        #     follow/unfollow event handling task.
        #     """
        #     me = from_user['screen_name']
        #     channel = str(etc.id)

        def assert_relations(*rels):
            _eq = self.assertEqual
            _t = self.assertTrue
            _ids = lambda lst: [x.id for x in lst]

            for (up, followed_by, follows) in rels:
                up.reload()
                _eq(up.followed_by_brands, _ids(followed_by))
                _eq(up.follows_brands, _ids(follows))
                for u in followed_by:
                    u.reload()
                    _t(up.is_friend(u))

                for u in follows:
                    u.reload()
                    _t(up.is_follower(u))

        def send_event(event_json):
            server = self.tw_bot.server
            print(server, server.__dict__)
            greenlet = server.streams.get(etc.twitter_handle)
            test_connector = greenlet._stream.test_stream
            test_connector.send('on_data', event_json)
            test_connector.next()

        # 1. brand follows user
        event_json = {
            "event": "follow",
            "source": from_user,
            "target": to_user
        }

        self.start_bot()
        self.sync_bot()
        self.wait_bot()

        # stream_data = StreamData()
        # self.post_received(json.dumps(event_json), stream_data)
        send_event(event_json)

        source = get_user_profile(from_user)
        target = get_user_profile(to_user)

        assert_relations(
            # profile   followed_by  follows
            (source, [], []),  # check the user profile of brand has no changes
            (target, [source], []))  # check the follower is added

        # 2. user follows brand
        event_json['source'] = to_user
        event_json['target'] = from_user
        # self.post_received(json.dumps(event_json), stream_data)
        send_event(event_json)

        assert_relations(
            # profile   followed_by  follows
            (source, [], []),  # check the user profile of brand has no changes
            (target, [source], [source]))  # check the friend is added

        # 3. user unfollows brand - this should result in an error in the logs
        event_json['event'] = 'unfollow'

        with LoggerInterceptor() as logs:
            # self.post_received(json.dumps(event_json), stream_data)
            send_event(event_json)

        import logging
        errors = filter(
            lambda x: x.levelname == logging.getLevelName(logging.ERROR), logs)
        self.assertEqual(errors[0].exc_info[1].message,
                         "Unexpected 'unfollow' event")
        # check nothing changed
        assert_relations(
            # profile   followed_by  follows
            (source, [], []),
            (target, [source], [source]))

        # 4. brand unfollows user
        event_json['source'] = from_user
        event_json['target'] = to_user
        # self.post_received(json.dumps(event_json), stream_data)
        send_event(event_json)
        assert_relations(
            # profile   followed_by  follows
            (source, [], []),
            (target, [], [source]))  # user lost follower

        self.stop_bot()
Example #21
    def test_multiple_facebook_sc_setup(self):
        fac1, fac2, fsc1, fsc2 = self.fac1, self.fac2, self.fsc1, self.fsc2
        for fa in [fac1, fac2]:
            self.assertIsNone(fa.get_service_channel())
            self.assertFalse(fa.get_attached_service_channels())

        for sc in [fsc1, fsc2]:
            for not_attached in [False, True]:
                self.assertListEqual(
                    sorted([
                        str(ch.id) for ch in sc.list_dispatch_candidates(
                            self.user, only_not_attached=not_attached)
                    ]), sorted([str(fac1.id), str(fac2.id)]))

        # setup attachments
        # fsc1 <-> fac1
        # fsc2 <-> fac2
        fsc1.dispatch_channel = fac1
        fsc1.save()
        fsc2.dispatch_channel = fac2
        fsc2.save()

        self.assertEqual(fac1.get_service_channel(), fsc1)
        self.assertEqual(fac1.get_attached_service_channels(), [fsc1])
        self.assertEqual(fac2.get_service_channel(), fsc2)
        self.assertEqual(fac2.get_attached_service_channels(), [fsc2])

        self.assertEqual(fsc1.get_outbound_channel(self.user), fac1)
        self.assertEqual(fsc2.get_outbound_channel(self.user), fac2)

        # all attached
        for sc in [fsc1, fsc2]:
            self.assertEqual(
                sc.list_dispatch_candidates(self.user, only_not_attached=True),
                [])

        # change attachments
        # fsc1 <-> fac2  (was fac1)
        fsc1.dispatch_channel = fac2
        fsc1.save()

        self.assertEqual(fac1.get_service_channel(), None)
        self.assertEqual(fac1.get_attached_service_channels(), [])
        with LoggerInterceptor() as logs:
            self.assertIn(fac2.get_service_channel(), {fsc1, fsc2})
            warnings = [
                log.message for log in logs if log.levelname == 'WARNING'
            ]
            self.assertTrue(warnings)
            self.assertTrue(warnings[0].startswith(
                "We have multiple candidates for service channel matching for enterprise channel"
            ))
        self.assertEqual(fac2.get_attached_service_channels(), [fsc1, fsc2])

        self.assertEqual(fsc1.get_outbound_channel(self.user), fac2)
        self.assertEqual(fsc2.get_outbound_channel(self.user), fac2)

        # fac1 is not attached
        for sc in [fsc1, fsc2]:
            self.assertEqual(
                sc.list_dispatch_candidates(self.user, only_not_attached=True),
                [fac1])