Code example #1
    def test_get_story_stats(self,
                             mock_time_bucket,
                             mock_upload,
                             mock_update,
                             mock_linger,
                            ):

        # Set some fake analytics
        linger_data = [{
            'slug': 'slug-here',
            'stats': {
                'total_people': 100,
                'raw_avg_seconds': 330,
                'minutes': 5,
                'seconds': 30
            }
        }]
        mock_upload.return_value = 'http://image-url-here'
        mock_linger.return_value = linger_data
        mock_time_bucket.return_value = 'time bucket'


        # Load a fake story
        clear_stories()
        scraper = SpreadsheetScraper()
        stories = scraper.scrape_spreadsheet('tests/data/stories.xlsx')
        stories = scraper.write([stories[0]])

        get_story_stats()

        # Check the updater
        mock_update.assert_called_once_with(stories[0], linger_data, 'time bucket')
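
The mock_* parameters in the test above are injected by @mock.patch decorators that the snippet does not show. A minimal sketch of how such a stack could look; the patch targets below are placeholders, not carebot's real module paths. Decorators apply bottom-up, so the decorator closest to the function supplies the first mock argument.

    from unittest import mock

    # Inside the TestCase class; the patch targets are placeholders only.
    @mock.patch('path.to.get_linger_data')      # topmost -> last argument (mock_linger)
    @mock.patch('path.to.update_story_stats')   #         -> mock_update
    @mock.patch('path.to.upload_image')         #         -> mock_upload
    @mock.patch('path.to.get_time_bucket')      # bottom  -> first argument (mock_time_bucket)
    def test_get_story_stats(self, mock_time_bucket, mock_upload,
                             mock_update, mock_linger):
        ...  # body as in the example above
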
Code example #2
    def test_change_tracking(self, mock_get_channel_name):
        """
        Check if we can start tracking a URL, then update the slugs that are
        tracked on it
        """
        mock_get_channel_name.return_value = 'default-channel'
        clear_stories()
        tracker = NPRStartTracking()

        class FakeMessage(object):
            body = {
                'text': '@carebot track slug-a-b-c on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong',
                'channel': 'default-channel'
            }

        expected = "Ok, I've started tracking `slug-a-b-c` on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong"
        message = tracker.respond(FakeMessage)
        assert expected in message['text']

        # Now try to change the slug
        FakeMessage.body['text'] = '@carebot track slug-a-b-c,slug-x-y-z on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong'
        message = tracker.respond(FakeMessage)
        results = Story.select()
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].url, 'http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong')
        self.assertEqual(results[0].slug, 'slug-a-b-c,slug-x-y-z')
Code example #3
    def test_change_tracking(self, mock_get_channel_name):
        """
        Check if we can start tracking a URL, then update the slugs that are
        tracked on it
        """
        mock_get_channel_name.return_value = "default-channel"
        clear_stories()
        tracker = NPRStartTracking()

        class FakeMessage(object):
            body = {
                "text": "@carebot track slug-a-b-c on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong",
                "channel": "default-channel",
            }

        expected = "Ok, I've started tracking `slug-a-b-c` on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong"
        message = tracker.respond(FakeMessage)
        assert expected in message["text"]

        # Now try to change the slug
        FakeMessage.body[
            "text"
        ] = "@carebot track slug-a-b-c,slug-x-y-z on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong"
        message = tracker.respond(FakeMessage)
        results = Story.select()
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].url, "http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong")
        self.assertEqual(results[0].slug, "slug-a-b-c,slug-x-y-z")
Code example #4
File: test_spreadsheet.py  Project: PotterSys/carebot
    def test_write_spreadsheet(self, mock_upload):
        mock_upload.return_value = 'http://image-url-here'

        clear_stories()

        scraper = SpreadsheetScraper()
        stories = scraper.scrape_spreadsheet('tests/data/stories.xlsx')

        scraper.write(stories)

        results = Story.select()
        self.assertEqual(len(results), 4)

        for idx, story in enumerate(stories):
            self.assertEqual(results[idx].name, story['story_headline'])
            self.assertEqual(results[idx].url, story['story_url'])
Code example #5
File: test_spreadsheet.py  Project: PotterSys/carebot
    def test_write_spreadsheet_duplicates(self, mock_upload):
        mock_upload.return_value = 'http://image-url-here'

        clear_stories()

        scraper = SpreadsheetScraper()
        stories = scraper.scrape_spreadsheet('tests/data/stories.xlsx')

        # Insert the stories
        scraper.write(stories)
        results = Story.select()
        self.assertEqual(len(results), 4)

        # Now insert them again and make sure we don't have duplicates
        scraper.write(stories)
        results = Story.select()
        self.assertEqual(len(results), 4)
Code example #6
    def test_write_spreadsheet(self, mock_upload):
        """
        Make sure we save the stories to the database when scraping from a
        spreadsheet
        """
        clear_stories()

        scraper = SpreadsheetScraper(self.source)
        stories = scraper.scrape_spreadsheet('tests/data/stories.xlsx')

        scraper.write(stories)

        results = Story.select()
        self.assertEqual(len(results), 4)

        for idx, story in enumerate(stories):
            self.assertEqual(results[idx].name, story['story_headline'])
            self.assertEqual(results[idx].url, story['story_url'])
Code example #7
    def test_handle_slug_inquiry(self,
                                 mock_upload,
                                 mock_histogram,
                                 mock_linger,
                                ):

        # Set some fake analytics
        linger_data = [
            [10, 10],
            [20, 10],
            [30, 10],
            [40, 10],
            [50, 10],
            [60, 10],
            [120, 10],
            [180, 10],
            [240, 10],
            [300, 10],
        ]
        mock_linger.return_value = linger_data
        mock_histogram.return_value = 'http://image-url-here'
        mock_upload.return_value = 'http://image-url-here'

        slug = 'x-y-z'
        linger = NPRLingerRate()
        class FakeMessage(object):
            body = {
                'text': 'check slug ' + slug
            }

        clear_stories()
        Story.create(
            name='example',
            slug=slug,
            date=datetime.datetime.now(),
            url='example.com',
            team='default'
        )

        message = linger.handle_slug_inquiry(FakeMessage)
        print(message)
        assert u'*100* people spent a median *55 seconds* on `x-y-z`' in message['text']
        self.assertEqual(message['attachments'][0]['title'], slug)
Code example #8
    def test_handle_slug_inquiry(
        self,
        mock_upload,
        mock_histogram,
        mock_linger,
    ):

        # Set some fake analytics
        linger_data = [
            [10, 10],
            [20, 10],
            [30, 10],
            [40, 10],
            [50, 10],
            [60, 10],
            [120, 10],
            [180, 10],
            [240, 10],
            [300, 10],
        ]
        mock_linger.return_value = linger_data
        mock_histogram.return_value = 'http://image-url-here'
        mock_upload.return_value = 'http://image-url-here'

        slug = 'x-y-z'
        linger = NPRLingerRate()

        class FakeMessage(object):
            body = {'text': 'check slug ' + slug}

        clear_stories()
        Story.create(name='example',
                     slug=slug,
                     date=datetime.datetime.now(),
                     url='example.com',
                     team='default')

        message = linger.handle_slug_inquiry(FakeMessage)
        print(message)
        assert u'*100* people spent a median *55 seconds* on `x-y-z`' in message['text']
        self.assertEqual(message['attachments'][0]['title'], slug)
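
How the asserted numbers fall out of the fixture in the two linger-rate examples above: each [seconds, visitors] pair contributes ten people, so the data covers 100 visitors in total, and the 50th and 51st values of the expanded distribution are 50 and 60 seconds, giving a median of 55. A quick standalone check (an illustration only, not carebot's own median code):

    linger_data = [[10, 10], [20, 10], [30, 10], [40, 10], [50, 10],
                   [60, 10], [120, 10], [180, 10], [240, 10], [300, 10]]

    # One entry per visitor: ten 10s, ten 20s, and so on (already sorted).
    values = [seconds for seconds, visitors in linger_data for _ in range(visitors)]

    total = len(values)                                            # 100
    median = (values[total // 2 - 1] + values[total // 2]) / 2.0   # (50 + 60) / 2 = 55.0
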
Code example #9
    def test_start_tracking(self, mock_get_channel_name):
        """
        Test if we can start tracking a new story given only a NPR URL and a
        graphic slug
        """
        mock_get_channel_name.return_value = 'default-channel'
        clear_stories()
        tracker = NPRStartTracking()

        class FakeMessage(object):
            body = {
                'text': '@carebot track slug-a-b-c on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong',
                'channel': 'default-channel'
            }

        expected = "Ok, I've started tracking `slug-a-b-c` on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong"
        message = tracker.respond(FakeMessage)
        print(message)
        assert expected in message['text']

        results = Story.select()
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].url, 'http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong')
Code example #10
    def test_start_tracking(self, mock_get_channel_name):
        """
        Test if we can start tracking a new story given only a NPR URL and a
        graphic slug
        """
        mock_get_channel_name.return_value = "default-channel"
        clear_stories()
        tracker = NPRStartTracking()

        class FakeMessage(object):
            body = {
                "text": "@carebot track slug-a-b-c on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong",
                "channel": "default-channel",
            }

        expected = "Ok, I've started tracking `slug-a-b-c` on http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong"
        message = tracker.respond(FakeMessage)
        print(message)
        assert expected in message["text"]

        results = Story.select()
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].url, "http://www.npr.org/sections/13.7/2016/02/16/466109612/was-einstein-wrong")
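
All of the tracking tests fake the Slack payload with a bare FakeMessage class whose body dict mimics what the real listener hands to respond(), and every example calls a clear_stories() helper to reset the database between tests. That helper is not shown in any of the snippets; a plausible sketch, assuming Story is a Peewee model (carebot's actual helper may differ):

    def clear_stories():
        # Wipe every Story row so each test starts from an empty table.
        Story.delete().execute()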