def write(self, stories, team=None):
    """
    Save rows to the database.

    Skips stories whose details cannot be fetched from the NPR API and
    stories whose URL is already present in the database. Returns the
    list of newly created Story rows.
    """
    new_stories = []
    for story in stories:
        info_from_api = npr_api_scraper.get_story_details(story['story_url'])
        if not info_from_api:
            logger.info('Not adding %s to database: could not get story' % (story['story_headline']))
            # Bug fix: was `pass`, which fell through and crashed below
            # when subscripting the missing API response.
            continue

        exists = Story.select().where(Story.url == story['story_url'])
        if exists:
            logger.info('Not adding %s to database: already exists' % (story['story_headline']))
        else:
            try:
                screenshot_url = screenshotter.get_story_image(story['story_url'])
                new_story = Story.create(
                    name=story['story_headline'].strip(),
                    slug=story['graphic_slug'].strip(),
                    date=info_from_api['date'],
                    story_type=story['graphic_type'].strip(),
                    url=story['story_url'].strip(),
                    image=info_from_api['image'],
                    team=team,
                    screenshot=screenshot_url
                )
                new_stories.append(new_story)
            except IntegrityError:
                # Story probably already exists (unique constraint hit).
                logger.info('Not adding %s to database: probably already exists' % (story['story_headline']))
    return new_stories
def write(self, stories, team=None):
    """ Save rows to the database """
    new_stories = []
    for row in stories:
        # Slug is derived from the flavor description plus the taster.
        slug = row['official flavor description'] + ' - ' + row['taster']
        try:
            created = Story.create(
                name=row['name'].strip(),
                slug=slug,
                date=PockyScraper.parse_date(row['date tasted']),
                story_type='pocky',
                team=team,
            )
        except IntegrityError:
            # Story probably already exists.
            logger.info(
                'Not adding %s to database: probably already exists' % (slug))
        else:
            logger.info('Added {0}'.format(created.name))
            new_stories.append(created)
    return new_stories
def write(self, stories, team=None):
    """
    Save rows to the database.

    Skips stories whose details cannot be fetched from the NPR API.
    Returns the list of newly created Story rows.
    """
    new_stories = []
    for story in stories:
        info_from_api = npr_api_scraper.get_story_details(story['story_url'])
        if not info_from_api:
            logger.info('Not adding %s to database: could not get story' % (story['story_headline']))
            # Bug fix: previously fell through and crashed on
            # info_from_api['date'] when the API lookup failed.
            continue
        try:
            new_story = Story.create(
                name=story['story_headline'],
                slug=story['graphic_slug'],
                date=info_from_api['date'],
                article_posted=info_from_api['date'],
                story_type=story['graphic_type'],
                url=story['story_url'],
                image=info_from_api['image'],
                team=team
            )
            new_stories.append(new_story)
        except IntegrityError:
            # Story probably already exists (unique constraint hit).
            logger.info('Not adding %s to database: probably already exists' % (story['story_headline']))
    return new_stories
def test_handle_slug_inquiry(self, mock_upload, mock_histogram, mock_linger, ): # Set some fake analytics linger_data = [ [10, 10], [20, 10], [30, 10], [40, 10], [50, 10], [60, 10], [120, 10], [180, 10], [240, 10], [300, 10], ] mock_linger.return_value = linger_data mock_histogram.return_value = 'http://image-url-here' mock_upload.return_value = 'http://image-url-here' slug = 'x-y-z' linger = NPRLingerRate() class FakeMessage(object): body = { 'text': 'check slug ' + slug } clear_stories() Story.create( name = 'example', slug = slug, date = datetime.datetime.now(), url = 'example.com', team = 'deafult' ) message = linger.handle_slug_inquiry(FakeMessage) print message assert u'*100* people spent a median *55 seconds* on `x-y-z`' in message['text'] self.assertEqual(message['attachments'][0]['title'], slug)
def respond(self, message):
    """
    Handle a "start tracking" chat message.

    Returns False when the message doesn't match the tracking trigger,
    otherwise a ``{'text': ...}`` reply dict describing the outcome.
    """
    m = re.search(self.START_TRACKING_REGEX, message.body['text'])
    if not m:
        return False
    slug = m.group(1)

    # Bug fix: re.search returns None when no URL is present; the old
    # code called .group(1) unconditionally and crashed.
    url_match = re.search(self.GRUBER_URLINTEXT_PAT, message.body['text'])
    url = url_match.group(1) if url_match else None

    if slug:
        if not url:
            logger.error("Couldn't find story URL in message %s", message.body['text'])
            # Bug fix: was a bare `return`, which silently dropped the
            # user-facing error message.
            return {'text': "Sorry, I need a story URL to start tracking."}
        try:
            # Already tracking this URL? Just update its slug.
            story = Story.select().where(Story.url.contains(url)).get()
            story.slug = slug
            story.save()
            text = "Ok! I'm already tracking `%s`, and I've updated the slug." % url
        except Story.DoesNotExist:
            # Not in the database yet: look it up and start tracking it.
            details = npr_api_scraper.get_story_details(url)
            if not details:
                logger.error("Couldn't find story in API for URL %s", url)
                # Bug fix: was a bare `return` (see above).
                return {'text': "Sorry, I wasn't able to find that story in the API, so I couldn't start tracking it."}

            # Find out what team we need to save this story to
            channel = slack_tools.get_channel_name(message.body['channel'])
            team = self.config.get_team_for_channel(channel)

            # Create the story
            story = Story.create(name=details['title'],
                                 slug=slug,
                                 date=details['date'],
                                 url=url,
                                 image=details['image'],
                                 team=team)
            story.save()
            text = "Ok, I've started tracking `%s` on %s. The first stats should arrive in 4 hours or less." % (slug, url)
    else:
        text = "Sorry, I wasn't able to start tracking `%s` right now." % slug

    if text:
        return {'text': text}
def start_tracking(message):
    """
    Begin tracking the slug named in a chat message, replying to the
    user with the outcome.
    """
    match = re.search(START_TRACKING_REGEX, message.body['text'])
    if not match:
        return False
    slug = match.group(1)

    if not slug:
        message.reply(
            "Sorry, I wasn't able to start tracking `%s` right now." % slug)
        return

    # Check if the slug is in the database.
    try:
        Story.select().where(Story.slug.contains(slug)).get()
        message.reply(
            "Thanks! I'm already tracking `%s`, and you should start seeing results within a couple hours." % slug)
    except Story.DoesNotExist:
        # If it's not in the database, start tracking it.
        url_match = re.search(GRUBER_URLINTEXT_PAT, message.body['text'])
        if not url_match:
            logger.error("Couldn't find story URL in message %s",
                         message.body['text'])
            message.reply("Sorry, I need a story URL to start tracking.")
            return
        url = url_match.group(1)

        details = npr_api_scraper.get_story_details(url)
        if not details:
            logger.error("Couldn't find story in API for URL %s", url)
            message.reply(
                "Sorry, I wasn't able to find that story in the API, so I couldn't start tracking it."
            )
            return

        # Find out what team we need to save this story to
        channel = slackTools.get_channel_name(message.body['channel'])
        team = config.get_team_for_channel(channel)

        # Create the story
        story = Story.create(name=details['title'],
                             slug=slug,
                             date=details['date'],
                             url=url,
                             image=details['image'],
                             team=team)
        story.save()
        message.reply(
            "Ok, I've started tracking `%s`. The first stats should arrive in 4 hours or less." % slug)
def test_handle_slug_inquiry( self, mock_upload, mock_histogram, mock_linger, ): # Set some fake analytics linger_data = [ [10, 10], [20, 10], [30, 10], [40, 10], [50, 10], [60, 10], [120, 10], [180, 10], [240, 10], [300, 10], ] mock_linger.return_value = linger_data mock_histogram.return_value = 'http://image-url-here' mock_upload.return_value = 'http://image-url-here' slug = 'x-y-z' linger = NPRLingerRate() class FakeMessage(object): body = {'text': 'check slug ' + slug} clear_stories() Story.create(name='example', slug=slug, date=datetime.datetime.now(), url='example.com', team='deafult') message = linger.handle_slug_inquiry(FakeMessage) print message assert u'*100* people spent a median *55 seconds* on `x-y-z`' in message[ 'text'] self.assertEqual(message['attachments'][0]['title'], slug)
def respond(self, message):
    """
    Handle a "start tracking" chat message.

    Returns False when the message doesn't match the tracking trigger,
    otherwise a ``{'text': ...}`` reply dict describing the outcome.
    """
    m = re.search(self.START_TRACKING_REGEX, message.body['text'])
    if not m:
        return False
    slug = m.group(1)

    # Bug fix: re.search returns None when no URL is present; the old
    # code called .group(1) unconditionally and crashed.
    url_match = re.search(self.GRUBER_URLINTEXT_PAT, message.body['text'])
    url = url_match.group(1) if url_match else None

    if slug:
        if not url:
            logger.error("Couldn't find story URL in message %s",
                         message.body['text'])
            # Bug fix: was a bare `return`, which silently dropped the
            # user-facing error message.
            return {'text': "Sorry, I need a story URL to start tracking."}
        try:
            # Already tracking this URL? Just update its slug.
            story = Story.select().where(Story.url.contains(url)).get()
            story.slug = slug
            story.save()
            text = "Ok! I'm already tracking `%s`, and I've updated the slug." % url
        except Story.DoesNotExist:
            # Not in the database yet: look it up and start tracking it.
            details = npr_api_scraper.get_story_details(url)
            if not details:
                logger.error("Couldn't find story in API for URL %s", url)
                # Bug fix: was a bare `return` (see above).
                return {'text': "Sorry, I wasn't able to find that story in the API, so I couldn't start tracking it."}

            # Find out what team we need to save this story to
            channel = slack_tools.get_channel_name(message.body['channel'])
            team = self.config.get_team_for_channel(channel)

            # Create the story
            story = Story.create(name=details['title'],
                                 slug=slug,
                                 date=details['date'],
                                 url=url,
                                 image=details['image'],
                                 team=team)
            story.save()
            text = "Ok, I've started tracking `%s` on %s. The first stats should arrive in 4 hours or less." % (
                slug, url)
    else:
        text = "Sorry, I wasn't able to start tracking `%s` right now." % slug

    if text:
        return {'text': text}
def write(self, stories, team=None):
    """ Save rows to the database """
    # TODO: this should be abstracted here and in spreadsheet.py
    new_stories = []
    for row in stories:
        try:
            created = Story.create(name=row['name'],
                                   slug=row['slug'],
                                   date=row['date'],
                                   url=row['url'],
                                   team=team)
        except IntegrityError:
            # Story probably already exists.
            logger.info(
                'Not adding %s to database: probably already exists' %
                (row['name']))
        else:
            new_stories.append(created)
    return new_stories
def write(self, stories, team=None):
    """ Save rows to the database """
    # TODO: this should be abstracted here and in spreadsheet.py
    new_stories = []
    for entry in stories:
        try:
            saved = Story.create(
                name=entry['name'],
                slug=entry['slug'],
                date=entry['date'],
                url=entry['url'],
                team=team,
            )
        except IntegrityError:
            # Story probably already exists.
            logger.info('Not adding %s to database: probably already exists' % (entry['name']))
        else:
            new_stories.append(saved)
    return new_stories
def write(self, stories, team=None):
    """
    Save rows to the database.

    Skips stories whose details cannot be fetched from the NPR API and
    stories whose URL is already present in the database. Returns the
    list of newly created Story rows.
    """
    new_stories = []
    for story in stories:
        info_from_api = npr_api_scraper.get_story_details(
            story['story_url'])
        if not info_from_api:
            logger.info('Not adding %s to database: could not get story' %
                        (story['story_headline']))
            # Bug fix: was `pass`, which fell through and crashed below
            # when subscripting the missing API response.
            continue

        exists = Story.select().where(Story.url == story['story_url'])
        if exists:
            logger.info('Not adding %s to database: already exists' %
                        (story['story_headline']))
        else:
            try:
                screenshot_url = screenshotter.get_story_image(
                    story['story_url'])
                new_story = Story.create(
                    name=story['story_headline'].strip(),
                    slug=story['graphic_slug'].strip(),
                    date=info_from_api['date'],
                    story_type=story['graphic_type'].strip(),
                    url=story['story_url'].strip(),
                    image=info_from_api['image'],
                    team=team,
                    screenshot=screenshot_url)
                new_stories.append(new_story)
            except IntegrityError:
                # Story probably already exists (unique constraint hit).
                logger.info(
                    'Not adding %s to database: probably already exists' %
                    (story['story_headline']))
    return new_stories
def write(self, stories, team=None):
    """ Save rows to the database """
    new_stories = []
    for entry in stories:
        # Slug is derived from the flavor description plus the taster.
        slug = entry['official flavor description'] + ' - ' + entry['taster']
        try:
            saved = Story.create(
                name=entry['name'].strip(),
                slug=slug,
                date=PockyScraper.parse_date(entry['date tasted']),
                story_type='pocky',
                team=team,
            )
        except IntegrityError:
            # Story probably already exists.
            logger.info('Not adding %s to database: probably already exists' % (slug))
        else:
            logger.info('Added {0}'.format(saved.name))
            new_stories.append(saved)
    return new_stories