def test_main(self):
        """get_days_between returns the exclusive range of dates between two endpoints."""
        start = datetime.datetime(2015, 9, 10).date()
        end = datetime.datetime(2015, 9, 15).date()

        result = data_science_jobs.get_days_between(start, end)

        # Expect every calendar day strictly between start and end (11th–14th).
        expected = [datetime.datetime(2015, 9, day).date() for day in range(11, 15)]
        self.assertEqual(result, expected)
def update_daily_summaries():
    """Create a DailySummary for each day since the last summary, up to the
    most recent scraping session.

    Does nothing if no scraping session has ever run. If no summary exists
    yet, starts one day before the earliest job listing so that listing's
    day is included in the generated range.
    """
    logger.info('Update Daily Summaries')
    previous_session = ScrapingSession.get_previous_session()
    # Lazy %-formatting: the message is only built if the log level is enabled.
    logger.info('Previous Scraping Session: %s',
                previous_session.datetime if previous_session else None)
    if previous_session is None:
        # Nothing has been scraped yet, so there is nothing to summarize.
        return
    last_summary = DailySummary.get_last_summary()
    logger.info('Last Daily Summary: %s',
                last_summary.date if last_summary else None)
    if last_summary is None:
        # Back up one day so get_days_between (exclusive of its start)
        # includes the earliest listing's own day.
        start_date = JobListing.get_earliest_job_listing().added - datetime.timedelta(days=1)
    else:
        start_date = last_summary.date
    dates_between = data_science_jobs.get_days_between(
        start_date,
        previous_session.datetime.date())
    if dates_between:
        logger.info('Getting Daily Summaries Between: %s - %s',
                    dates_between[0], dates_between[-1])
    else:
        logger.info('Daily Summaries Up-To-Date')
    for date in dates_between:
        daily_summary = DailySummary.create(date=date)
        logger.info('%s: n_posts: %s', date, daily_summary.n_posts)
        daily_summary.save()