Example #1
 def test_british(self):
     with NamedTemporaryFile() as f:
         args, sys, db = bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
         plan = twelve_week_improver('2018-07-25')
         plan.create(db)
         # run('sqlite3 %s ".dump"' % f.name, shell=True)
         with db.session_context() as s:
             root = s.query(DiaryTopic).filter(DiaryTopic.parent_id == None, DiaryTopic.name == 'Plan').one()
             self.assertEqual(len(root.children), 1)
             self.assertTrue(root.schedule)
             self.assertEqual(root.schedule.start, to_date('2018-07-25'))
             self.assertEqual(root.schedule.finish, add_date('2018-07-25', (12, 'w')))
             parent = root.children[0]
             self.assertEqual(len(parent.children), 7)
             for child in parent.children:
                 print(child)
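The bootstrap_file call above builds a throw-away database plus command-line arguments; m and mm are argument helpers that are never shown in these snippets. Judging only from call sites such as m(V), '5' and mm(DEV), they appear to turn an option name into its short and long flag form. A minimal sketch under that assumption (not the project's actual code):

 def m(name):
     return '-' + name     # assumed: short flag, e.g. m('v') -> '-v'

 def mm(name):
     return '--' + name    # assumed: long flag, e.g. mm('dev') -> '--dev'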
Example #2
 def test_extended(self):
     # from the spec doc
     self.assert_str(Schedule('x2018-10-07-'), 'x2018-10-07-')
     self.assert_str(Schedule('x[1,2,3]2020-03-06-'), 'x[1,2,3]2020-03-06-')
     self.assert_str(Schedule('x[fri]2020-03-06-'), 'x[fri]2020-03-06-')
     self.assert_bad(Schedule, 'x[1,2,3]')  # no range start
     self.assert_bad(Schedule, '2018-10-07x')  # no offset
     self.assert_bad(Schedule, '2x')  # no multiple repeat
     self.assert_at('x', '2018-07-06', True)
     # single day range
     self.assert_at('x2018-07-06', '2018-07-05', False)
     self.assert_at('x2018-07-06', '2018-07-06', True)
     self.assert_at('x2018-07-06', '2018-07-07', False)
     # from date onwards
     self.assert_at('x2018-07-06-', '2018-07-05', False)
     self.assert_at('x2018-07-06-', '2018-07-06', True)
     self.assert_at('x2018-07-06-', '2018-07-07', True)
     # until date
     self.assert_at('x-2018-07-06', '2018-07-05', True)
     self.assert_at('x-2018-07-06', '2018-07-06', False)
     self.assert_at('x-2018-07-06', '2018-07-07', False)
     # numbered locations with start of range
     self.assert_at('x[1,2,3]2018-07-06-', '2018-07-05', False)
     self.assert_at('x[1,2,3]2018-07-06-', '2018-07-06', True)
     self.assert_at('x[1,2,3]2018-07-06-', '2018-07-07', True)
     self.assert_at('x[1,2,3]2018-07-06-', '2018-07-08', True)
     self.assert_at('x[1,2,3]2018-07-06-', '2018-07-09', False)
     # named locations with start of range
     self.assert_at('x[fri]2018-07-06-', '2018-07-05', False)
     self.assert_at('x[fri]2018-07-06-', '2018-07-06', True)
     self.assert_at('x[fri]2018-07-06-', '2018-07-07', False)
     # named locations with no range
     self.assert_at('x[fri]', '2018-07-05', False)
     self.assert_at('x[fri]', '2018-07-06', True)
     self.assert_at('x[fri]', '2018-07-07', False)
     s = Schedule('x')
     self.assertEqual(s.start_of_frame('2018-01-02'), to_date('1970-01-02'))
     self.assertEqual(s.start, None)
     self.assertEqual(s.finish, None)
     # this is not None because it is used to set end dates for repeating intervals
     self.assertEqual(s.next_frame('2018-01-02'), POS_INFINITY)
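Taken together, the assertions above pin down the extended ('x') syntax: an optional location filter in brackets selects particular days (here the first three days from the start of the range) or named weekdays, an optional range gives a start and/or finish date, and the finish date is exclusive. The same API can be exercised outside the test harness; the values below are exactly the ones asserted above, and only the imports of Schedule and to_date (never shown in these snippets) are assumed:

 s = Schedule('x[fri]')
 print(s.at_location(to_date('2018-07-06')))  # True: 2018-07-06 is a Friday
 print(s.at_location(to_date('2018-07-07')))  # False: a Saturday
 print(Schedule('x-2018-07-06').at_location(to_date('2018-07-05')))  # True: before the exclusive finish date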
Example #3
    def test_sources(self):

        with TemporaryDirectory() as f:

            args, data = bootstrap_dir(f, m(V), '5', configurator=acooke)

            with data.db.session_context() as s:

                # add a diary entry

                journal = add(s, DiaryTopicJournal(date='2018-09-29'))
                cache = journal.cache(s)
                diary = s.query(DiaryTopic).filter(DiaryTopic.title == 'Status').one()
                fields = diary.fields
                self.assertEqual(len(fields), 6, list(enumerate(map(str, fields))))
                self.assertEqual(fields[0].statistic_name.name, 'notes')
                self.assertEqual(fields[1].statistic_name.name, 'weight', str(fields[1]))
                statistics = [cache[field] for field in fields]
                for statistic in statistics:
                    self.assertTrue(statistic.value is None, statistics)
                statistics[0].value = 'hello world'
                statistics[1].value = 64.5

            with data.db.session_context() as s:

                # check the diary entry was persisted

                journal = DiaryTopicJournal.get_or_add(s, '2018-09-29')
                cache = journal.cache(s)
                diary = s.query(DiaryTopic).filter(DiaryTopic.title == 'Status').one()
                fields = diary.fields
                self.assertEqual(len(fields), 6, list(enumerate(map(str, fields))))
                self.assertEqual(fields[0].statistic_name.name, 'notes')
                self.assertEqual(fields[1].statistic_name.name, 'weight', str(fields[1]))
                statistics = [cache[field] for field in fields]
                self.assertEqual(statistics[1].value, 64.5)
                self.assertEqual(statistics[1].type, StatisticJournalType.FLOAT)

            # generate summary stats

            SummaryCalculator(data, schedule='m').run()
            SummaryCalculator(data, schedule='y').run()

            with data.db.session_context() as s:

                # check the summary stats

                diary = s.query(DiaryTopic).filter(DiaryTopic.title == 'Status').one()
                weights = s.query(StatisticJournal).join(StatisticName). \
                               filter(StatisticName.owner == diary, StatisticName.name == 'weight'). \
                               order_by(StatisticJournal.time).all()
                self.assertEqual(len(weights), 2)
                self.assertEqual(weights[1].value, 64.5)
                self.assertEqual(len(weights[1].measures), 2, weights[1].measures)
                self.assertEqual(weights[1].measures[0].rank, 1)
                self.assertEqual(weights[1].measures[0].percentile, 100, weights[1].measures[0].percentile)
                n = s.query(count(StatisticJournalFloat.id)).scalar()
                self.assertEqual(n, 4, n)
                n = s.query(count(StatisticJournalInteger.id)).scalar()
                self.assertEqual(n, 6, n)
                m_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                    filter(StatisticName.name == 'avg-month-weight').one()
                self.assertEqual(m_avg.value, 64.5)
                y_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                    filter(StatisticName.name == 'avg-year-weight').one()
                self.assertEqual(y_avg.value, 64.5)
                month = s.query(Interval).filter(Interval.schedule == 'm').first()
                self.assertEqual(month.start, to_date('2018-09-01'), month.start)
                self.assertEqual(month.finish, to_date('2018-10-01'), month.finish)

            with data.db.session_context() as s:

                # delete the diary entry

                journal = DiaryTopicJournal.get_or_add(s, '2018-09-29')
                s.delete(journal)

            with data.db.session_context() as s:

                # check the delete cascade

                self.assertEqual(s.query(count(DiaryTopicJournal.id)).scalar(), 1)
                # this should be zero because the Intervals were automatically deleted
                # (well, now +1 because there's an original default weight)
                for source in s.query(Source).all():
                    print(source)
                for journal in s.query(StatisticJournal).all():
                    print(journal)
                self.assertEqual(s.query(count(Source.id)).scalar(), 37, list(map(str, s.query(Source).all())))  # constants
                self.assertEqual(s.query(count(StatisticJournalText.id)).scalar(), 13, s.query(count(StatisticJournalText.id)).scalar())
                self.assertEqual(s.query(count(StatisticJournal.id)).scalar(), 22, s.query(count(StatisticJournal.id)).scalar())
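The add helper used in these tests (for example journal = add(s, DiaryTopicJournal(date='2018-09-29'))) is never defined in the snippets. A plausible minimal version, an assumption rather than the project's code, just registers the instance with the SQLAlchemy session and returns it so the call can be used inline:

    def add(s, instance):
        # assumed helper: attach to the session and hand the instance back
        s.add(instance)
        return instance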
Example #4
    def test_sources(self):

        with NamedTemporaryFile() as f:

            args, db = bootstrap_file(f, m(V), '5', configurator=acooke)

            with db.session_context() as s:

                # add a diary entry

                diary = s.query(Topic).filter(Topic.name == 'Diary').one()
                d = add(s, TopicJournal(topic=diary, date='2018-09-29'))
                d.populate(log, s)
                self.assertEqual(len(d.topic.fields), 9,
                                 list(enumerate(map(str, d.topic.fields))))
                self.assertEqual(d.topic.fields[0].statistic_name.name,
                                 'Notes')
                self.assertEqual(d.topic.fields[1].statistic_name.name,
                                 'Weight', str(d.topic.fields[1]))
                for field in d.topic.fields:
                    if field in d.statistics:
                        self.assertTrue(d.statistics[field].value is None,
                                        field)
                d.statistics[d.topic.fields[0]].value = 'hello world'
                d.statistics[d.topic.fields[1]].value = 64.5

            with db.session_context() as s:

                # check the diary entry was persisted

                diary = s.query(Topic).filter(Topic.name == 'Diary').one()
                d = s.query(TopicJournal).filter(
                    TopicJournal.topic == diary,
                    TopicJournal.date == '2018-09-29').one()
                s.flush()
                d.populate(log, s)
                self.assertEqual(len(d.topic.fields), 9,
                                 list(enumerate(map(str, d.topic.fields))))
                self.assertEqual(d.topic.fields[0].statistic_name.name,
                                 'Notes')
                self.assertEqual(d.statistics[d.topic.fields[0]].value,
                                 'hello world')
                self.assertEqual(d.topic.fields[1].statistic_name.name,
                                 'Weight')
                self.assertEqual(d.statistics[d.topic.fields[1]].value, 64.5)
                self.assertEqual(d.statistics[d.topic.fields[1]].type,
                                 StatisticJournalType.FLOAT)

            # generate summary stats

            SummaryCalculator(db, schedule='m').run()
            SummaryCalculator(db, schedule='y').run()

            with db.session_context() as s:

                # check the summary stats

                diary = s.query(Topic).filter(Topic.name == 'Diary').one()
                weight = s.query(StatisticJournal).join(StatisticName). \
                    filter(StatisticName.owner == diary, StatisticName.name == 'Weight').one()
                self.assertEqual(weight.value, 64.5)
                self.assertEqual(len(weight.measures), 2, weight.measures)
                self.assertEqual(weight.measures[0].rank, 1)
                self.assertEqual(weight.measures[0].percentile, 100,
                                 weight.measures[0].percentile)
                n = s.query(count(StatisticJournalFloat.id)).scalar()
                self.assertEqual(n, 4, n)
                n = s.query(count(StatisticJournalInteger.id)).scalar()
                self.assertEqual(n, 11, n)
                m_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                    filter(StatisticName.name == 'Avg/Month Weight').one()
                self.assertEqual(m_avg.value, 64.5)
                y_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                    filter(StatisticName.name == 'Avg/Year Weight').one()
                self.assertEqual(y_avg.value, 64.5)
                month = s.query(Interval).filter(
                    Interval.schedule == 'm').one()
                self.assertEqual(month.start, to_date('2018-09-01'),
                                 month.start)
                self.assertEqual(month.finish, to_date('2018-10-01'),
                                 month.finish)

            with db.session_context() as s:

                # delete the diary entry

                diary = s.query(Topic).filter(Topic.name == 'Diary').one()
                d = s.query(TopicJournal).filter(
                    TopicJournal.topic == diary,
                    TopicJournal.date == '2018-09-29').one()
                s.delete(d)

            # dump the database to stdout for debugging (run is presumably subprocess.run; needs the sqlite3 CLI)
            run('sqlite3 %s ".dump"' % f.name, shell=True)

            with db.session_context() as s:

                # check the delete cascade

                self.assertEqual(s.query(count(TopicJournal.id)).scalar(), 0)
                # this should be zero because the Intervals were automatically deleted
                for source in s.query(Source).all():
                    print(source)
                for journal in s.query(StatisticJournal).all():
                    print(journal)
                self.assertEqual(
                    s.query(count(Source.id)).scalar(), 11,
                    list(map(str,
                             s.query(Source).all())))  # constants
                self.assertEqual(
                    s.query(count(StatisticJournalText.id)).scalar(), 7,
                    s.query(count(StatisticJournalText.id)).scalar())
                self.assertEqual(
                    s.query(count(StatisticJournal.id)).scalar(), 7,
                    s.query(count(StatisticJournal.id)).scalar())
Example #5
 def assert_at(self, spec, date, at_location):
     date = to_date(date)
     self.assertEqual(Schedule(spec).at_location(date), at_location)
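Example #2 also calls assert_str and assert_bad, which are not shown anywhere in these snippets. Under the assumption that Schedule round-trips its specification through str() and raises an exception for an invalid specification, they might look like this sketch (not the project's code):

 def assert_str(self, schedule, expected):
     self.assertEqual(str(schedule), expected)

 def assert_bad(self, factory, *args):
     with self.assertRaises(Exception):
         factory(*args)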
Example #6
 def test_frame_start(self):
     s = Schedule('2018-01-01/2y')
     self.assertEqual(s.start_of_frame('2018-01-02'), to_date('2018-01-01'))
     self.assertEqual(s.start_of_frame('2017-01-02'), to_date('2016-01-01'))
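The second assertion holds because two-year frames anchored at 2018-01-01 also extend backwards (..., 2014-01-01, 2016-01-01, 2018-01-01, ...), so 2017-01-02 falls in the frame that began on 2016-01-01. One more check in the same style, with the expected value inferred from that anchoring rather than taken from the source:

     self.assertEqual(s.start_of_frame('2020-06-01'), to_date('2020-01-01'))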