Example #1
@classmethod  # decorator assumed from the cls signature
def create(cls, s, *sources):
    # create a Composite source, then link each input source to it,
    # counting components as they are attached
    composite = add(s, Composite(n_components=0))
    for source in sources:
        add(
            s,
            CompositeComponent(input_source=source,
                               output_source=composite))
        composite.n_components += 1
    return composite
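Every example on this page routes new instances through an `add` helper. A minimal sketch of what it presumably does, assuming a standard SQLAlchemy session (this definition is an assumption, not taken from the examples themselves):

def add(s, instance):
    # assumed helper: register the instance with the session and
    # return it so it can be used inline
    s.add(instance)
    return instance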
Example #2
def injury_notes(old_name, new_name):
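    # copy dated notes for one injury from the old database into the
    # corresponding diary topic in the new schema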
    injury_id = next(
        old.execute('''select id from injury where name like ?''',
                    [old_name]))[0]
    topic = s.query(DiaryTopic).filter(DiaryTopic.name == new_name).one()
    notes = s.query(StatisticName).filter(
        StatisticName.name == 'Notes',
        StatisticName.constraint == topic.id).one()
    for row in old.execute(
            '''select date, notes from injury_diary where injury_id = ?''',
            [injury_id]):
        if row['notes']:
            tj = add(s,
                     DiaryTopicJournal(time=to_time(row['date']), topic=topic))
            add(
                s,
                StatisticJournalText(statistic_name=notes,
                                     source=tj,
                                     value=row['notes']))
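A hedged usage sketch for this migration helper; the injury and topic names below are hypothetical, and the `old` connection and session `s` are assumed to already be open, as in the function body:

# hypothetical call: migrate notes stored under the old injury name
# into the diary topic that replaced it
injury_notes('%achilles%', 'Achilles Pain')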
Example #3
def test_set(self):
    # create a Timestamp for a source and check that it is stored
    # with (approximately) the current time
    with NamedTemporaryFile() as f:
        args, sys, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        with db.session_context() as s:
            source = add(s, Source())
            n = s.query(count(Timestamp.id)).scalar()
            self.assertEqual(n, 0)
            Timestamp.set(s, TestTimestamp, source=source)
            n = s.query(count(Timestamp.id)).scalar()
            self.assertEqual(n, 1)
            t = s.query(Timestamp).filter(
                Timestamp.owner == TestTimestamp).one()
            self.assertAlmostEqual(t.time.timestamp(),
                                   dt.datetime.now().timestamp(), 1)
Example #4
def test_set(self):
    # same check as Example #3, written against the newer
    # bootstrap_db / config.db API
    user = random_test_user()
    bootstrap_db(user, m(V), '5')
    config = bootstrap_db(user, m(V), '5', mm(DEV), configurator=default)
    with config.db.session_context() as s:
        source = add(s, Source())
        n = s.query(count(Timestamp.id)).scalar()
        self.assertEqual(n, 0)
        Timestamp.set(s, TestTimestamp, source=source)
        n = s.query(count(Timestamp.id)).scalar()
        self.assertEqual(n, 1)
        t = s.query(Timestamp).filter(
            Timestamp.owner == TestTimestamp).one()
        self.assertAlmostEqual(t.time.timestamp(),
                               dt.datetime.now().timestamp(), 1)
Example #5
def find_and_add_sector_journals(s, sector_group, ajournal, sector_id=None):
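    # use PostGIS to find where the activity route crosses each sector's
    # start and finish lines, then yield a SectorJournal per plausible match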
    sql = text('''
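-- overview: srid projects sector and route geometries into the sector group's SRID;
-- start_point / finish_point intersect the route with the start and finish lines;
-- start_fraction / finish_fraction locate those points as fractions along the route;
-- shortest keeps the start/finish pairs whose length roughly matches the sector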
with srid as (select s.id as sector_id,
                     st_setsrid(s.route, sg.srid) as sector,
                     st_transform(aj.route_et::geometry, sg.srid) as route_et,
                     st_transform(aj.route_d::geometry, sg.srid) as route_d,
                     st_setsrid(s.start, sg.srid) as start,
                     st_setsrid(s.finish, sg.srid) as finish
                from sector as s,
                     activity_journal as aj,
                     sector_group as sg
               where s.sector_group_id = sg.id
                 and s.id = coalesce(:sector_id, s.id)
                 and sg.id = :sector_group_id
                 and aj.id = :activity_journal_id
                 and st_intersects(st_setsrid(s.hull, sg.srid), st_transform(aj.route_d::geometry, sg.srid))),
     start_point as (select r.sector_id,
                            r.route_et,
                            (st_dump(st_multi(st_intersection(r.start, st_force2d(r.route_et))))).geom as point
                       from srid as r),
     start_fraction as (select p.sector_id,
                               st_linelocatepoint(p.route_et, p.point) as fraction
                          from start_point as p
                         where st_geometrytype(p.point) = 'ST_Point'),  -- small number of cases intersect as lines
     finish_point as (select r.sector_id,
                             r.route_et,
                             (st_dump(st_multi(st_intersection(r.finish, st_force2d(r.route_et))))).geom as point
                        from srid as r),
     finish_fraction as (select p.sector_id,
                                st_linelocatepoint(p.route_et, p.point) as fraction
                           from finish_point as p
                          where st_geometrytype(p.point) = 'ST_Point'),
     shortest as (select distinct  -- multiple starts/finishes can lead to duplicates
                         r.sector_id,
                         s.fraction as start_fraction,
                         f.fraction as finish_fraction,
                         min(f.fraction - s.fraction) over (partition by r.sector_id) as shortest
                    from srid as r,
                         start_fraction as s,
                         finish_fraction as f
                   where s.fraction < f.fraction
                     and s.sector_id = f.sector_id
                     and s.sector_id = r.sector_id
                     and st_length(st_linesubstring(r.route_d, s.fraction, f.fraction))
                         between 0.95 * st_length(r.sector) and 1.05 * st_length(r.sector))
select s.sector_id,
       s.start_fraction,
       s.finish_fraction,
       aj.start + interval '1' second * st_m(st_lineinterpolatepoint(r.route_et, s.start_fraction)) as start_time,
       aj.start + interval '1' second * st_m(st_lineinterpolatepoint(r.route_et, s.finish_fraction)) as finish_time,
       st_m(st_lineinterpolatepoint(r.route_d, s.start_fraction)) as start_distance,
       st_m(st_lineinterpolatepoint(r.route_d, s.finish_fraction)) as finish_distance,
       st_z(st_lineinterpolatepoint(r.route_et, s.start_fraction)) as start_elevation,
       st_z(st_lineinterpolatepoint(r.route_et, s.finish_fraction)) as finish_elevation
  from srid as r,
       shortest as s,
       activity_journal as aj
 where aj.id = :activity_journal_id
   and r.sector_id = s.sector_id
   and s.finish_fraction - s.start_fraction = s.shortest
''')
    log.debug(sql)
    result = s.connection().execute(sql,
                                    sector_group_id=sector_group.id,
                                    activity_journal_id=ajournal.id,
                                    sector_id=sector_id)
    for row in result.fetchall():
        data = {name: value for name, value in zip(result.keys(), row)}
        log.debug(
            f'Adding SectorJournal for activity_journal_id {ajournal.id}, '
            f'activity_group {ajournal.activity_group}: {data}')
        sjournal = add(
            s,
            SectorJournal(activity_journal_id=ajournal.id,
                          activity_group=ajournal.activity_group,
                          **data))
        s.flush()
        yield sjournal
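Since this is a generator, callers consume it inside an open session. A minimal usage sketch, assuming the session context manager and objects shown in the other examples on this page:

with config.db.session_context() as s:
    # sector_group and ajournal are assumed to come from earlier queries
    for sjournal in find_and_add_sector_journals(s, sector_group, ajournal):
        log.info(f'matched sector {sjournal.sector_id}')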
Example #6
    def test_sources(self):

        with TemporaryDirectory() as f:

            args, data = bootstrap_dir(f, m(V), '5', configurator=acooke)

            with data.db.session_context() as s:

                # add a diary entry

                journal = add(s, DiaryTopicJournal(date='2018-09-29'))
                cache = journal.cache(s)
                diary = s.query(DiaryTopic).filter(DiaryTopic.title == 'Status').one()
                fields = diary.fields
                self.assertEqual(len(fields), 6, list(enumerate(map(str, fields))))
                self.assertEqual(fields[0].statistic_name.name, 'notes')
                self.assertEqual(fields[1].statistic_name.name, 'weight', str(fields[1]))
                statistics = [cache[field] for field in fields]
                for statistic in statistics:
                    self.assertTrue(statistic.value is None, statistics)
                statistics[0].value = 'hello world'
                statistics[1].value = 64.5

            with data.db.session_context() as s:

                # check the diary entry was persisted

                journal = DiaryTopicJournal.get_or_add(s, '2018-09-29')
                cache = journal.cache(s)
                diary = s.query(DiaryTopic).filter(DiaryTopic.title == 'Status').one()
                fields = diary.fields
                self.assertEqual(len(fields), 6, list(enumerate(map(str, fields))))
                self.assertEqual(fields[0].statistic_name.name, 'notes')
                self.assertEqual(fields[1].statistic_name.name, 'weight', str(fields[1]))
                statistics = [cache[field] for field in fields]
                self.assertEqual(statistics[1].value, 64.5)
                self.assertEqual(statistics[1].type, StatisticJournalType.FLOAT)

            # generate summary stats

            SummaryCalculator(data, schedule='m').run()
            SummaryCalculator(data, schedule='y').run()

            with data.db.session_context() as s:

                # check the summary stats

                diary = s.query(DiaryTopic).filter(DiaryTopic.title == 'Status').one()
                weights = s.query(StatisticJournal).join(StatisticName). \
                               filter(StatisticName.owner == diary, StatisticName.name == 'weight'). \
                               order_by(StatisticJournal.time).all()
                self.assertEqual(len(weights), 2)
                self.assertEqual(weights[1].value, 64.5)
                self.assertEqual(len(weights[1].measures), 2, weights[1].measures)
                self.assertEqual(weights[1].measures[0].rank, 1)
                self.assertEqual(weights[1].measures[0].percentile, 100, weights[1].measures[0].percentile)
                n = s.query(count(StatisticJournalFloat.id)).scalar()
                self.assertEqual(n, 4, n)
                n = s.query(count(StatisticJournalInteger.id)).scalar()
                self.assertEqual(n, 6, n)
                m_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                    filter(StatisticName.name == 'avg-month-weight').one()
                self.assertEqual(m_avg.value, 64.5)
                y_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                    filter(StatisticName.name == 'avg-year-weight').one()
                self.assertEqual(y_avg.value, 64.5)
                month = s.query(Interval).filter(Interval.schedule == 'm').first()
                self.assertEqual(month.start, to_date('2018-09-01'), month.start)
                self.assertEqual(month.finish, to_date('2018-10-01'), month.finish)

            with data.db.session_context() as s:

                # delete the diary entry

                journal = DiaryTopicJournal.get_or_add(s, '2018-09-29')
                s.delete(journal)

            with data.db.session_context() as s:

                # check the delete cascade

                self.assertEqual(s.query(count(DiaryTopicJournal.id)).scalar(), 1)
                # this should be zero because the Intervals were automatically deleted
                # (well, now +1 because there's an original default weight)
                for source in s.query(Source).all():
                    print(source)
                for journal in s.query(StatisticJournal).all():
                    print(journal)
                self.assertEqual(s.query(count(Source.id)).scalar(), 37, list(map(str, s.query(Source).all())))  # constants
                self.assertEqual(s.query(count(StatisticJournalText.id)).scalar(), 13, s.query(count(StatisticJournalText.id)).scalar())
                self.assertEqual(s.query(count(StatisticJournal.id)).scalar(), 22, s.query(count(StatisticJournal.id)).scalar())
Example #7
fields = {field.statistic_name.name: field.statistic_name
          for field in diary.fields}
notes = fields['Notes']
mood = fields['Mood']
hr = fields['Rest HR']
weight = fields['Weight']
sleep = fields['Sleep']
weather = fields['Weather']
meds = fields['Medication']

for row in old.execute(
        '''select date, notes, rest_heart_rate, sleep, mood, weather, medication, weight from diary''',
        []):
    if (row['notes'] or row['mood'] or row['rest_heart_rate']
            or row['weight'] or row['sleep'] or row['weather']):
        tj = add(s, DiaryTopicJournal(time=to_time(row['date']), topic=diary))
        if row['notes']:
            add(
                s,
                StatisticJournalText(statistic_name=notes,
                                     source=tj,
                                     value=row['notes']))
        if row['mood']:
            add(
                s,
                StatisticJournalInteger(statistic_name=mood,
                                        source=tj,
                                        value=row['mood']))
        if row['rest_heart_rate']:
            add(
                s,
                StatisticJournalInteger(statistic_name=hr,
                                        source=tj,
                                        value=row['rest_heart_rate']))
        # the snippet is truncated at this point; the weight, sleep,
        # weather and medication columns presumably follow the same pattern
Example #8
    def test_sources(self):

        with NamedTemporaryFile() as f:

            args, sys, db = bootstrap_file(f, m(V), '5', configurator=acooke)

            with db.session_context() as s:

                # add a diary entry

                journal = add(s, DiaryTopicJournal(date='2018-09-29'))
                cache = journal.cache(s)
                diary = s.query(DiaryTopic).filter(
                    DiaryTopic.name == 'Diary').one()
                fields = diary.fields
                self.assertEqual(len(fields), 9,
                                 list(enumerate(map(str, fields))))
                self.assertEqual(fields[0].statistic_name.name, 'Notes')
                self.assertEqual(fields[1].statistic_name.name, 'Weight',
                                 str(fields[1]))
                statistics = [cache[field] for field in fields]
                for statistic in statistics:
                    self.assertTrue(statistic.value is None, statistics)
                statistics[0].value = 'hello world'
                statistics[1].value = 64.5

            with db.session_context() as s:

                # check the diary entry was persisted

                journal = DiaryTopicJournal.get_or_add(s, '2018-09-29')
                cache = journal.cache(s)
                diary = s.query(DiaryTopic).filter(
                    DiaryTopic.name == 'Diary').one()
                fields = diary.fields
                self.assertEqual(len(fields), 9,
                                 list(enumerate(map(str, fields))))
                self.assertEqual(fields[0].statistic_name.name, 'Notes')
                self.assertEqual(fields[1].statistic_name.name, 'Weight',
                                 str(fields[1]))
                statistics = [cache[field] for field in fields]
                self.assertEqual(statistics[1].value, 64.5)
                self.assertEqual(statistics[1].type,
                                 StatisticJournalType.FLOAT)

            # generate summary stats

            SummaryCalculator(sys, db, schedule='m').run()
            SummaryCalculator(sys, db, schedule='y').run()

            with db.session_context() as s:

                # check the summary stats

                diary = s.query(DiaryTopic).filter(
                    DiaryTopic.name == 'Diary').one()
                weight = s.query(StatisticJournal).join(StatisticName). \
                    filter(StatisticName.owner == diary, StatisticName.name == 'Weight').one()
                self.assertEqual(weight.value, 64.5)
                self.assertEqual(len(weight.measures), 2, weight.measures)
                self.assertEqual(weight.measures[0].rank, 1)
                self.assertEqual(weight.measures[0].percentile, 100,
                                 weight.measures[0].percentile)
                n = s.query(count(StatisticJournalFloat.id)).scalar()
                self.assertEqual(n, 5, n)
                n = s.query(count(StatisticJournalInteger.id)).scalar()
                self.assertEqual(n, 10, n)
                m_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                    filter(StatisticName.name == 'Avg/Month Weight').one()
                self.assertEqual(m_avg.value, 64.5)
                y_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                    filter(StatisticName.name == 'Avg/Year Weight').one()
                self.assertEqual(y_avg.value, 64.5)
                month = s.query(Interval).filter(
                    Interval.schedule == 'm').one()
                self.assertEqual(month.start, to_date('2018-09-01'),
                                 month.start)
                self.assertEqual(month.finish, to_date('2018-10-01'),
                                 month.finish)

            with db.session_context() as s:

                # delete the diary entry

                journal = DiaryTopicJournal.get_or_add(s, '2018-09-29')
                s.delete(journal)

            run('sqlite3 %s ".dump"' % f.name, shell=True)

            with db.session_context() as s:

                # check the delete cascade

                self.assertEqual(
                    s.query(count(DiaryTopicJournal.id)).scalar(), 0)
                # this should be zero because the Intervals were automatically deleted
                for source in s.query(Source).all():
                    print(source)
                for journal in s.query(StatisticJournal).all():
                    print(journal)
                self.assertEqual(
                    s.query(count(Source.id)).scalar(), 15,
                    list(map(str,
                             s.query(Source).all())))  # constants
                self.assertEqual(
                    s.query(count(StatisticJournalText.id)).scalar(), 8,
                    s.query(count(StatisticJournalText.id)).scalar())
                self.assertEqual(
                    s.query(count(StatisticJournal.id)).scalar(), 8,
                    s.query(count(StatisticJournal.id)).scalar())