def generic_bug(self, files):
    '''Import the given monitor FIT files and check the daily step count for 2018-10-07.

    Fixed to use the 2-value (args, db) bootstrap_file API and matching
    monitor(args, db) / run_pipeline(db, ...) calls, consistent with the
    directly parallel test_values and the other tests in this file (the
    previous args/sys/db 3-value form did not match the siblings).
    '''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        for file in files:
            args, db = bootstrap_file(
                f, m(V), '5', mm(DEV), 'monitor', mm(FAST),
                'data/test/source/personal/[email protected]_%s.fit' % file)
            monitor(args, db)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        run_pipeline(db, PipelineType.STATISTIC, force=True, start='2018-01-01', n_cpu=1)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        with db.session_context() as s:
            # steps
            summary = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticJournal.time >= local_date_to_time('2018-10-07'),
                       StatisticJournal.time < local_date_to_time('2018-10-08'),
                       StatisticName.owner == MonitorCalculator,
                       StatisticName.name == DAILY_STEPS).one()
            # connect has 3031 for this date.
            self.assertEqual(summary.value, 3031)
def test_values(self):
    '''Import three monitor files and check daily steps and rest HR for 2018-09-06.'''
    with NamedTemporaryFile() as f:
        bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        for file in ('24696157869', '24696160481', '24696163486'):
            args, db = bootstrap_file(f, m(V), '5', mm(DEV), 'monitor', mm(FAST),
                                      'data/test/source/personal/[email protected]_%s.fit' % file)
            monitor(args, db)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        run_pipeline(db, PipelineType.STATISTIC, force=True, start='2018-01-01', n_cpu=1)
        # debug dump disabled: it was left active here (unlike in the sibling
        # tests, where it is commented out) and dumps the whole database to
        # the test output
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        with db.session_context() as s:
            mjournals = s.query(MonitorJournal).order_by(MonitorJournal.start).all()
            assert mjournals[2].start == to_time('2018-09-06 15:06:00'), mjournals[2].start
            print(mjournals[2].fit_file)
            # steps
            summary = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticJournal.time >= local_date_to_time('2018-09-06'),
                       StatisticJournal.time < local_date_to_time('2018-09-07'),
                       StatisticName.owner == MonitorCalculator,
                       StatisticName.name == DAILY_STEPS).one()
            # connect has 12757 for this date,
            self.assertEqual(summary.value, 12757)
            # heart rate
            summary = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticJournal.time >= local_date_to_time('2018-09-06'),
                       StatisticJournal.time < local_date_to_time('2018-09-07'),
                       StatisticName.owner == MonitorCalculator,
                       StatisticName.name == REST_HR).one()
            self.assertEqual(summary.value, 45)
def spline(self, smooth=0):
    '''Yield a spline-based elevation lookup backed by a temporary database.'''
    srtm_dir = '/home/andrew/archive/srtm1'
    with NamedTemporaryFile() as f:
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        # point the SRTM1 directory constant at the local elevation data
        args, db = bootstrap_file(
            f, m(V), '5', 'constants', '--set', SRTM1_DIR, srtm_dir)
        constants(args, db)
        with db.session_context() as session:
            yield spline_elevation_from_constant(log, session, smooth=smooth)
def bilinear(self):
    '''Yield a bilinear elevation lookup backed by a temporary database.

    Fixed for consistency with the sibling spline() fixture: bootstrap_file
    returns (args, db) — not (args, sys, db) — constants() takes (args, db),
    and the constants flag is '--set' rather than 'set'.
    NOTE(review): if this file targets an older CLI where 'set' is the
    sub-command name, keep the fix to the unpacking only.
    '''
    with NamedTemporaryFile() as f:
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        args, db = bootstrap_file(f, m(V), '5', 'constants', '--set', SRTM1_DIR,
                                  '/home/andrew/archive/srtm1')
        constants(args, db)
        with db.session_context() as s:
            yield bilinear_elevation_from_constant(s)
def test_context(self):
    '''A clean exit from on_success() should record exactly one timestamp.'''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        with db.session_context() as s:

            def n_timestamps():
                # rows currently in the Timestamp table
                return s.query(count(Timestamp.id)).scalar()

            with Timestamp(owner=TestTimestamp).on_success(s):
                # nothing is written until the context exits successfully
                self.assertEqual(n_timestamps(), 0)
            # one timestamp recorded after a clean exit
            self.assertEqual(n_timestamps(), 1)
def test_constant(self):
    '''Setting an existing constant must not create new Constant rows.'''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)

        def n_constants():
            # late-bound: reads whatever `db` is currently in scope
            with db.session_context() as s:
                return s.query(count(Constant.id)).scalar()

        self.assertEqual(n_constants(), 10)
        args, db = bootstrap_file(f, m(V), '5', 'constants', '--set', 'FTHR.%', '154')
        constants(args, db)
        # count unchanged: --set updates a value, it does not add a constant
        self.assertEqual(n_constants(), 10)
def test_920(self):
    '''Import a 920XT activity file into a fresh database and check basic stats.'''
    # NOTE(review): both entries in this loop are the *identical* file name,
    # and each iteration starts from a fresh temporary database — the second
    # pass repeats the first exactly.  Looks like a copy/paste slip where a
    # second, different file was intended; confirm against the test data.
    for src in '920xt-2019-05-16_19-42-54.fit', '920xt-2019-05-16_19-42-54.fit':
        with NamedTemporaryFile() as f:
            bootstrap_file(f, m(V), '5')
            bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
            args, db = bootstrap_file(f, m(V), '5', mm(DEV), 'activities', mm(FAST),
                                      f'data/test/source/other/{src}')
            activities(args, db)
            # run('sqlite3 %s ".dump"' % f.name, shell=True)
            run_pipeline(db, PipelineType.STATISTIC, n_cpu=1)
            # run('sqlite3 %s ".dump"' % f.name, shell=True)
            with db.session_context() as s:
                self.__assert_basic_stats(s)
def test_florian(self):
    '''Import Florian's activity file and sanity-check the basic statistics.'''
    source = 'data/test/source/private/florian.fit'
    with NamedTemporaryFile() as f:
        bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        args, db = bootstrap_file(f, m(V), '5', mm(DEV), 'activities', mm(FAST), source)
        activities(args, db)
        run_pipeline(db, PipelineType.STATISTIC, n_cpu=1)
        with db.session_context() as s:
            self.__assert_basic_stats(s)
def test_michael(self):
    '''Import an iWatch-recorded running activity and check the basic statistics.'''
    source = 'data/test/source/other/2019-05-09-051352-Running-iWatchSeries3.fit'
    with NamedTemporaryFile() as f:
        bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        args, db = bootstrap_file(f, m(V), '5', mm(DEV), 'activities', mm(FAST), source)
        activities(args, db)
        run_pipeline(db, PipelineType.STATISTIC, n_cpu=1)
        with db.session_context() as s:
            self.__assert_basic_stats(s)
def test_set(self):
    '''Timestamp.set inserts exactly one row, stamped with the current time.'''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        with db.session_context() as s:
            source = add(s, Source())
            self.assertEqual(s.query(count(Timestamp.id)).scalar(), 0)
            Timestamp.set(s, TestTimestamp, source=source)
            self.assertEqual(s.query(count(Timestamp.id)).scalar(), 1)
            stamp = s.query(Timestamp).filter(Timestamp.owner == TestTimestamp).one()
            # recorded time should be "now" to within roughly 0.05s
            self.assertAlmostEqual(stamp.time.timestamp(), dt.datetime.now().timestamp(), 1)
def test_context_error(self):
    '''An exception inside on_success() must leave no timestamp behind.'''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        with db.session_context() as s:
            try:
                with Timestamp(owner=TestTimestamp).on_success(s):
                    self.assertEqual(s.query(count(Timestamp.id)).scalar(), 0)
                    raise Exception('foo')
            except Exception as e:
                # for some weird reason assertRaisesRegex was not working
                self.assertEqual(str(e), 'foo')
            # the failed context must not have recorded anything
            self.assertEqual(s.query(count(Timestamp.id)).scalar(), 0)
def test_constant(self):
    '''Setting a constant must not change the number of Constant rows.'''
    # this is currently failing because there are multiple statistics called Active Distance
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)

        def constant_rows():
            # late-bound: reads whatever `db` is currently in scope
            with db.session_context() as s:
                return s.query(count(Constant.id)).scalar()

        self.assertEqual(constant_rows(), 13)
        args, db = bootstrap_file(f, m(V), '5', 'constants', '--set', 'FTHR.%', '154')
        constants(args, db)
        # count unchanged: --set updates a value, it does not add a constant
        self.assertEqual(constant_rows(), 13)
def test_empty_data(self):
    '''A monitor file with (almost) no data still loads and produces a journal.

    Fixed to use the 2-value (args, db) bootstrap_file API and matching
    monitor(args, db) / run_pipeline(db, ...) calls, consistent with the
    directly parallel test_monitor and the other tests in this file.
    '''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        args, db = bootstrap_file(
            f, m(V), '5', mm(DEV), 'monitor', mm(FAST),
            'data/test/source/other/37140810636.fit')
        monitor(args, db)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        run_pipeline(db, PipelineType.STATISTIC, n_cpu=1)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        with db.session_context() as s:
            n = s.query(func.count(StatisticJournal.id)).scalar()
            self.assertEqual(n, 20)
            mjournal = s.query(MonitorJournal).one()
            # even an empty file should span a non-zero time interval
            self.assertNotEqual(mjournal.start, mjournal.finish)
def test_monitor(self):
    '''Load a single monitor file and check the statistic count and journal span.'''
    fit_path = 'data/test/source/personal/25822184777.fit'
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        args, db = bootstrap_file(f, m(V), '5', mm(DEV), 'monitor', mm(FAST), fit_path)
        monitor(args, db)
        run_pipeline(db, PipelineType.STATISTIC, force=True, start='2018-01-01')
        with db.session_context() as s:
            total = s.query(func.count(StatisticJournal.id)).scalar()
            # was 111 at some point; why the count changed is unclear
            self.assertEqual(total, 108)
            journal = s.query(MonitorJournal).one()
            self.assertNotEqual(journal.start, journal.finish)
def test_heart_alarms(self):
    '''An activity with heart-rate alarms should still record over 30km.'''
    source = 'data/test/source/personal/2016-07-19-mpu-s-z2.fit'
    with NamedTemporaryFile() as f:
        bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        args, db = bootstrap_file(f, m(V), '5', mm(DEV), 'activities', mm(FAST), source)
        activities(args, db)
        run_pipeline(db, PipelineType.STATISTIC, n_cpu=1)
        with db.session_context() as s:
            distance = s.query(StatisticJournal). \
                join(StatisticName). \
                filter(StatisticName.name == ACTIVE_DISTANCE).one()
            self.assertGreater(distance.value, 30000)
def test_constant(self):
    '''Set and show a constant, load an activity, then resample its statistics
    onto an evenly-spaced time grid with pandas.

    NOTE(review): this test uses the 3-value (args, sys, db) bootstrap_file
    API and the 'set' (not '--set') constants sub-command, unlike most of the
    sibling tests here — confirm which API version this file targets.
    '''
    with NamedTemporaryFile() as f:
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        args, sys, db = bootstrap_file(f, m(V), '5', 'constants', 'set', 'FTHR.%', '154')
        constants(args, sys, db)
        args, sys, db = bootstrap_file(f, m(V), '5', 'constants', 'show', 'FTHR.%')
        constants(args, sys, db)
        args, sys, db = bootstrap_file(
            f, m(V), '5', mm(DEV), 'activities', mm(FAST),
            'data/test/source/personal/2018-03-04-qdp.fit')
        activities(args, sys, db)
        with db.session_context() as s:
            stats = activity_statistics(s, LATITUDE, LONGITUDE, SPHERICAL_MERCATOR_X,
                                        SPHERICAL_MERCATOR_Y, DISTANCE, ELEVATION,
                                        SPEED, CADENCE, HEART_RATE,
                                        local_time='2018-03-04 07:16:33',
                                        activity_group_name='Bike', with_timespan=True)
            stats.describe()
            # median sample spacing of the raw data
            sepn = pd.Series(stats.index).diff().median()  # 7 secs
            start = stats.index.min()  # 2018-03-04 10:16:33+00:00
            finish = stats.index.max()  # 2018-03-04 16:34:51+00:00
            # build an evenly-spaced index covering the whole activity,
            # flagged so the even rows can be recovered after the join
            even = pd.DataFrame({'keep': True},
                                index=pd.date_range(start=start, end=finish, freq=sepn))
            both = stats.join(even, how='outer', sort=True)
            # interpolate the raw statistics onto the merged index...
            both.interpolate(method='index', limit_area='inside', inplace=True)
            # ...then keep only the evenly-spaced rows
            both = both.loc[both['keep'] == True].drop(columns=['keep'])
            # drop rows that were interpolated across gaps between timespans
            both = both.loc[both[TIMESPAN_ID].isin(
                stats[TIMESPAN_ID].unique())]
            both.describe()
def test_segment_bug(self):
    '''Regression check: the activity pipeline must run cleanly on this file.'''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        fit_files = ['/home/andrew/archive/fit/bike/2016-07-27-pm-z4.fit']
        run_pipeline(db, PipelineType.ACTIVITY, paths=fit_files, force=True)
def test_bikes(self):
    '''Exercise the kit-tracking commands: start, change, show, statistics,
    finish and undo, including the error paths that require --force.

    The exact counts (3 shown, 16/20/33/16 statistics, 5/2/0 models) pin the
    state after the sequence of changes below.
    '''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5', configurator=default)
        with db.session_context() as s:
            # starting an unknown item group needs --force
            with self.assertRaises(Exception) as ctx:
                start(s, 'bike', 'cotic', None, False)
            self.assertTrue('--force' in str(ctx.exception), ctx.exception)
            start(s, 'bike', 'cotic', None, True)
            # run('sqlite3 %s ".dump"' % f.name, shell=True)
            with self.assertRaises(Exception) as ctx:
                start(s, 'xxxx', 'marin', None, False)
            self.assertTrue('--force' in str(ctx.exception), ctx.exception)
            start(s, 'bike', 'marin', None, False)
            # a new component/model combination also needs --force first
            with self.assertRaises(Exception) as ctx:
                change(s, 'cotic', 'chain', 'sram', '2018-02-01', False, False)
            self.assertTrue('--force' in str(ctx.exception))
            change(s, 'cotic', 'chain', 'sram', None, True, True)
            change(s, 'cotic', 'chain', 'kcm', '2018-01-01', False, False)
            change(s, 'cotic', 'chain', 'sram', '2018-05-01', False, False)
            change(s, 'cotic', 'chain', 'kcm', '2018-07-01', False, False)
            change(s, 'cotic', 'chain', 'sram', '2018-04-01', False, False)
            # names already in use for another kind of kit are rejected
            with self.assertRaises(Exception) as ctx:
                start(s, 'bike', 'bike', None, True)
            self.assertTrue('bike' in str(ctx.exception), ctx.exception)
            with self.assertRaises(Exception) as ctx:
                start(s, 'bike', 'sram', None, True)
            self.assertTrue('sram' in str(ctx.exception), ctx.exception)
            start(s, 'bike', 'bowman', None, False)
            change(s, 'bowman', 'chain', 'sram', None, False, True)
            self.assertEqual(len(show(s, 'cotic', None)), 3)
            self.assertEqual(len(statistics(s, 'bike')), 16)
            self.assertEqual(len(statistics(s, 'cotic')), 20)
            self.assertEqual(len(statistics(s, 'chain')), 33)
            self.assertEqual(len(statistics(s, 'sram')), 16)
            finish(s, 'bowman', None, False)
            # finishing twice should fail: the item is already retired
            with self.assertRaises(Exception) as ctx:
                finish(s, 'bowman', None, False)
            self.assertTrue('retired' in str(ctx.exception), ctx.exception)
            self.assertEqual(
                len(
                    KitModel.get_all(s, KitItem.get(s, 'cotic'), KitComponent.get(s, 'chain'))), 5)
            # each undo removes all models of that name from the component
            undo(s, 'cotic', 'chain', 'sram', None, True)
            self.assertEqual(
                len(
                    KitModel.get_all(s, KitItem.get(s, 'cotic'), KitComponent.get(s, 'chain'))), 2)
            undo(s, 'cotic', 'chain', 'kcm', None, True)
            self.assertEqual(
                len(
                    KitModel.get_all(s, KitItem.get(s, 'cotic'), KitComponent.get(s, 'chain'))), 0)
            undo(s, 'bowman', 'chain', 'sram', None, True)
            # once no models remain, the component itself should be gone
            self.assertFalse(KitComponent.get(s, 'chain', require=False))
def test_exponential_distance(self):
    '''An exponential distance plan should create one plan topic with 14 children.

    Fixed to the 2-value (args, db) bootstrap_file unpacking used by the
    sibling tests; only db is consumed here, and the previous (args, sys, db)
    form did not match the rest of the file.
    '''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        plan = exponential_distance('Distance test', 'w[mon,wed,fri]', '20km', 5, '2018-07-25', '1m')
        plan.create(db)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        with db.session_context() as s:
            root = s.query(DiaryTopic).filter(DiaryTopic.parent_id == None,
                                              DiaryTopic.name == 'Plan').one()
            self.assertEqual(len(root.children), 1)
            parent = root.children[0]
            # 5 weeks at 3 sessions/week, minus overlaps, gives 14 entries
            # NOTE(review): 14 is the pinned pipeline output — confirm derivation
            self.assertEqual(len(parent.children), 14)
            for child in parent.children:
                print(child)
def test_british(self):
    '''The twelve-week improver plan should span 12 weeks and have 7 children.

    Fixed to the 2-value (args, db) bootstrap_file unpacking used by the
    sibling tests; only db is consumed here, and the previous (args, sys, db)
    form did not match the rest of the file.
    '''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        plan = twelve_week_improver('2018-07-25')
        plan.create(db)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        with db.session_context() as s:
            root = s.query(DiaryTopic).filter(DiaryTopic.parent_id == None,
                                              DiaryTopic.name == 'Plan').one()
            self.assertEqual(len(root.children), 1)
            self.assertTrue(root.schedule)
            self.assertEqual(root.schedule.start, to_date('2018-07-25'))
            self.assertEqual(root.schedule.finish, add_date('2018-07-25', (12, 'w')))
            parent = root.children[0]
            self.assertEqual(len(parent.children), 7)
            for child in parent.children:
                print(child)
def test_reftuple(self):
    '''reftuple fields written as '#Name' and '$Name' expand from the database.'''
    Power = reftuple('Power', 'bike, weight')
    spec = Power('#Bike', '$Weight')
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        with db.session_context() as s:
            source = Source(type=SourceType.SOURCE)
            s.add(source)
            # '#Bike' should resolve to this JSON text statistic...
            StatisticJournalText.add(s, 'Bike', None, None, self, None, source,
                                     '{"mass": 42}', '1980-01-01')
            # ...and '$Weight' to this float statistic
            StatisticJournalFloat.add(s, 'Weight', None, None, self, None, source,
                                      13, '1980-01-01')
            expanded = spec.expand(self.log, s, '1990-01-01', owner=self)
            self.assertEqual(expanded.weight, 13)
            self.assertEqual(expanded.bike['mass'], 42)
def test_activities(self):
    '''Full activity import: set constants, load a ride, run the statistics
    pipeline and pin the resulting row counts.

    2099 raw and 2099 fixed elevation samples, 23655 statistics in total for
    the 2018-08-27 test ride.
    '''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        # set and then display the FTHR constant
        args, db = bootstrap_file(f, m(V), '5', 'constants', '--set', 'FTHR.%', '154')
        constants(args, db)
        args, db = bootstrap_file(f, m(V), '5', 'constants', 'FTHR.%')
        constants(args, db)
        # point the SRTM1 constant at local elevation data
        args, db = bootstrap_file(f, m(V), '5', 'constants', '--set', 'SRTM1.dir',
                                  '/home/andrew/archive/srtm1')
        constants(args, db)
        args, db = bootstrap_file(
            f, m(V), '5', mm(DEV), 'activities', mm(FAST),
            'data/test/source/personal/2018-08-27-rec.fit')
        activities(args, db)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        run_pipeline(db, PipelineType.STATISTIC, force=True, start='2018-01-01', n_cpu=1)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        with db.session_context() as s:
            # raw and corrected elevations should have equal sample counts
            n_raw = s.query(count(StatisticJournalFloat.id)). \
                join(StatisticName). \
                filter(StatisticName.name == RAW_ELEVATION).scalar()
            self.assertEqual(2099, n_raw)
            n_fix = s.query(count(StatisticJournalFloat.id)). \
                join(StatisticName). \
                filter(StatisticName.name == ELEVATION).scalar()
            self.assertEqual(2099, n_fix)
            n = s.query(count(StatisticJournal.id)).scalar()
            self.assertEqual(23655, n)
            journal = s.query(ActivityJournal).one()
            self.assertNotEqual(journal.start, journal.finish)
# --- migration helper: copy diary data from an old database into a new one ---
from ch2.lib.date import to_time
from ch2.sql.utils import add
from ch2.sql.tables.source import Source
from ch2.sql.tables.statistic import StatisticJournal, StatisticJournalText, StatisticJournalInteger, \
    StatisticJournalFloat, StatisticName
from ch2.sql.tables.topic import DiaryTopicJournal, DiaryTopic

# open the old database; `connect` and `Row` are imported outside this span
# (presumably sqlite3 — confirm)
old = connect('/home/andrew/.ch2/database.sqld')
old.row_factory = Row


class File:
    # minimal stand-in for the file object bootstrap_file expects;
    # only the .name attribute is used here
    name = '/home/andrew/.ch2/database.sqle'


# bootstrap a fresh target database at the new path
args, db = bootstrap_file(File(), m(V), '5', configurator=acooke)
s = db.session()


def assert_empty(cls):
    # the target database must start with no rows of the given class
    assert s.query(cls).count() == 0, cls


assert_empty(Source)
assert_empty(StatisticJournal)
assert_empty(DiaryTopicJournal)

# look up the daily diary topic and index its field definitions by name
diary = s.query(DiaryTopic).filter(DiaryTopic.name == 'DailyDiary').one()
fields = dict((field.statistic_name.name, field.statistic_name) for field in diary.fields)
notes = fields['Notes']
def test_sources(self):
    '''End-to-end diary topic journal lifecycle: create an entry, persist it,
    generate monthly/yearly summaries, then delete it and check the cascade.'''
    with NamedTemporaryFile() as f:
        args, db = bootstrap_file(f, m(V), '5', configurator=acooke)
        with db.session_context() as s:
            # add a diary entry
            diary = s.query(Topic).filter(Topic.name == 'Diary').one()
            d = add(s, TopicJournal(topic=diary, date='2018-09-29'))
            d.populate(log, s)
            self.assertEqual(len(d.topic.fields), 9, list(enumerate(map(str, d.topic.fields))))
            self.assertEqual(d.topic.fields[0].statistic_name.name, 'Notes')
            self.assertEqual(d.topic.fields[1].statistic_name.name, 'Weight', str(d.topic.fields[1]))
            for field in d.topic.fields:
                if field in d.statistics:
                    # all fields start out unset
                    self.assertTrue(d.statistics[field].value is None, field)
            d.statistics[d.topic.fields[0]].value = 'hello world'
            d.statistics[d.topic.fields[1]].value = 64.5
        with db.session_context() as s:
            # check the diary entry was persisted
            diary = s.query(Topic).filter(Topic.name == 'Diary').one()
            d = s.query(TopicJournal).filter(
                TopicJournal.topic == diary, TopicJournal.date == '2018-09-29').one()
            s.flush()
            d.populate(log, s)
            self.assertEqual(len(d.topic.fields), 9, list(enumerate(map(str, d.topic.fields))))
            self.assertEqual(d.topic.fields[0].statistic_name.name, 'Notes')
            self.assertEqual(d.statistics[d.topic.fields[0]].value, 'hello world')
            self.assertEqual(d.topic.fields[1].statistic_name.name, 'Weight')
            self.assertEqual(d.statistics[d.topic.fields[1]].value, 64.5)
            self.assertEqual(d.statistics[d.topic.fields[1]].type, StatisticJournalType.FLOAT)
        # generate summary stats
        SummaryCalculator(db, schedule='m').run()
        SummaryCalculator(db, schedule='y').run()
        with db.session_context() as s:
            # check the summary stats
            diary = s.query(Topic).filter(Topic.name == 'Diary').one()
            weight = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticName.owner == diary, StatisticName.name == 'Weight').one()
            self.assertEqual(weight.value, 64.5)
            self.assertEqual(len(weight.measures), 2, weight.measures)
            self.assertEqual(weight.measures[0].rank, 1)
            self.assertEqual(weight.measures[0].percentile, 100, weight.measures[0].percentile)
            n = s.query(count(StatisticJournalFloat.id)).scalar()
            self.assertEqual(n, 4, n)
            n = s.query(count(StatisticJournalInteger.id)).scalar()
            self.assertEqual(n, 11, n)
            m_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                filter(StatisticName.name == 'Avg/Month Weight').one()
            self.assertEqual(m_avg.value, 64.5)
            y_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                filter(StatisticName.name == 'Avg/Year Weight').one()
            self.assertEqual(y_avg.value, 64.5)
            # the monthly interval should cover exactly September 2018
            month = s.query(Interval).filter(
                Interval.schedule == 'm').one()
            self.assertEqual(month.start, to_date('2018-09-01'), month.start)
            self.assertEqual(month.finish, to_date('2018-10-01'), month.finish)
        with db.session_context() as s:
            # delete the diary entry
            diary = s.query(Topic).filter(Topic.name == 'Diary').one()
            d = s.query(TopicJournal).filter(
                TopicJournal.topic == diary, TopicJournal.date == '2018-09-29').one()
            s.delete(d)
        run('sqlite3 %s ".dump"' % f.name, shell=True)
        with db.session_context() as s:
            # check the delete cascade
            self.assertEqual(s.query(count(TopicJournal.id)).scalar(), 0)
            # this should be zero because the Intervals were automatically deleted
            for source in s.query(Source).all():
                print(source)
            for journal in s.query(StatisticJournal).all():
                print(journal)
            self.assertEqual(
                s.query(count(Source.id)).scalar(), 11, list(map(str, s.query(Source).all())))
            # constants
            self.assertEqual(
                s.query(count(StatisticJournalText.id)).scalar(), 7,
                s.query(count(StatisticJournalText.id)).scalar())
            self.assertEqual(
                s.query(count(StatisticJournal.id)).scalar(), 7,
                s.query(count(StatisticJournal.id)).scalar())