def test_to_datetime(self):
    """A full timestamp, a minute-resolution time and a bare date should all
    parse via to_time and format back with any missing fields zeroed."""
    cases = (
        ('2810-09-21 13:24:01', '2810-09-21 13:24:01'),
        ('2810-09-21 13:24', '2810-09-21 13:24:00'),
        ('2810-09-21', '2810-09-21 00:00:00'),
    )
    for text, expected in cases:
        self.assertEqual(format_time(to_time(text)), expected)
def __add(self, s, aj_id, start, finish, constraint):
    """Persist a new ActivityBookmark for the given activity journal.

    :param s: database session used for the insert.
    :param aj_id: id of the activity journal the bookmark refers to.
    :param start: bookmark start time (stored as given; logged via to_time).
    :param finish: bookmark finish time.
    :param constraint: constraint value stored on the bookmark.
    """
    # lazy %-args (instead of eager '%' formatting) so the to_time()
    # conversions and string build only happen if INFO logging is enabled
    log.info('%s - %s (%d)', to_time(start), to_time(finish), aj_id)
    s.add(ActivityBookmark(activity_journal_id=aj_id, start=start, finish=finish,
                           owner=self, constraint=constraint))
def test_values(self):
    """Integration test: bootstrap a throw-away database, load three personal
    monitor FIT files, run the statistics pipeline, then check the daily step
    count and rest heart rate for 2018-09-06 against known values."""
    with NamedTemporaryFile() as f:
        # create the default configuration in the temporary database file
        bootstrap_file(f, m(V), '5')
        bootstrap_file(f, m(V), '5', mm(DEV), configurator=default)
        # import each monitor FIT file (filenames are upload ids)
        for file in ('24696157869', '24696160481', '24696163486'):
            args, db = bootstrap_file(f, m(V), '5', mm(DEV), 'monitor', mm(FAST),
                                      'data/test/source/personal/[email protected]_%s.fit' % file)
            monitor(args, db)
        # run('sqlite3 %s ".dump"' % f.name, shell=True)
        # compute statistics from the imported data (single process)
        run_pipeline(db, PipelineType.STATISTIC, force=True, start='2018-01-01', n_cpu=1)
        run('sqlite3 %s ".dump"' % f.name, shell=True)
        with db.session_context() as s:
            mjournals = s.query(MonitorJournal).order_by(MonitorJournal.start).all()
            assert mjournals[2].start == to_time('2018-09-06 15:06:00'), mjournals[2].start
            print(mjournals[2].fit_file)
            # steps
            summary = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticJournal.time >= local_date_to_time('2018-09-06'),
                       StatisticJournal.time < local_date_to_time('2018-09-07'),
                       StatisticName.owner == MonitorCalculator,
                       StatisticName.name == DAILY_STEPS).one()
            # connect has 12757 for this date,
            self.assertEqual(summary.value, 12757)
            # heart rate
            summary = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticJournal.time >= local_date_to_time('2018-09-06'),
                       StatisticJournal.time < local_date_to_time('2018-09-07'),
                       StatisticName.owner == MonitorCalculator,
                       StatisticName.name == REST_HR).one()
            self.assertEqual(summary.value, 45)
def injury_notes(old_name, new_name):
    """Migrate dated notes for one injury from the legacy database (`old`)
    into TopicJournal entries of the matching, possibly renamed, topic.

    Relies on `old` (legacy connection) and `s` (current session) being in
    scope at module level.
    """
    injury_id = next(
        old.execute('''select id from injury where name like ?''', [old_name]))[0]
    destination = s.query(Topic).filter(Topic.name == new_name).one()
    notes_statistic = s.query(StatisticName). \
        filter(StatisticName.name == 'Notes',
               StatisticName.constraint == destination.id).one()
    rows = old.execute(
        '''select date, notes from injury_diary where injury_id = ?''', [injury_id])
    for row in rows:
        text = row['notes']
        if not text:
            continue  # skip empty diary entries
        journal = add(s, TopicJournal(time=to_time(row['date']), topic=destination))
        add(s, StatisticJournalText(statistic_name=notes_statistic, source=journal,
                                    value=text))
def test_values(self):
    """Integration test (directory-based schema): bootstrap a throw-away data
    directory, read three personal monitor FIT files, run the monitor
    calculation pipeline, and check the daily step count for 2018-09-06
    against the value Garmin Connect reports; on mismatch, dump the raw
    step journals to aid debugging."""
    with TemporaryDirectory() as f:
        # create the default configuration in the temporary directory
        bootstrap_dir(f, m(V), '5')
        bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
        # import each monitor FIT file (filenames are upload ids)
        for file in ('24696157869', '24696160481', '24696163486'):
            args, data = bootstrap_dir(
                f, m(V), '5', mm(DEV), 'read', mm(MONITOR),
                'data/test/source/personal/[email protected]_%s.fit' % file)
            read(args, data)
        # path = args.system_path(subdir='data', file='activity.db')
        # run(f'sqlite3 {path} ".dump"', shell=True)
        # run only the monitor calculators, single process
        run_pipeline(data, PipelineType.CALCULATE, force=True, like=('%Monitor%', ),
                     start='2018-01-01', n_cpu=1)
        with data.db.session_context() as s:
            mjournals = s.query(MonitorJournal).order_by(
                MonitorJournal.start).all()
            assert mjournals[2].start == to_time(
                '2018-09-06 15:06:00'), mjournals[2].start
            # steps
            summary = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticJournal.time >= local_date_to_time('2018-09-06'),
                       StatisticJournal.time < local_date_to_time('2018-09-07'),
                       StatisticName.owner == MonitorCalculator,
                       StatisticName.name == N.DAILY_STEPS).one()
            if summary.value != 12757:
                # unexpected value: dump raw and cumulative step journals
                # before failing so the discrepancy can be inspected
                path = args.system_path(subdir='data', file='activity.db')
                run('sqlite3 %s "select * from statistic_journal as j, statistic_journal_integer as i, '
                    'statistic_name as n where j.id = i.id and j.statistic_name_id = n.id and '
                    'n.name = \'steps\' order by j.time"' % path, shell=True)
                run('sqlite3 %s "select * from statistic_journal as j, statistic_journal_integer as i, '
                    'statistic_name as n where j.id = i.id and j.statistic_name_id = n.id and '
                    'n.name = \'cumulative-steps\' order by j.time"' % path, shell=True)
            # connect has 12757 for this date,
            self.assertEqual(summary.value, 12757)
fields = dict((field.statistic_name.name, field.statistic_name) for field in diary.fields) notes = fields['Notes'] mood = fields['Mood'] hr = fields['Rest HR'] weight = fields['Weight'] sleep = fields['Sleep'] weather = fields['Weather'] meds = fields['Medication'] for row in old.execute( '''select date, notes, rest_heart_rate, sleep, mood, weather, medication, weight from diary''', []): if row['notes'] or row['mood'] or row['rest_heart_rate'] or row[ 'weight'] or row['sleep'] or row['weather']: tj = add(s, DiaryTopicJournal(time=to_time(row['date']), topic=diary)) if row['notes']: add( s, StatisticJournalText(statistic_name=notes, source=tj, value=row['notes'])) if row['mood']: add( s, StatisticJournalInteger(statistic_name=mood, source=tj, value=row['mood'])) if row['rest_heart_rate']: add( s,
def test_tz(self):
    """Converting a local wall-clock string to a time and back must
    round-trip, and the result must equal the expected UTC instant
    (three hours ahead of local in the test environment)."""
    local_repr = '2019-02-16 12:19:00'
    converted = local_time_to_time(local_repr)
    self.assertEqual(converted, to_time('2019-02-16 15:19:00'))
    self.assertEqual(time_to_local_time(converted), local_repr)