def test_constant(self):
    """Read a FIT activity and resample its statistics onto an evenly-spaced time grid.

    NOTE(review): this test makes no assertions — it only exercises the read /
    resample path and calls describe() for its side effects; consider adding
    explicit checks on the resampled frame.
    """
    with TemporaryDirectory() as f:
        # bootstrap twice: once to configure the database, once to build the
        # 'read' command arguments for the FIT file
        bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
        args, data = bootstrap_dir(
            f, m(V), '5', mm(DEV), 'read',
            'data/test/source/personal/2018-03-04-qdp.fit')
        read(args, data)
        with data.db.session_context() as s:
            # pull the positional / effort statistics for the imported activity
            stats = Statistics(s, activity_journal='2018-03-04 07:16:33', with_timespan=True). \
                by_name(SegmentReader, N.LATITUDE, N.LONGITUDE,
                        N.SPHERICAL_MERCATOR_X, N.SPHERICAL_MERCATOR_Y,
                        N.DISTANCE, N.ELEVATION, N.SPEED, N.CADENCE,
                        N.HEART_RATE).df
            stats.describe()
            # median gap between samples defines the regular grid spacing
            sepn = pd.Series(stats.index).diff().median()  # 7 secs
            start = stats.index.min()  # 2018-03-04 10:16:33+00:00
            finish = stats.index.max()  # 2018-03-04 16:34:51+00:00
            # an evenly-spaced index marked 'keep'; joining then interpolating
            # gives values at the grid points, after which only grid rows survive
            even = pd.DataFrame({'keep': True},
                                index=pd.date_range(start=start, end=finish, freq=sepn))
            both = stats.join(even, how='outer', sort=True)
            both.interpolate(method='index', limit_area='inside', inplace=True)
            # '== True' (not truthiness) is deliberate: rows from `stats` hold
            # NaN in 'keep' and must be dropped
            both = both.loc[both['keep'] == True].drop(columns=['keep'])
            # discard grid rows that fall outside any recorded timespan
            both = both.loc[both[N.TIMESPAN_ID].isin(
                stats[N.TIMESPAN_ID].unique())]
            both.describe()
def generic_bug(self, files, join=False):
    """Reproduce a monitor-import bug by reading the named FIT files.

    When `join` is true all files are imported by a single 'read' command;
    otherwise each file gets its own command.  Either way the daily step
    count for 2018-10-07 must match the value Garmin Connect shows.
    """
    with TemporaryDirectory() as base:
        # initial bootstrap keeps `data` bound even if `files` is empty
        args, data = bootstrap_dir(base, m(V), '5')
        bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        template = 'data/test/source/personal/[email protected]_%s.fit'
        if join:
            files = [template % name for name in files]
            args, data = bootstrap_dir(base, mm(DEV), 'read', *files)
            read(args, data)
        else:
            for name in files:
                args, data = bootstrap_dir(base, mm(DEV), 'read', template % name)
                read(args, data)
        with data.db.session_context() as s:
            # daily steps for 2018-10-07
            summary = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticJournal.time >= local_date_to_time('2018-10-07'),
                       StatisticJournal.time < local_date_to_time('2018-10-08'),
                       StatisticName.owner == MonitorCalculator,
                       StatisticName.name == N.DAILY_STEPS).one()
            # connect has 3031 for this date.
            self.assertEqual(summary.value, 3031)
def spline(self, smooth=0):
    """Yield a spline elevation function built from the SRTM1 directory constant."""
    with TemporaryDirectory() as base:
        bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        args, data = bootstrap_dir(base, m(V), '5', 'constants', 'set',
                                   SRTM1_DIR_CNAME,
                                   '/home/andrew/archive/srtm1', mm(FORCE))
        constants(args, data)
        with data.db.session_context() as session:
            yield spline_elevation_from_constant(session, smooth=smooth)
def test_florian(self):
    """Import florian.fit, run the calculation pipeline, and sanity-check stats."""
    with TemporaryDirectory() as base:
        bootstrap_dir(base, m(V), '5')
        bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        args, data = bootstrap_dir(base, m(V), '5', mm(DEV), 'read',
                                   'data/test/source/private/florian.fit')
        read(args, data)
        run_pipeline(data, PipelineType.CALCULATE, n_cpu=1)
        with data.db.session_context() as session:
            self.__assert_basic_stats(session)
def test_920(self):
    """Import a 920XT FIT file, run the calculators, and sanity-check stats.

    NOTE(review): the original loop listed the *same* filename twice, so the
    identical test ran twice for no benefit; the duplicate is removed.
    TODO: if a second, different 920xt capture was intended, add it here.
    """
    for src in ('920xt-2019-05-16_19-42-54.fit',):
        with TemporaryDirectory() as f:
            bootstrap_dir(f, m(V), '5')
            bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
            args, data = bootstrap_dir(f, m(V), '5', mm(DEV), 'read',
                                       f'data/test/source/other/{src}')
            read(args, data)
            run_pipeline(data, PipelineType.CALCULATE, n_cpu=1)
            with data.db.session_context() as s:
                self.__assert_basic_stats(s)
def test_constant(self):
    """Setting an existing constant ('fthr:%') must not create extra Constant rows."""
    expected = 14
    with TemporaryDirectory() as base:
        args, data = bootstrap_dir(base, m(V), '5')
        bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        with data.db.session_context() as session:
            self.assertEqual(session.query(count(Constant.id)).scalar(), expected)
        args, data = bootstrap_dir(base, m(V), '5', 'constants', 'set',
                                   'fthr:%', '154', mm(FORCE))
        constants(args, data)
        with data.db.session_context() as session:
            # still the same number of constants after the update
            self.assertEqual(session.query(count(Constant.id)).scalar(), expected)
def test_models(self):
    """Build a kit history (bikes / chains), import activities tagged with the
    kit, run the pipelines, and check the exported model tree."""
    with TemporaryDirectory() as f:
        args, data = bootstrap_dir(f, m(V), '5', configurator=default)
        with data.db.session_context() as s:
            # two bikes, then a history of chain changes on 'cotic'
            start(s, 'bike', 'cotic', '2018-01-01', True)
            start(s, 'bike', 'marin', '2018-01-01', False)
            change(s, 'cotic', 'chain', 'sram', None, True, True)
            change(s, 'cotic', 'chain', 'kcm', '2018-01-01', False, False)
            change(s, 'cotic', 'chain', 'sram', '2018-05-01', False, False)
            change(s, 'cotic', 'chain', 'kcm', '2018-07-01', False, False)
            change(s, 'cotic', 'chain', 'sram', '2018-04-01', False, False)
            start(s, 'bike', 'bowman', '2018-01-01', False)
            change(s, 'bowman', 'chain', 'sram', None, False, True)
        # import two activities associated (via -K) with the 'cotic' kit
        args, data = bootstrap_dir(
            f, m(V), '5', mm(DEV), READ,
            'data/test/source/personal/2018-08-03-rec.fit', m(K), 'cotic')
        read(args, data)
        args, data = bootstrap_dir(
            f, m(V), '5', mm(DEV), READ,
            'data/test/source/personal/2018-08-27-rec.fit', m(K), 'cotic')
        read(args, data)
        run_pipeline(data, PipelineType.CALCULATE, like=['%Activity%'], n_cpu=1)
        run_pipeline(data, PipelineType.CALCULATE, like=['%Kit%'], n_cpu=1)
        with data.db.session_context() as s:
            # export group -> item -> component -> model, no per-node statistics
            bike = get_name(s, 'bike').to_model(s, depth=3, statistics=INDIVIDUAL,
                                                own_models=False)
            self.assertEqual(bike[TYPE], KitGroup.SIMPLE_NAME)
            self.assertEqual(bike[NAME], 'bike')
            self.assertEqual(len(bike[ITEMS]), 3)
            cotic = [item for item in bike[ITEMS] if item[NAME] == 'cotic'][0]
            self.assertEqual(cotic[TYPE], KitItem.SIMPLE_NAME)
            self.assertEqual(cotic[NAME], 'cotic')
            self.assertEqual(len(cotic[COMPONENTS]), 1)
            chain = cotic[COMPONENTS][0]
            self.assertEqual(chain[TYPE], KitComponent.SIMPLE_NAME)
            self.assertEqual(chain[NAME], 'chain')
            # 6 model entries: the five changes above plus the initial sram
            self.assertEqual(len(chain[MODELS]), 6)
            self.assertFalse(STATISTICS in bike)
def test_michael(self):
    """Import an iWatch running FIT file, run the calculators, and sanity-check stats."""
    with TemporaryDirectory() as base:
        bootstrap_dir(base, m(V), '5')
        bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        args, data = bootstrap_dir(
            base, m(V), '5', mm(DEV), 'read',
            'data/test/source/other/2019-05-09-051352-Running-iWatchSeries3.fit'
        )
        read(args, data)
        run_pipeline(data, PipelineType.CALCULATE, n_cpu=1)
        with data.db.session_context() as session:
            self.__assert_basic_stats(session)
def test_context(self):
    """Timestamp.on_success records a timestamp only after its block completes.

    Fix: the first `args, data = bootstrap_dir(...)` binding was dead (it was
    unconditionally overwritten by the configured bootstrap on the next line),
    so only the call is kept.
    """
    with TemporaryDirectory() as f:
        bootstrap_dir(f, m(V), '5')
        args, data = bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
        with data.db.session_context() as s:
            with Timestamp(owner=TestTimestamp).on_success(s):
                # no timestamp yet while the context is still open
                n = s.query(count(Timestamp.id)).scalar()
                self.assertEqual(n, 0)
            # one timestamp written once the context exits successfully
            n = s.query(count(Timestamp.id)).scalar()
            self.assertEqual(n, 1)
def test_empty_data(self):
    """A near-empty monitor file still yields a journal with a non-zero span.

    Fix: dropped the dead `args, data = ...` binding on the first bootstrap —
    it was unconditionally overwritten below (matching sibling tests, which
    call the first bootstrap without binding the result).
    """
    with TemporaryDirectory() as f:
        bootstrap_dir(f, m(V), '5')
        bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
        args, data = bootstrap_dir(f, m(V), '5', mm(DEV), 'read',
                                   'data/test/source/other/37140810636.fit')
        read(args, data)
        run_pipeline(data, PipelineType.CALCULATE, n_cpu=1)
        with data.db.session_context() as s:
            n = s.query(func.count(StatisticJournal.id)).scalar()
            self.assertEqual(n, 44)
            # a single monitor journal whose start and finish differ
            mjournal = s.query(MonitorJournal).one()
            self.assertNotEqual(mjournal.start, mjournal.finish)
def test_heart_alarms(self):
    """An activity with heart-rate alarms still records a plausible active distance."""
    with TemporaryDirectory() as base:
        bootstrap_dir(base, m(V), '5')
        bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        args, data = bootstrap_dir(
            base, m(V), '5', mm(DEV), 'read',
            'data/test/source/personal/2016-07-19-mpu-s-z2.fit')
        read(args, data)
        run_pipeline(data, PipelineType.CALCULATE, n_cpu=1)
        with data.db.session_context() as session:
            journals = session.query(StatisticJournal). \
                join(StatisticName). \
                filter(StatisticName.name == N.ACTIVE_DISTANCE).all()
            for journal in journals:
                self.assertGreater(journal.value, 30)
def test_set(self):
    """Timestamp.set creates exactly one timestamp, stamped with (roughly) now.

    Fix: the first `args, data = bootstrap_dir(...)` binding was dead — it was
    unconditionally overwritten by the configured bootstrap — so only the call
    is kept.
    """
    with TemporaryDirectory() as f:
        bootstrap_dir(f, m(V), '5')
        args, data = bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
        with data.db.session_context() as s:
            source = add(s, Source())
            n = s.query(count(Timestamp.id)).scalar()
            self.assertEqual(n, 0)
            Timestamp.set(s, TestTimestamp, source=source)
            n = s.query(count(Timestamp.id)).scalar()
            self.assertEqual(n, 1)
            t = s.query(Timestamp).filter(
                Timestamp.owner == TestTimestamp).one()
            # stamped within ~0.05s of now (assertAlmostEqual places=1)
            self.assertAlmostEqual(t.time.timestamp(),
                                   dt.datetime.now().timestamp(), 1)
def test_values(self):
    """Import three overlapping monitor files and verify the merged daily steps.

    On failure, dumps the raw 'steps' / 'cumulative-steps' rows via sqlite3 to
    aid debugging.
    """
    with TemporaryDirectory() as f:
        bootstrap_dir(f, m(V), '5')
        bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
        # one 'read' command per monitor file
        for file in ('24696157869', '24696160481', '24696163486'):
            args, data = bootstrap_dir(
                f, m(V), '5', mm(DEV), 'read', mm(MONITOR),
                'data/test/source/personal/[email protected]_%s.fit' % file)
            read(args, data)
        # path = args.system_path(subdir='data', file='activity.db')
        # run(f'sqlite3 {path} ".dump"', shell=True)
        run_pipeline(data, PipelineType.CALCULATE, force=True,
                     like=('%Monitor%', ), start='2018-01-01', n_cpu=1)
        with data.db.session_context() as s:
            mjournals = s.query(MonitorJournal).order_by(
                MonitorJournal.start).all()
            assert mjournals[2].start == to_time(
                '2018-09-06 15:06:00'), mjournals[2].start
            # steps
            summary = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticJournal.time >= local_date_to_time('2018-09-06'),
                       StatisticJournal.time < local_date_to_time('2018-09-07'),
                       StatisticName.owner == MonitorCalculator,
                       StatisticName.name == N.DAILY_STEPS).one()
            if summary.value != 12757:
                # about to fail below — dump the underlying rows first
                path = args.system_path(subdir='data', file='activity.db')
                run('sqlite3 %s "select * from statistic_journal as j, statistic_journal_integer as i, '
                    'statistic_name as n where j.id = i.id and j.statistic_name_id = n.id and '
                    'n.name = \'steps\' order by j.time"' % path,
                    shell=True)
                run('sqlite3 %s "select * from statistic_journal as j, statistic_journal_integer as i, '
                    'statistic_name as n where j.id = i.id and j.statistic_name_id = n.id and '
                    'n.name = \'cumulative-steps\' order by j.time"' % path,
                    shell=True)
            # connect has 12757 for this date.
            self.assertEqual(summary.value, 12757)
def test_constant(self):
    """Read an activity then check the diary model produced for that date."""
    with TemporaryDirectory() as base:
        bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        args, data = bootstrap_dir(base, m(V), '5', mm(DEV), 'read',
                                   'data/test/source/personal/2018-03-04-qdp.fit',
                                   '-Kn_cpu=1')
        read(args, data)
        with data.db.session_context() as session:
            model = list(read_date(session, to_date('2018-03-04')))
            for index, entry in enumerate(model):
                print(index, entry)
            title, diary, shrimp, activity, database = model
            activity = activity[1][2]  # multiple now supported
            print(activity)
            name = activity[1]
            print(name)
            self.assertEqual(name[LABEL], 'Name')
            self.assertEqual(name[VALUE], '2018-03-04T07:16:33')
            route = activity[2]
            self.assertEqual(route[LABEL], 'Route')
def test_context_error(self):
    """No timestamp is recorded when the on_success block raises.

    Fix: the first `args, data = bootstrap_dir(...)` binding was dead — it was
    unconditionally overwritten by the configured bootstrap — so only the call
    is kept.
    """
    with TemporaryDirectory() as f:
        bootstrap_dir(f, m(V), '5')
        args, data = bootstrap_dir(f, m(V), '5', mm(DEV), configurator=default)
        with data.db.session_context() as s:
            try:
                with Timestamp(owner=TestTimestamp).on_success(s):
                    n = s.query(count(Timestamp.id)).scalar()
                    self.assertEqual(n, 0)
                    raise Exception('foo')
            except Exception as e:
                self.assertEqual(
                    str(e), 'foo'
                )  # for some weird reason assertRaisesRegex was not working
            # the failure must have suppressed the timestamp
            n = s.query(count(Timestamp.id)).scalar()
            self.assertEqual(n, 0)
def test_segment_bug(self):
    """Regression test: force a re-read of one local activity through the pipeline.

    NOTE: depends on a file under /home/andrew — only runs on that machine.
    """
    with TemporaryDirectory() as base:
        args, data = bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        run_pipeline(data, PipelineType.READ_ACTIVITY, force=True,
                     paths=['/home/andrew/archive/fit/bike/cotic/2016-07-27-pm-z4.fit'])
def test_reftuple(self):
    """reftuple expands ${...} references against statistics stored in the database."""
    Power = reftuple('Power', 'bike, weight')
    power = Power('${bike}', '${weight}')
    with TemporaryDirectory() as base:
        args, data = bootstrap_dir(base, m(V), '5')
        with data.db.session_context() as session:
            source = Source(type=SourceType.SOURCE)
            session.add(source)
            # bike is stored as JSON text, weight as a float
            StatisticJournalText.add(session, 'Bike', None, None, self, source,
                                     '{"mass": 42}', '1980-01-01')
            StatisticJournalFloat.add(session, 'Weight', None, None, self, source,
                                      13, '1980-01-01')
            expanded = power.expand(session, '1990-01-01', default_owner=self)
            expanded = expanded._replace(bike=loads(expanded.bike))
            self.assertEqual(expanded.weight, 13)
            self.assertEqual(expanded.bike['mass'], 42)
def test_exponential_distance(self):
    """An exponential distance plan creates one parent topic with 14 children."""
    with TemporaryDirectory() as base:
        args, data = bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        plan = exponential_distance('Distance test', 'w[mon,wed,fri]', '20km',
                                    5, '2018-07-25', '1m')
        plan.create(data.db)
        with data.db.session_context() as session:
            root = session.query(DiaryTopic).filter(
                DiaryTopic.parent_id == None,
                DiaryTopic.title == 'Plan').one()
            self.assertEqual(len(root.children), 1)
            parent = root.children[0]
            self.assertEqual(len(parent.children), 14)
            for child in parent.children:
                print(child)
def test_british(self):
    """The twelve-week improver plan spans 12 weeks and creates 7 child topics."""
    with TemporaryDirectory() as base:
        args, data = bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        plan = twelve_week_improver('2018-07-25')
        plan.create(data.db)
        with data.db.session_context() as session:
            root = session.query(DiaryTopic).filter(
                DiaryTopic.parent_id == None,
                DiaryTopic.title == 'Plan').one()
            self.assertEqual(len(root.children), 1)
            self.assertTrue(root.schedule)
            self.assertEqual(root.schedule.start, to_date('2018-07-25'))
            self.assertEqual(root.schedule.finish,
                             add_date('2018-07-25', (12, 'w')))
            parent = root.children[0]
            self.assertEqual(len(parent.children), 7)
            for child in parent.children:
                print(child)
def test_activities(self):
    """Full activity import with SRTM1 elevation: check raw/fixed elevation
    counts and overall journal sanity."""
    with TemporaryDirectory() as base:
        bootstrap_dir(base, m(V), '5')
        bootstrap_dir(base, m(V), '5', mm(DEV), configurator=default)
        # point the SRTM1 constant at the local elevation tiles
        args, data = bootstrap_dir(base, m(V), '5', 'constants', 'set',
                                   'SRTM1.dir', '/home/andrew/archive/srtm1',
                                   mm(FORCE))
        constants(args, data)
        args, data = bootstrap_dir(
            base, m(V), '5', mm(DEV), 'read',
            'data/test/source/personal/2018-08-27-rec.fit')
        read(args, data)
        run_pipeline(data, PipelineType.CALCULATE, force=True,
                     start='2018-01-01', n_cpu=1)
        with data.db.session_context() as s:
            # raw and corrected elevations have the same number of samples
            n_raw = s.query(count(StatisticJournalFloat.id)). \
                join(StatisticName). \
                filter(StatisticName.name == N.RAW_ELEVATION).scalar()
            self.assertEqual(2099, n_raw)
            n_fix = s.query(count(StatisticJournalFloat.id)). \
                join(StatisticName). \
                filter(StatisticName.name == N.ELEVATION).scalar()
            self.assertEqual(2099, n_fix)
            # WHY does this jump around?
            # total journal count is unstable between runs, so only bounded
            n = s.query(count(StatisticJournal.id)).scalar()
            # self.assertEqual(50403, n)
            self.assertTrue(n > 30000)
            self.assertTrue(n < 100000)
            journal = s.query(ActivityJournal).one()
            self.assertNotEqual(journal.start, journal.finish)
def test_sources(self):
    """End-to-end diary lifecycle: create an entry, persist it, summarise it,
    then delete it and check the cascade cleans up."""
    with TemporaryDirectory() as f:
        args, data = bootstrap_dir(f, m(V), '5', configurator=acooke)
        with data.db.session_context() as s:
            # add a diary entry
            journal = add(s, DiaryTopicJournal(date='2018-09-29'))
            cache = journal.cache(s)
            diary = s.query(DiaryTopic).filter(DiaryTopic.title == 'Status').one()
            fields = diary.fields
            self.assertEqual(len(fields), 6, list(enumerate(map(str, fields))))
            self.assertEqual(fields[0].statistic_name.name, 'notes')
            self.assertEqual(fields[1].statistic_name.name, 'weight',
                             str(fields[1]))
            statistics = [cache[field] for field in fields]
            for statistic in statistics:
                self.assertTrue(statistic.value is None, statistics)
            # fill in notes and weight; the session commit persists them
            statistics[0].value = 'hello world'
            statistics[1].value = 64.5
        with data.db.session_context() as s:
            # check the diary entry was persisted
            journal = DiaryTopicJournal.get_or_add(s, '2018-09-29')
            cache = journal.cache(s)
            diary = s.query(DiaryTopic).filter(DiaryTopic.title == 'Status').one()
            fields = diary.fields
            self.assertEqual(len(fields), 6, list(enumerate(map(str, fields))))
            self.assertEqual(fields[0].statistic_name.name, 'notes')
            self.assertEqual(fields[1].statistic_name.name, 'weight',
                             str(fields[1]))
            statistics = [cache[field] for field in fields]
            self.assertEqual(statistics[1].value, 64.5)
            self.assertEqual(statistics[1].type, StatisticJournalType.FLOAT)
        # generate summary stats
        SummaryCalculator(data, schedule='m').run()
        SummaryCalculator(data, schedule='y').run()
        with data.db.session_context() as s:
            # check the summary stats
            diary = s.query(DiaryTopic).filter(DiaryTopic.title == 'Status').one()
            weights = s.query(StatisticJournal).join(StatisticName). \
                filter(StatisticName.owner == diary,
                       StatisticName.name == 'weight'). \
                order_by(StatisticJournal.time).all()
            self.assertEqual(len(weights), 2)
            self.assertEqual(weights[1].value, 64.5)
            self.assertEqual(len(weights[1].measures), 2, weights[1].measures)
            self.assertEqual(weights[1].measures[0].rank, 1)
            self.assertEqual(weights[1].measures[0].percentile, 100,
                             weights[1].measures[0].percentile)
            n = s.query(count(StatisticJournalFloat.id)).scalar()
            self.assertEqual(n, 4, n)
            n = s.query(count(StatisticJournalInteger.id)).scalar()
            self.assertEqual(n, 6, n)
            # monthly and yearly averages both reflect the single weight value
            m_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                filter(StatisticName.name == 'avg-month-weight').one()
            self.assertEqual(m_avg.value, 64.5)
            y_avg = s.query(StatisticJournalFloat).join(StatisticName). \
                filter(StatisticName.name == 'avg-year-weight').one()
            self.assertEqual(y_avg.value, 64.5)
            month = s.query(Interval).filter(Interval.schedule == 'm').first()
            self.assertEqual(month.start, to_date('2018-09-01'), month.start)
            self.assertEqual(month.finish, to_date('2018-10-01'), month.finish)
        with data.db.session_context() as s:
            # delete the diary entry
            journal = DiaryTopicJournal.get_or_add(s, '2018-09-29')
            s.delete(journal)
        with data.db.session_context() as s:
            # check the delete cascade
            self.assertEqual(s.query(count(DiaryTopicJournal.id)).scalar(), 1)
            # this should be zero because the Intervals were automatically deleted
            # (well, now +1 because there's an original default weight)
            for source in s.query(Source).all():
                print(source)
            for journal in s.query(StatisticJournal).all():
                print(journal)
            self.assertEqual(s.query(count(Source.id)).scalar(), 37,
                             list(map(str, s.query(Source).all())))
            # constants
            self.assertEqual(s.query(count(StatisticJournalText.id)).scalar(), 13,
                             s.query(count(StatisticJournalText.id)).scalar())
            self.assertEqual(s.query(count(StatisticJournal.id)).scalar(), 22,
                             s.query(count(StatisticJournal.id)).scalar())
def test_bikes(self):
    """Exercise the kit command surface: start/change/finish/undo plus the
    rendered tree output of show and statistics.

    NOTE(review): the expected tree strings below were recovered from a
    whitespace-mangled source; the line breaks / indentation inside them are a
    reconstruction and should be verified against the actual command output.
    """
    with TemporaryDirectory() as f:
        args, data = bootstrap_dir(f, m(V), '5', configurator=default)
        with data.db.session_context() as s:
            # starting a dated item requires --force (retrospective date)
            with self.assertRaises(Exception) as ctx:
                start(s, 'bike', 'cotic', '2020-03-24', False)
            self.assertTrue('--force' in str(ctx.exception), ctx.exception)
            start(s, 'bike', 'cotic', '2020-03-24', True)
            # run('sqlite3 %s ".dump"' % f.name, shell=True)
            # a brand new group name also requires --force
            with self.assertRaises(Exception) as ctx:
                start(s, 'xxxx', 'marin', None, False)
            self.assertTrue('--force' in str(ctx.exception), ctx.exception)
            start(s, 'bike', 'marin', None, False)
            with self.assertRaises(Exception) as ctx:
                change(s, 'cotic', 'chain', 'sram', '2018-02-01', False, False)
            self.assertTrue('--force' in str(ctx.exception))
            # build a chain history on cotic (deliberately out of date order)
            change(s, 'cotic', 'chain', 'sram', None, True, True)
            change(s, 'cotic', 'chain', 'kcm', '2018-01-01', False, False)
            change(s, 'cotic', 'chain', 'sram', '2018-05-01', False, False)
            change(s, 'cotic', 'chain', 'kcm', '2018-07-01', False, False)
            change(s, 'cotic', 'chain', 'sram', '2018-04-01', False, False)
            # names already used for a group / model cannot become items
            with self.assertRaises(Exception) as ctx:
                start(s, 'bike', 'bike', None, True)
            self.assertTrue('bike' in str(ctx.exception), ctx.exception)
            with self.assertRaises(Exception) as ctx:
                start(s, 'bike', 'sram', None, True)
            self.assertTrue('sram' in str(ctx.exception), ctx.exception)
            start(s, 'bike', 'bowman', None, False)
            change(s, 'bowman', 'chain', 'sram', None, False, True)
            self.assert_command('''item: cotic 2020-03-24 -
`-component: chain
  +-model: sram 2020-03-24 -
  +-model: kcm 2018-01-01 - 2018-04-01
  +-model: sram 2018-05-01 - 2018-07-01
  +-model: kcm 2018-07-01 - 2020-03-24
  `-model: sram 2018-04-01 - 2018-05-01
''', show, s, 'cotic', None)
            self.assert_command(f'''group: bike
+-item: cotic
| +-Age
| | +-n: 1
| | `-sum: {days('2020-03-24')}
| `-component: chain
|   +-model: sram
|   | `-Age
|   |   +-n: 1
|   |   `-sum: {days('2020-03-24')}
|   +-model: kcm
|   | `-Age
|   |   +-n: 1
|   |   `-sum: 90
|   +-model: sram
|   | `-Age
|   |   +-n: 1
|   |   `-sum: 61
|   +-model: kcm
|   | `-Age
|   |   +-n: 1
|   |   `-sum: 631
|   `-model: sram
|     `-Age
|       +-n: 1
|       `-sum: 30
+-item: marin
| `-Age
|   +-n: 1
|   `-sum: 0
`-item: bowman
  +-Age
  | +-n: 1
  | `-sum: 0
  `-component: chain
    `-model: sram
      `-Age
        +-n: 1
        `-sum: 0
''', statistics, s, 'bike')
            self.assert_command(f'''item: cotic
+-Age
| +-n: 1
| `-sum: {days('2020-03-24')}
`-component: chain
  +-model: sram
  | `-Age
  |   +-n: 1
  |   `-sum: {days('2020-03-24')}
  +-model: kcm
  | `-Age
  |   +-n: 1
  |   `-sum: 90
  +-model: sram
  | `-Age
  |   +-n: 1
  |   `-sum: 61
  +-model: kcm
  | `-Age
  |   +-n: 1
  |   `-sum: 631
  `-model: sram
    `-Age
      +-n: 1
      `-sum: 30
''', statistics, s, 'cotic')
            self.assert_command(f'''component: chain
+-model: sram
| `-Age
|   +-n: 1
|   `-sum: {days('2020-03-24')}
+-model: kcm
| `-Age
|   +-n: 1
|   `-sum: 90
+-model: sram
| `-Age
|   +-n: 1
|   `-sum: 61
+-model: kcm
| `-Age
|   +-n: 1
|   `-sum: 631
+-model: sram
| `-Age
|   +-n: 1
|   `-sum: 30
`-model: sram
  `-Age
    +-n: 1
    `-sum: 0
''', statistics, s, 'chain')
            self.assert_command(f'''model: sram
`-Age
  +-n: 1
  `-sum: {days('2020-03-24')}
''', statistics, s, 'sram')
            finish(s, 'bowman', None, False)
            # finishing twice fails because the item is already retired
            with self.assertRaises(Exception) as ctx:
                finish(s, 'bowman', None, False)
            self.assertTrue('retired' in str(ctx.exception), ctx.exception)
            self.assertEqual(
                len(
                    KitModel.get_all(s, KitItem.get(s, 'cotic'),
                                     KitComponent.get(s, 'chain'))), 5)
            # undo removes models one name at a time
            undo(s, 'cotic', 'chain', 'sram', None, True)
            self.assertEqual(
                len(
                    KitModel.get_all(s, KitItem.get(s, 'cotic'),
                                     KitComponent.get(s, 'chain'))), 2)
            undo(s, 'cotic', 'chain', 'kcm', None, True)
            self.assertEqual(
                len(
                    KitModel.get_all(s, KitItem.get(s, 'cotic'),
                                     KitComponent.get(s, 'chain'))), 0)
            undo(s, 'bowman', 'chain', 'sram', None, True)
            # the component disappears once its last model is undone
            self.assertFalse(KitComponent.get(s, 'chain', require=False))