def test_merge_at_end_too_much_overlap():
    """When the incoming series duplicates everything we already cached,
    merge_at_end should give up and leave the cached data untouched."""
    cached = MTS(MockRedis())
    cached.key_basis = lambda: 'some-key-goes-here'
    incoming = MTS(MockRedis())
    # Both sides hold identical copies of the same data set.
    cached.result = {'values': copy.deepcopy(INITIAL_MTS_DATA)}
    incoming.result = {'values': copy.deepcopy(INITIAL_MTS_DATA)}

    cached.merge_at_end(incoming)

    assert cached.result['values'] == INITIAL_MTS_DATA
def test_merge_at_end_one_overlap():
    """A single overlapping point: the incoming series' value must win
    over the cached value at the shared timestamp."""
    cached = MTS(MockRedis())
    cached.key_basis = lambda: 'some-key-goes-here'
    incoming = MTS(MockRedis())
    cached.result = {'values': copy.deepcopy(INITIAL_MTS_DATA)}
    # Timestamp 799 overlaps the cached tail; 9001 should replace the old value.
    incoming.result = {'values': [[799, 9001], [800, 21], [801, 22]]}

    cached.merge_at_end(incoming)

    assert cached.result['values'][-3:] == [[799, 9001], [800, 21], [801, 22]]
def test_merge_at_end_replaces_when_existing_data_is_short():
    """If the cached data is too short to iterate over and out of order
    relative to the incoming series, the incoming data replaces it wholesale."""
    cached = MTS(MockRedis())
    cached.key_basis = lambda: 'some-key-goes-here'
    incoming = MTS(MockRedis())
    incoming.result = {'values': copy.deepcopy(INITIAL_MTS_DATA)}
    # Only two cached points, both earlier than the incoming range.
    cached.result = {'values': [[789, 100], [790, 110]]}

    cached.merge_at_end(incoming)

    assert cached.result['values'] == INITIAL_MTS_DATA
def test_merge_at_end_no_overlap():
    """Common case: incoming data starts after the cached data ends,
    so the two series are simply concatenated."""
    cached = MTS(MockRedis())
    cached.key_basis = lambda: 'some-key-goes-here'
    incoming = MTS(MockRedis())
    cached.result = {'values': copy.deepcopy(INITIAL_MTS_DATA)}
    incoming.result = {'values': [[800, 21], [801, 22]]}

    cached.merge_at_end(incoming)

    assert cached.result['values'] == INITIAL_MTS_DATA + [[800, 21], [801, 22]]