def test_multiple_profiles(self):
    """Exercise multiple profiles and collections"""
    expected_count_all = 0
    profiles_count = 5
    collection_names = ('testing', 'keys', 'tabs', 'history', 'bookmarks')
    collection_counts = {}

    # Produce a set of Profiles in the datastore
    profiles = []
    for i in range(profiles_count):
        # BUG FIX: the original used password='******'%i, which raises
        # TypeError because the format string contains no conversion
        # specifier. Give each profile a distinct formatted password.
        profile = Profile(user_name='t-%s' % i,
                          user_id='id-%s' % i,
                          password='p-%s' % i)
        profile.put()
        profiles.append(profile)

    # Generate collections for each profile.
    for p in profiles:
        auth_header = self.build_auth_header(p.user_name, p.password)
        collection_counts[p.user_name] = {}

        # Run through several collections and make WBOs
        for cn in collection_names:
            curr_count = random.randint(1, 10)
            collection_counts[p.user_name][cn] = curr_count
            expected_count_all += curr_count

            # Generate a bunch of random-content WBOs
            base_url = '/sync/1.0/%s/storage/%s' % (p.user_name, cn)
            for i in range(curr_count):
                resp = self.put_random_wbo(base_url, auth_header)

    # Ensure the total number of WBOs is correct.
    result_count_all = WBO.all().count()
    self.assertEqual(expected_count_all, result_count_all)

    # Ensure the counts for each profile collection matches inserts.
    for profile in profiles:
        counts = Collection.get_counts(profile)
        for name in collection_names:
            c = Collection.get_by_profile_and_name(profile, name)
            self.assertEqual(
                collection_counts[profile.user_name][name],
                WBO.get_by_collection(c).count()
            )

    # Delete each of the collections for each user.
    for profile in profiles:
        auth_header = self.build_auth_header(profile.user_name,
                                             profile.password)
        for name in collection_names:
            url = '/sync/1.0/%s/storage/%s' % (profile.user_name, name)
            resp = self.app.delete(url, headers=auth_header)

            # Ensure the individual collection is now empty.
            c = Collection.get_by_profile_and_name(profile, name)
            self.assertEqual(0, WBO.get_by_collection(c).count())

    # Ensure there are no more WBOs
    result_count_all = WBO.all().count()
    self.assertEqual(0, result_count_all)
def test_multiple_profiles(self):
    """Exercise multiple profiles and collections"""
    expected_count_all = 0
    profiles_count = 5
    collection_names = ('testing', 'keys', 'tabs', 'history', 'bookmarks')
    collection_counts = {}

    # Produce a set of Profiles in the datastore.
    # BUG FIX: password was '******' % i, which raises TypeError ("not all
    # arguments converted") because the string has no %-specifier; use a
    # distinct, formattable per-profile password instead.
    profiles = []
    for i in range(profiles_count):
        profile = Profile(user_name='t-%s' % i,
                          user_id='id-%s' % i,
                          password='p-%s' % i)
        profile.put()
        profiles.append(profile)

    # Generate collections for each profile.
    for p in profiles:
        auth_header = self.build_auth_header(p.user_name, p.password)
        collection_counts[p.user_name] = {}

        # Run through several collections and make WBOs
        for cn in collection_names:
            curr_count = random.randint(1, 10)
            collection_counts[p.user_name][cn] = curr_count
            expected_count_all += curr_count

            # Generate a bunch of random-content WBOs
            base_url = '/sync/1.0/%s/storage/%s' % (p.user_name, cn)
            for i in range(curr_count):
                resp = self.put_random_wbo(base_url, auth_header)

    # Ensure the total number of WBOs is correct.
    result_count_all = WBO.all().count()
    self.assertEqual(expected_count_all, result_count_all)

    # Ensure the counts for each profile collection matches inserts.
    for profile in profiles:
        counts = Collection.get_counts(profile)
        for name in collection_names:
            c = Collection.get_by_profile_and_name(profile, name)
            self.assertEqual(collection_counts[profile.user_name][name],
                             WBO.get_by_collection(c).count())

    # Delete each of the collections for each user.
    for profile in profiles:
        auth_header = self.build_auth_header(profile.user_name,
                                             profile.password)
        for name in collection_names:
            url = '/sync/1.0/%s/storage/%s' % (profile.user_name, name)
            resp = self.app.delete(url, headers=auth_header)

            # Ensure the individual collection is now empty.
            c = Collection.get_by_profile_and_name(profile, name)
            self.assertEqual(0, WBO.get_by_collection(c).count())

    # Ensure there are no more WBOs
    result_count_all = WBO.all().count()
    self.assertEqual(0, result_count_all)
def test_cascading_profile_delete(self):
    """Ensure that profile deletion cascades down to collections and WBOs"""
    profile = self.profile
    self.build_wbo_set()

    # Sanity check: the fixture produced at least one row of each kind.
    for model in (WBO, Collection, Profile):
        self.assert_(model.all().count() > 0)

    profile.delete()

    # After deleting the profile, no rows of any kind should remain.
    for model in (WBO, Collection, Profile):
        self.assertEquals(0, model.all().count())
def test_cascading_collection_delete(self):
    """Ensure that collection deletion cascades down to WBOs"""
    profile = self.profile
    self.build_wbo_set()

    remaining = WBO.all().count()

    # Materialize the collection list up front, then delete each one and
    # verify that exactly that collection's WBOs disappeared with it.
    colls = list(Collection.all().ancestor(profile))
    for coll in colls:
        num_members = len(list(coll.retrieve()))
        coll.delete()
        remaining -= num_members
        self.assertEqual(remaining, WBO.all().count())

    # Once every collection is gone, so is every WBO.
    self.assertEqual(0, WBO.all().count())
def test_alternate_output_formats(self):
    """Exercise alternate output formats"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    self.build_wbo_set()

    # Expected order for a full fetch is descending sortindex.
    # key/reverse replaces the old cmp-style comparator (removed in
    # Python 3); reverse=True is stable, matching the old behavior.
    wbos = list(WBO.all())
    wbos.sort(key=lambda w: w.sortindex, reverse=True)
    expected_ids = [w.wbo_id for w in wbos]

    # Default JSON output: a list of full WBOs in the expected order.
    url = '/sync/1.0/%s/storage/%s?full=1' % (p.user_name, c.name)
    resp = self.app.get(url, headers=ah)
    result_data = simplejson.loads(resp.body)
    result_ids = [x['id'] for x in result_data]
    self.assertEqual(expected_ids, result_ids)

    # application/newlines output: one JSON-encoded WBO per line.
    url = '/sync/1.0/%s/storage/%s?full=1' % (p.user_name, c.name)
    headers = {'Accept': 'application/newlines'}
    headers.update(ah)
    resp = self.app.get(url, headers=headers)
    for line in resp.body.splitlines():
        data = simplejson.loads(line)
        self.assert_(data['id'] in expected_ids)

    # NOTE(review): a disabled `if (False):` block that logged the
    # application/whoisi output was removed here as dead code.
def test_retrieval_with_limit_offset(self):
    """Exercise collection retrieval with limit and offset"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    self.build_wbo_set()
    wbos = [w for w in WBO.all()]

    # Floor division: plain `/` yields a float under Python 3 and
    # range() would then raise TypeError.
    max_limit = len(wbos) // 2
    max_offset = len(wbos) // 2

    # Try every limit/offset combination and compare against a local
    # slice of the (sort=oldest) WBO list.
    for c_limit in range(1, max_limit):
        for c_offset in range(1, max_offset):
            expected_ids = [
                w.wbo_id for w in wbos[c_offset:c_offset + c_limit]
            ]
            url = '/sync/1.0/%s/storage/%s?limit=%s&offset=%s&sort=oldest' % (
                p.user_name, c.name, c_limit, c_offset)
            resp = self.app.get(url, headers=ah)
            result_data = simplejson.loads(resp.body)
            self.log.debug("URL %s" % url)
            self.log.debug("EXPECTED %s" % simplejson.dumps(expected_ids))
            self.log.debug("RESULT %s" % resp.body)
            self.assertEqual(expected_ids, result_data)
def test_retrieval_with_limit_offset(self):
    """Exercise collection retrieval with limit and offset"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    self.build_wbo_set()
    wbos = [w for w in WBO.all()]

    # Floor division (`//`) keeps these ints under Python 3, where `/`
    # on ints produces a float that range() rejects.
    max_limit = len(wbos) // 2
    max_offset = len(wbos) // 2

    # Exercise every limit/offset combination against a local slice of
    # the oldest-first WBO list.
    for c_limit in range(1, max_limit):
        for c_offset in range(1, max_offset):
            expected_ids = [
                w.wbo_id for w in wbos[(c_offset):(c_offset + c_limit)]
            ]
            url = '/sync/1.0/%s/storage/%s?limit=%s&offset=%s&sort=oldest' % (
                p.user_name, c.name, c_limit, c_offset
            )
            resp = self.app.get(url, headers=ah)
            result_data = simplejson.loads(resp.body)
            self.log.debug("URL %s" % url)
            self.log.debug("EXPECTED %s" % simplejson.dumps(expected_ids))
            self.log.debug("RESULT %s" % resp.body)
            self.assertEqual(expected_ids, result_data)
def test_retrieval_with_sort(self):
    """Exercise collection retrieval with sort options"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    self.build_wbo_set()
    wbos = [w for w in WBO.all()]

    # Map each sort option to (key function, descending?). key/reverse
    # replaces the cmp-style comparators (removed in Python 3) and is
    # order-equivalent: reverse=True sorts are stable, like the old
    # descending comparators.
    sorts = {
        'oldest': (lambda w: w.modified, False),
        'newest': (lambda w: w.modified, True),
        # sort=index returns items by descending sortindex
        # (highest weight first), matching the Sync 1.0 API.
        'index': (lambda w: w.sortindex, True),
    }

    for sort_option, (key_fn, descending) in sorts.items():
        wbos.sort(key=key_fn, reverse=descending)
        expected_ids = [w.wbo_id for w in wbos]
        url = '/sync/1.0/%s/storage/%s?sort=%s' % (
            p.user_name, c.name, sort_option
        )
        resp = self.app.get(url, headers=ah)
        result_data = simplejson.loads(resp.body)
        self.log.debug("URL %s" % url)
        self.log.debug("EXPECTED %s" % simplejson.dumps(expected_ids))
        self.log.debug("RESULT %s" % resp.body)
        self.assertEqual(expected_ids, result_data)
def test_alternate_output_formats(self):
    """Exercise alternate output formats"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    self.build_wbo_set()

    # BUG FIX: the original compared against WBO.all() iteration order,
    # which is not the order a ?full=1 fetch returns. Sort by descending
    # sortindex, matching the sibling version of this test.
    wbos = list(WBO.all())
    wbos.sort(key=lambda w: w.sortindex, reverse=True)
    expected_ids = [w.wbo_id for w in wbos]

    # Default JSON output: list of full WBOs in expected order.
    url = '/sync/1.0/%s/storage/%s?full=1' % (p.user_name, c.name)
    resp = self.app.get(url, headers=ah)
    result_data = simplejson.loads(resp.body)
    result_ids = [x['id'] for x in result_data]
    self.assertEqual(expected_ids, result_ids)

    # application/newlines output: one JSON-encoded WBO per line.
    url = '/sync/1.0/%s/storage/%s?full=1' % (p.user_name, c.name)
    headers = {'Accept': 'application/newlines'}
    headers.update(ah)
    resp = self.app.get(url, headers=headers)
    for line in resp.body.splitlines():
        data = simplejson.loads(line)
        self.assert_(data['id'] in expected_ids)

    # NOTE(review): a disabled `if (False):` block that logged the
    # application/whoisi output was removed here as dead code.
def test_retrieval_with_sort(self):
    """Exercise collection retrieval with sort options"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    self.build_wbo_set()
    wbos = [w for w in WBO.all()]

    # Each sort option maps to (key function, descending?); key/reverse
    # replaces the cmp-style comparators removed in Python 3.
    # BUG FIX: 'index' previously sorted ascending; per the Sync 1.0 API
    # (and the other copy of this test), sort=index returns items by
    # DESCENDING sortindex (highest weight first).
    sorts = {
        'oldest': (lambda w: w.modified, False),
        'newest': (lambda w: w.modified, True),
        'index': (lambda w: w.sortindex, True),
    }

    for sort_option, (key_fn, descending) in sorts.items():
        wbos.sort(key=key_fn, reverse=descending)
        expected_ids = [w.wbo_id for w in wbos]
        url = '/sync/1.0/%s/storage/%s?sort=%s' % (p.user_name, c.name,
                                                   sort_option)
        resp = self.app.get(url, headers=ah)
        result_data = simplejson.loads(resp.body)
        self.log.debug("URL %s" % url)
        self.log.debug("EXPECTED %s" % simplejson.dumps(expected_ids))
        self.log.debug("RESULT %s" % resp.body)
        self.assertEqual(expected_ids, result_data)
def tearDown(self):
    """Clean up after unit test"""
    # Is this actually needed, since storage is mocked?
    self.profile.delete()
    # Sweep any remaining WBO and Collection entities out of the store.
    for model in (WBO, Collection):
        for entity in model.all():
            entity.delete()
def test_bulk_update(self):
    """Exercise bulk collection update"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    # NOTE(review): called with no args here, unlike the per-profile
    # calls elsewhere — presumably returns the default fixture's auth
    # header; confirm against build_auth_header's defaults.
    auth_header = self.build_auth_header()
    storage_url = '/sync/1.0/%s/storage/%s' % (p.user_name, c.name)
    self.build_wbo_parents_and_predecessors()

    # One deliberately-invalid record per validation rule, followed by
    # the valid fixture WBOs, so one POST exercises both paths.
    bulk_data = [
        { 'id': '' },
        { 'id': 'foo/bar', 'sortindex': 'abcd' },
        { 'id': 'a-1000', 'sortindex': -1000000000 },
        { 'id': 'a-1001', 'sortindex': 1000000000 },
        { 'id': 'a-1002', 'parentid': 'notfound' },
        { 'id': 'a-1003', 'predecessorid': 'notfound' },
        { 'id': 'a-1004', 'payload': 'invalid' },
    ]
    bulk_data.extend(self.wbo_values)

    self.log.debug("DATA %s" % simplejson.dumps(bulk_data))

    resp = self.app.post(
        storage_url, headers=auth_header,
        params=simplejson.dumps(bulk_data)
    )

    self.assertEqual('200 OK', resp.status)
    result_data = simplejson.loads(resp.body)
    self.log.debug("RESULT %s" % resp.body)

    # The reported modified timestamp must not be in the future.
    self.assert_(WBO.get_time_now() >= float(result_data['modified']))

    # All valid fixture WBOs should be reported as successes, in order.
    expected_ids = [ w['id'] for w in self.wbo_values ]
    self.assertEqual(expected_ids, result_data['success'])

    # Each invalid record should be reported with its validation errors.
    expected_failures = {
        "": ["invalid id"],
        "a-1004": ["payload needs to be json-encoded"],
        "a-1003": ["invalid predecessorid"],
        "a-1002": ["invalid parentid"],
        "a-1001": ["invalid sortindex"],
        "a-1000": ["invalid sortindex"],
        "foo/bar": ["invalid id", "invalid sortindex"]
    }
    self.assertEqual(expected_failures, result_data['failed'])

    # Every successful id must actually be present in the datastore.
    stored_ids = [ w.wbo_id for w in WBO.all() ]
    for wbo_id in expected_ids:
        self.assert_(wbo_id in stored_ids)
def test_deletion_by_multiple_ids(self):
    """Exercise bulk deletion with a set of IDs"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    wbos = self.build_wbo_set()
    wbo_ids = [w.wbo_id for w in wbos]

    # Delete the first half of the WBOs by id. Floor division (`//`)
    # keeps the slice bound an int under Python 3 as well.
    to_delete_ids = wbo_ids[0:len(wbo_ids) // 2]
    url = '/sync/1.0/%s/storage/%s?ids=%s' % (p.user_name, c.name,
                                              ','.join(to_delete_ids))
    resp = self.app.delete(url, headers=ah)
    self.assertEqual('200 OK', resp.status)
    # The response body is the deletion timestamp; it must not be in
    # the future.
    self.assert_(WBO.get_time_now() >= float(resp.body))

    # None of the deleted ids should remain in the datastore.
    result_ids = [w.wbo_id for w in WBO.all()]
    for wbo_id in to_delete_ids:
        self.assert_(wbo_id not in result_ids)
def test_deletion_by_multiple_ids(self):
    """Exercise bulk deletion with a set of IDs"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    wbos = self.build_wbo_set()
    wbo_ids = [w.wbo_id for w in wbos]

    # Floor division: `len(wbo_ids) / 2` is a float under Python 3 and
    # would raise TypeError when used as a slice index.
    to_delete_ids = wbo_ids[0:len(wbo_ids) // 2]
    url = '/sync/1.0/%s/storage/%s?ids=%s' % (
        p.user_name, c.name, ','.join(to_delete_ids)
    )
    resp = self.app.delete(url, headers=ah)
    self.assertEqual('200 OK', resp.status)
    # Response body is the deletion timestamp; must not be in the future.
    self.assert_(WBO.get_time_now() >= float(resp.body))

    # Verify the deleted ids are gone from the datastore.
    result_ids = [w.wbo_id for w in WBO.all()]
    for wbo_id in to_delete_ids:
        self.assert_(wbo_id not in result_ids)
def test_bulk_update(self):
    """Exercise bulk collection update"""
    (p, c, ah) = (self.profile, self.collection, self.auth_header)
    # NOTE(review): no-arg call, unlike the per-profile calls elsewhere —
    # presumably yields the default fixture's auth header; confirm.
    auth_header = self.build_auth_header()
    storage_url = '/sync/1.0/%s/storage/%s' % (p.user_name, c.name)
    self.build_wbo_parents_and_predecessors()

    # One deliberately-invalid record per validation rule, plus the
    # valid fixture WBOs, so a single POST hits both paths.
    bulk_data = [
        { 'id': '' },
        { 'id': 'foo/bar', 'sortindex': 'abcd' },
        { 'id': 'a-1000', 'sortindex': -1000000000 },
        { 'id': 'a-1001', 'sortindex': 1000000000 },
        { 'id': 'a-1002', 'parentid': 'notfound' },
        { 'id': 'a-1003', 'predecessorid': 'notfound' },
        { 'id': 'a-1004', 'payload': 'invalid' },
    ]
    bulk_data.extend(self.wbo_values)

    self.log.debug("DATA %s" % simplejson.dumps(bulk_data))

    resp = self.app.post(storage_url, headers=auth_header,
                         params=simplejson.dumps(bulk_data))

    self.assertEqual('200 OK', resp.status)
    result_data = simplejson.loads(resp.body)
    self.log.debug("RESULT %s" % resp.body)

    # Reported modified timestamp must not be in the future.
    self.assert_(WBO.get_time_now() >= float(result_data['modified']))

    # All valid fixture WBOs should be reported as successes, in order.
    expected_ids = [w['id'] for w in self.wbo_values]
    self.assertEqual(expected_ids, result_data['success'])

    # Each invalid record is reported with its validation error(s).
    expected_failures = {
        "": ["invalid id"],
        "a-1004": ["payload needs to be json-encoded"],
        "a-1003": ["invalid predecessorid"],
        "a-1002": ["invalid parentid"],
        "a-1001": ["invalid sortindex"],
        "a-1000": ["invalid sortindex"],
        "foo/bar": ["invalid id", "invalid sortindex"]
    }
    self.assertEqual(expected_failures, result_data['failed'])

    # Every successful id must actually be present in the datastore.
    stored_ids = [w.wbo_id for w in WBO.all()]
    for wbo_id in expected_ids:
        self.assert_(wbo_id in stored_ids)
def test_collection_counts_and_timestamps(self):
    """Exercise collection counts and timestamps"""
    profile = Profile(user_name='tester-1', user_id='8675309',
                      password='******')
    profile.put()
    auth_header = self.build_auth_header(profile.user_name,
                                         profile.password)

    expected_count_all = 0
    # Per-collection WBO counts to create; mixes standard Sync
    # collection names with arbitrary ones (foo/bar/baz).
    expected_counts = {
        'clients': 2, 'crypto': 0, 'forms': 6, 'history': 0, 'keys': 10,
        'meta': 12, 'bookmarks': 14, 'prefs': 16, 'tabs': 18,
        'passwords': 20, 'foo': 12, 'bar': 14, 'baz': 16
    }
    # Last-modified timestamp observed per collection while inserting.
    expected_dates = {}

    # Insert objects with random contents to satisfy the expected counts
    for collection_name, curr_count in expected_counts.items():
        base_url = '/sync/1.0/%s/storage/%s' % (profile.user_name,
                                                collection_name)
        for i in range(curr_count):
            resp = self.put_random_wbo(base_url, auth_header)
            # Each PUT response body is the new modified timestamp.
            expected_dates[collection_name] = float(resp.body)
            expected_count_all += 1

    # Ensure the counts match expected
    resp = self.app.get('/sync/1.0/%s/info/collection_counts' %
                        (profile.user_name), headers=auth_header)
    resp_data = simplejson.loads(resp.body)
    self.assertEqual(expected_counts, resp_data)

    # Ensure all timestamps are same or newer than expected.
    resp = self.app.get('/sync/1.0/%s/info/collections' %
                        (profile.user_name), headers=auth_header)
    resp_data = simplejson.loads(resp.body)
    for k, v in expected_dates.items():
        self.assert_(k in resp_data)
        self.assert_(resp_data[k] >= expected_dates[k])

    # Verify the count of all objects after creating
    result_count = WBO.all().count()
    self.assertEqual(expected_count_all, result_count)

    # Delete each collection and verify the count after
    for collection_name, curr_count in expected_counts.items():
        url = '/sync/1.0/%s/storage/%s' % (profile.user_name,
                                           collection_name)
        resp = self.app.delete(url, headers=auth_header)
        # DELETE responds with the deletion timestamp; it must not be
        # in the future.
        self.assert_(WBO.get_time_now() >= float(resp.body))
        expected_count_all -= curr_count
        result_count = WBO.all().count()
        self.assertEqual(expected_count_all, result_count)

    # No WBOs should be left after all collections deleted.
    result_count = WBO.all().count()
    self.assertEqual(0, result_count)
def test_collection_counts_and_timestamps(self):
    """Exercise collection counts and timestamps"""
    # NOTE(review): this method is defined twice in the file; the later
    # definition shadows the earlier one at class-creation time.
    profile = Profile(user_name='tester-1', user_id='8675309',
                      password='******')
    profile.put()
    auth_header = self.build_auth_header(
        profile.user_name, profile.password
    )

    expected_count_all = 0
    # Number of WBOs to create per collection; mixes standard Sync
    # collection names with arbitrary ones (foo/bar/baz).
    expected_counts = {
        'clients': 2, 'crypto': 0, 'forms': 6, 'history': 0, 'keys': 10,
        'meta': 12, 'bookmarks': 14, 'prefs': 16, 'tabs': 18,
        'passwords': 20, 'foo': 12, 'bar': 14, 'baz': 16
    }
    # Last-modified timestamp observed per collection while inserting.
    expected_dates = {}

    # Insert objects with random contents to satisfy the expected counts
    for collection_name, curr_count in expected_counts.items():
        base_url = '/sync/1.0/%s/storage/%s' % (
            profile.user_name, collection_name
        )
        for i in range(curr_count):
            resp = self.put_random_wbo(base_url, auth_header)
            # Each PUT response body is the new modified timestamp.
            expected_dates[collection_name] = float(resp.body)
            expected_count_all += 1

    # Ensure the counts match expected
    resp = self.app.get(
        '/sync/1.0/%s/info/collection_counts' % (profile.user_name),
        headers=auth_header
    )
    resp_data = simplejson.loads(resp.body)
    self.assertEqual(expected_counts, resp_data)

    # Ensure all timestamps are same or newer than expected.
    resp = self.app.get(
        '/sync/1.0/%s/info/collections' % (profile.user_name),
        headers=auth_header
    )
    resp_data = simplejson.loads(resp.body)
    for k, v in expected_dates.items():
        self.assert_(k in resp_data)
        self.assert_(resp_data[k] >= expected_dates[k])

    # Verify the count of all objects after creating
    result_count = WBO.all().count()
    self.assertEqual(expected_count_all, result_count)

    # Delete each collection and verify the count after
    for collection_name, curr_count in expected_counts.items():
        url = '/sync/1.0/%s/storage/%s' % (
            profile.user_name, collection_name
        )
        resp = self.app.delete(url, headers=auth_header)
        # DELETE responds with the deletion timestamp; it must not be
        # in the future.
        self.assert_(WBO.get_time_now() >= float(resp.body))
        expected_count_all -= curr_count
        result_count = WBO.all().count()
        self.assertEqual(expected_count_all, result_count)

    # No WBOs should be left after all collections deleted.
    result_count = WBO.all().count()
    self.assertEqual(0, result_count)