def _add_index_entries_async(tag, entries):
  """Adds TagIndexEntries to one TagIndex.

  entries is [[bucket_id, build_id]].

  Returns True if made changes.
  """
  idx_key = search.TagIndex.random_shard_key(tag)
  idx = (yield idx_key.get_async()) or search.TagIndex(key=idx_key)
  if idx.permanently_incomplete:
    # No point in adding entries to an incomplete index.
    raise ndb.Return(False)
  existing = {e.build_id for e in idx.entries}
  added = False
  for bucket_id, build_id in entries:
    if build_id not in existing:
      if len(idx.entries) >= search.TagIndex.MAX_ENTRY_COUNT:
        logging.warning(
            ('refusing to store more than %d entries in TagIndex(%s); '
             'marking as incomplete.'), search.TagIndex.MAX_ENTRY_COUNT,
            idx_key.id())
        idx.permanently_incomplete = True
        idx.entries = []
        yield idx.put_async()
        raise ndb.Return(True)
      idx.entries.append(
          search.TagIndexEntry(bucket_id=bucket_id, build_id=build_id))
      added = True
  if not added:
    raise ndb.Return(False)
  yield idx.put_async()
  raise ndb.Return(True)
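# A minimal, dependency-free sketch (not part of the original module) of the
# dedup-and-cap behavior implemented by _add_index_entries_async above: new
# [bucket_id, build_id] pairs are skipped when the build is already indexed,
# and once the cap is exceeded the index is emptied and marked permanently
# incomplete. The names _MAX_ENTRY_COUNT and _simulate_add are hypothetical
# stand-ins for illustration only.

_MAX_ENTRY_COUNT = 1000  # assumed value; the real cap is search.TagIndex.MAX_ENTRY_COUNT


def _simulate_add(index, pairs):
  """index is a dict {'incomplete': bool, 'entries': [(bucket_id, build_id)]}."""
  if index['incomplete']:
    return False
  existing = {build_id for _, build_id in index['entries']}
  added = False
  for bucket_id, build_id in pairs:
    if build_id in existing:
      continue
    if len(index['entries']) >= _MAX_ENTRY_COUNT:
      # Overflow: drop all entries and mark the index permanently incomplete,
      # mirroring the warning branch in _add_index_entries_async.
      index['incomplete'] = True
      index['entries'] = []
      return True
    index['entries'].append((bucket_id, build_id))
    added = True
  return added


# Example: filling past the cap flips the index to incomplete and empties it.
#   idx = {'incomplete': False, 'entries': []}
#   _simulate_add(idx, [('chromium/try', i) for i in xrange(_MAX_ENTRY_COUNT + 1)])
#   assert idx['incomplete'] and idx['entries'] == []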
def test_buildset_index_existing(self):
  # The index already contains two entries; adding the build makes three.
  search.TagIndex(
      id='buildset:foo',
      entries=[
          search.TagIndexEntry(
              build_id=int(2**63 - 1),
              bucket_id='chromium/try',
          ),
          search.TagIndexEntry(
              build_id=0,
              bucket_id='chromium/try',
          ),
      ]).put()
  build = self.add(dict(tags=[dict(key='buildset', value='foo')]))

  index = search.TagIndex.get_by_id('buildset:foo')
  self.assertIsNotNone(index)
  self.assertEqual(len(index.entries), 3)
  self.assertIn(build.key.id(), [e.build_id for e in index.entries])
  self.assertIn(build.bucket_id, [e.bucket_id for e in index.entries])
def test_filter_by_with_legacy_index(self):
  build = test_util.build(id=1)
  build.put()
  idx = search.TagIndex(
      id=self.INDEXED_TAG,
      entries=[
          search.TagIndexEntry(build_id=1),
          # This entry will be deleted, because bucket_id could not be
          # resolved.
          search.TagIndexEntry(build_id=123),
      ],
  )
  idx.put()

  builds, _ = self.search(tags=[self.INDEXED_TAG])
  self.assertEqual(builds, [build])

  idx = idx.key.get()
  self.assertEqual(len(idx.entries), 1)
  self.assertEqual(idx.entries[0].bucket_id, 'chromium/try')
def test_filter_by_with_dup_tag_entries(self):
  build = test_util.build(id=1)
  build.put()

  # The same entry is stored twice; search must return the build only once.
  entry = search.TagIndexEntry(build_id=1, bucket_id='chromium/try')
  search.TagIndex(
      id=self.INDEXED_TAG,
      entries=[entry, entry],
  ).put()

  builds, _ = self.search(tags=[self.INDEXED_TAG])
  self.assertEqual(builds, [build])
def test_flush_entries(self):
  search.TagIndex(
      id='buildset:0',
      entries=[
          search.TagIndexEntry(bucket_id='chromium/try', build_id=51),
      ]).put()
  search.TagIndex(
      id='buildset:2',
      entries=[
          search.TagIndexEntry(bucket_id='chromium/try', build_id=1),
          search.TagIndexEntry(bucket_id='chromium/try', build_id=100),
      ]).put()

  backfill_tag_index._flush_entries(
      'buildset',
      {
          '0': [['chromium/try', 51]],
          '1': [['chromium/try', 52]],
          '2': [['chromium/try', 50]],
      },
  )

  idx0 = search.TagIndex.get_by_id('buildset:0')
  self.assertIsNotNone(idx0)
  self.assertEqual(len(idx0.entries), 1)
  self.assertEqual(idx0.entries[0].build_id, 51)

  idx1 = search.TagIndex.get_by_id('buildset:1')
  self.assertIsNotNone(idx1)
  self.assertEqual(len(idx1.entries), 1)
  self.assertEqual(idx1.entries[0].build_id, 52)

  idx2 = search.TagIndex.get_by_id('buildset:2')
  self.assertIsNotNone(idx2)
  self.assertEqual(len(idx2.entries), 3)
  self.assertEqual({e.build_id for e in idx2.entries}, {1, 50, 100})
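# A simplified, dependency-free model (an assumption, not the original
# implementation) of the merge semantics the test above asserts:
# backfill_tag_index._flush_entries takes a tag key plus a mapping of
# tag value -> [bucket_id, build_id] pairs and merges each list into the
# index identified by '<key>:<value>', creating it when missing and skipping
# builds that are already indexed. The real code writes TagIndex entities;
# here a plain dict stands in for the datastore.

def _flush_entries_sketch(tag_key, new_entries, indexes):
  """indexes maps index id -> list of (bucket_id, build_id) pairs."""
  for tag_value, pairs in new_entries.iteritems():
    idx_id = '%s:%s' % (tag_key, tag_value)
    merged = indexes.setdefault(idx_id, [])
    existing = {build_id for _, build_id in merged}
    merged.extend(
        (bucket_id, build_id)
        for bucket_id, build_id in pairs
        if build_id not in existing)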
def put_build(self, **build_proto_fields):
  """Puts a build and updates the tag index."""
  build_proto_fields.setdefault('id', model.create_build_ids(self.now, 1)[0])
  self.now += datetime.timedelta(seconds=1)

  build = test_util.build(**build_proto_fields)
  build.put()

  index_entry = search.TagIndexEntry(
      build_id=build.key.id(),
      bucket_id=build.bucket_id,
  )
  for t in search.indexed_tags(build.tags):
    search.add_to_tag_index_async(t, [index_entry]).get_result()
  return build
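# Hedged usage sketch for the helper above; the proto fields follow the
# 'buildset' tag shape used elsewhere in these tests and are not verified
# against the build proto definition:
#
#   build = self.put_build(tags=[dict(key='buildset', value='foo')])
#   # Every indexed tag of the build now has a TagIndexEntry pointing at it.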
def test_add_too_many_to_index(self):
  limit = search.TagIndex.MAX_ENTRY_COUNT
  entries = [
      search.TagIndexEntry(build_id=i, bucket_id='chromium/try')
      for i in xrange(limit * 2)
  ]
  tag = 'a:b'
  index_key = search.TagIndex.make_key(0, tag)

  # Filling the index exactly to the limit keeps it complete.
  search.add_to_tag_index_async(tag, entries[:limit]).get_result()
  self.assertFalse(index_key.get().permanently_incomplete)

  # Exceeding the limit marks the index permanently incomplete.
  search.add_to_tag_index_async(tag, entries[limit:]).get_result()
  self.assertTrue(index_key.get().permanently_incomplete)

  # Further additions leave it permanently incomplete.
  search.add_to_tag_index_async(tag, entries[limit:]).get_result()
  self.assertTrue(index_key.get().permanently_incomplete)