def test_index_not_dropped_by_init(self):
    """Merely instantiating a MassiveObjectStore must leave schema indexes intact."""
    with self.admin_access.repo_cnx() as cnx:
        # instantiation only; the store is deliberately left unused
        MassiveObjectStore(cnx)
        cnx.commit()
        surviving = all_indexes(cnx)
        for name in ('entities_pkey',
                     build_index_name('owned_by_relation',
                                      ['eid_from', 'eid_to'], 'key_'),
                     build_index_name('owned_by_relation',
                                      ['eid_from'], 'idx_')):
            self.assertIn(name, surviving)
def test_consider_metagen(self):
    """Ensure index on owned_by is not deleted if we don't consider this metadata."""
    with self.admin_access.repo_cnx() as cnx:
        # skip 'owned_by' metadata handling, so its indexes must not be touched
        generator = stores.MetadataGenerator(cnx, meta_skipped=('owned_by',))
        store = MassiveObjectStore(cnx, metagen=generator)
        store._drop_metadata_constraints()
        remaining = all_indexes(cnx)
        expected = (
            build_index_name('owned_by_relation', ['eid_from', 'eid_to'], 'key_'),
            build_index_name('owned_by_relation', ['eid_from'], 'idx_'),
        )
        for index_name in expected:
            self.assertIn(index_name, remaining)
def expected_indexes(cnx):
    """Return a dictionary describing indexes expected by the schema

    {index name: (table, column)}.

    This doesn't include primary key indexes.
    """
    source = cnx.repo.system_source
    dbhelper = source.dbhelper
    schema = cnx.repo.schema
    indexes = SYSTEM_INDEXES.copy()
    # the full-text "appears" table layout depends on the backend
    if source.dbdriver == 'postgres':
        indexes['appears_words_idx'] = ('appears', 'words')
    else:
        indexes['appears_uid'] = ('appears', 'uid')
        indexes['appears_word_id'] = ('appears', 'word_id')
    for rschema in schema.relations():
        if rschema.rule or rschema in PURE_VIRTUAL_RTYPES:
            # computed relation, no table backing it
            continue
        if rschema.final or rschema.inlined:
            # attribute or inlined relation: column lives in the entity table
            for rdef in rschema.rdefs.values():
                table = 'cw_{0}'.format(rdef.subject)
                column = 'cw_{0}'.format(rdef.rtype)
                has_unique = any(isinstance(cstr, UniqueConstraint)
                                 for cstr in rdef.constraints)
                if has_unique:
                    indexes[dbhelper._index_name(table, column, unique=True)] = (
                        table, [column])
                if rschema.inlined or rdef.indexed:
                    indexes[dbhelper._index_name(table, column)] = (table, [column])
        else:
            # regular relation: dedicated <rtype>_relation table
            table = '{0}_relation'.format(rschema)
            if source.dbdriver == 'postgres':
                # index built after the primary key constraint
                indexes[build_index_name(table, ['eid_from', 'eid_to'], 'key_')] = (
                    table, ['eid_from', 'eid_to'])
            indexes[build_index_name(table, ['eid_from'], 'idx_')] = (
                table, ['eid_from'])
            indexes[build_index_name(table, ['eid_to'], 'idx_')] = (
                table, ['eid_to'])
    for eschema in schema.entities():
        if eschema.final:
            continue
        table = 'cw_{0}'.format(eschema)
        for columns, index_name in iter_unique_index_names(eschema):
            indexes[index_name] = (table, columns)
    return indexes
def test_drop_index_recreation(self):
    """Indexes dropped by _drop_metadata_constraints() must reappear after finish()."""
    with self.admin_access.repo_cnx() as cnx:
        store = MassiveObjectStore(cnx)
        store._drop_metadata_constraints()
        key_index = build_index_name('owned_by_relation',
                                     ['eid_from', 'eid_to'], 'key_')
        from_index = build_index_name('owned_by_relation', ['eid_from'], 'idx_')
        # while the store is active, the owned_by indexes are gone ...
        indexes = all_indexes(cnx)
        self.assertIn('entities_pkey', indexes)
        self.assertNotIn(key_index, indexes)
        self.assertNotIn(from_index, indexes)
        # ... and finish() recreates them
        store.finish()
        indexes = all_indexes(cnx)
        self.assertIn('entities_pkey', indexes)
        self.assertIn(key_index, indexes)
        self.assertIn(from_index, indexes)
def test_index_building(self):
    """Schema indexes must be present both after flush() and after finish()."""
    with self.admin_access.repo_cnx() as cnx:
        store = MassiveObjectStore(cnx)
        self.push_geonames_data(self.datapath('geonames.csv'), store)
        store.flush()
        expected = ('entities_pkey',
                    build_index_name('owned_by_relation',
                                     ['eid_from', 'eid_to'], 'key_'),
                    build_index_name('owned_by_relation',
                                     ['eid_from'], 'idx_'))
        # indexes after the flush ...
        indexes = all_indexes(cnx)
        for name in expected:
            self.assertIn(name, indexes)
        # ... and again after final cleanup
        store.finish()
        indexes = all_indexes(cnx)
        for name in expected:
            self.assertIn(name, indexes)
# Migration step: recreate relation-table indexes and constraints so they use
# the build_index_name() naming scheme.  ``sql``, ``rql``, ``cnx``, ``schema``
# and ``source`` are presumably provided by the migration environment — verify
# against the surrounding migration script.
for rschema in schema.relations():
    if rschema.rule or rschema in PURE_VIRTUAL_RTYPES:
        # virtual/computed relation: nothing stored, nothing to index
        continue
    if rschema.final or rschema.inlined:
        # attribute or inlined relation: the column lives in the entity table
        for rdef in rschema.rdefs.values():
            table = 'cw_{0}'.format(rdef.subject)
            column = 'cw_{0}'.format(rdef.rtype)
            if rschema.inlined or rdef.indexed:
                # drop the legacy-named index, then let the source recreate
                # it under its current naming scheme
                old_name = '%s_%s_idx' % (table.lower(), column.lower())
                sql('DROP INDEX IF EXISTS %s' % old_name)
                source.create_index(cnx, table, column)
    else:
        # regular relation: dedicated <rtype>_relation table
        table = '{0}_relation'.format(rschema)
        # primary key named after the new scheme
        sql('ALTER TABLE %s ADD CONSTRAINT %s PRIMARY KEY(eid_from, eid_to)'
            % (table, build_index_name(table, ['eid_from', 'eid_to'], 'key_')))
        for column in ('from', 'to'):
            # replace legacy per-column indexes with new-scheme ones
            sql('DROP INDEX IF EXISTS %s_%s_idx' % (table, column))
            sql('CREATE INDEX %s ON %s(eid_%s);' % (build_index_name(table, ['eid_' + column], 'idx_'), table, column))
# we changed constraint serialization, which also changes their name
for table, cstr in sql(""" SELECT DISTINCT table_name, constraint_name FROM information_schema.constraint_column_usage WHERE constraint_name LIKE 'cstr%'"""):
    sql("ALTER TABLE %(table)s DROP CONSTRAINT %(cstr)s" % locals())
for cwconstraint in rql('Any C WHERE R constrained_by C').entities():
    cwrdef = cwconstraint.reverse_constrained_by[0]
    rdef = cwrdef.yams_schema()
    # NOTE(review): this loop body appears to continue beyond this chunk
# Migration step (duplicate of the variant above, differing only in original
# line wrapping): rebuild indexes/constraints to follow build_index_name().
# ``sql``, ``rql``, ``cnx``, ``schema`` and ``source`` are presumably injected
# by the migration environment — TODO confirm against the migration script.
for rschema in schema.relations():
    if rschema.rule or rschema in PURE_VIRTUAL_RTYPES:
        # computed relation: no backing table
        continue
    if rschema.final or rschema.inlined:
        # attribute / inlined relation: column stored in the entity table
        for rdef in rschema.rdefs.values():
            table = 'cw_{0}'.format(rdef.subject)
            column = 'cw_{0}'.format(rdef.rtype)
            if rschema.inlined or rdef.indexed:
                # drop the old-style index, recreate with the current scheme
                old_name = '%s_%s_idx' % (table.lower(), column.lower())
                sql('DROP INDEX IF EXISTS %s' % old_name)
                source.create_index(cnx, table, column)
    else:
        # regular relation: dedicated <rtype>_relation table
        table = '{0}_relation'.format(rschema)
        sql('ALTER TABLE %s ADD CONSTRAINT %s PRIMARY KEY(eid_from, eid_to)'
            % (table, build_index_name(table, ['eid_from', 'eid_to'], 'key_')))
        for column in ('from', 'to'):
            sql('DROP INDEX IF EXISTS %s_%s_idx' % (table, column))
            sql('CREATE INDEX %s ON %s(eid_%s);' % (build_index_name(
                table, ['eid_' + column], 'idx_'), table, column))
# we changed constraint serialization, which also changes their name
for table, cstr in sql(""" SELECT DISTINCT table_name, constraint_name FROM information_schema.constraint_column_usage WHERE constraint_name LIKE 'cstr%'"""):
    sql("ALTER TABLE %(table)s DROP CONSTRAINT %(cstr)s" % locals())
for cwconstraint in rql('Any C WHERE R constrained_by C').entities():
    cwrdef = cwconstraint.reverse_constrained_by[0]
    rdef = cwrdef.yams_schema()
    # NOTE(review): loop body seems truncated here — continues past this chunk