def impl(context, seg, contents):
    """Verify each listed content's configuration history gained exactly one
    backout entry for the given segment type ("primary" or "mirror")."""
    # Translate the human-readable segment type into the catalog role code.
    if seg == "primary":
        role = 'p'
    elif seg == "mirror":
        role = 'm'
    else:
        raise Exception('Valid segment types are "primary" and "mirror"')

    for content in contents.split(','):
        with closing(dbconn.connect(dbconn.DbURL(), unsetSearchPath=False)) as conn:
            dbid = dbconn.querySingleton(
                conn,
                "SELECT dbid FROM gp_segment_configuration WHERE content = %s AND preferred_role = '%s'" % (content, role))
        with closing(dbconn.connect(dbconn.DbURL(), unsetSearchPath=False)) as conn:
            actual_tuples = dbconn.querySingleton(
                conn,
                "SELECT count(*) FROM gp_configuration_history WHERE dbid = %d AND gp_configuration_history.desc LIKE 'gprecoverseg: segment config for backout%%';" % dbid)
        original_tuples = context.original_config_history_info["{}_{}".format(content, role)]
        # Running the backout script should have inserted exactly 1 entry
        if actual_tuples != original_tuples + 1:
            raise Exception(
                "Expected configuration history table for dbid {} to contain {} backout entries, found {}"
                .format(dbid, original_tuples + 1, actual_tuples))
def impl(context, contents):
    """Record, on *context*, the current segment objects and backout-entry
    counts for the requested contents so later steps can diff against them."""
    wanted = set(contents.split(','))
    context.original_seg_info = {}
    context.original_config_history_info = {}
    context.original_config_history_backout_count = 0

    for seg in GpArray.initFromCatalog(dbconn.DbURL()).getDbList():
        content = str(seg.getSegmentContentId())
        if content not in wanted:
            continue
        preferred_role = seg.getSegmentPreferredRole()
        key = "{}_{}".format(content, preferred_role)
        context.original_seg_info[key] = seg
        with closing(dbconn.connect(dbconn.DbURL(), unsetSearchPath=False)) as conn:
            dbid = dbconn.querySingleton(
                conn,
                "SELECT dbid FROM gp_segment_configuration WHERE content = %s AND "
                "preferred_role = '%s'" % (content, preferred_role))
        # Per-dbid count of existing backout entries, keyed like original_seg_info.
        with closing(dbconn.connect(dbconn.DbURL(), unsetSearchPath=False)) as conn:
            context.original_config_history_info[key] = dbconn.querySingleton(
                conn,
                "SELECT count(*) FROM gp_configuration_history WHERE dbid = %d AND "
                "gp_configuration_history.desc LIKE 'gprecoverseg: segment config for backout%%';" % dbid)

    # Cluster-wide count of backout entries, independent of the content filter.
    with closing(dbconn.connect(dbconn.DbURL(), unsetSearchPath=False)) as conn:
        context.original_config_history_backout_count = dbconn.querySingleton(
            conn,
            "SELECT count(*) FROM gp_configuration_history WHERE "
            "gp_configuration_history.desc LIKE 'gprecoverseg: segment config for backout%%';")
def check_count_for_specific_query(dbname, query, nrows):
    """Run *query* (expected to return a single scalar) against *dbname* and
    raise unless the result equals *nrows*.

    Connection/query errors deliberately propagate so that a missing table
    fails the test loudly.
    """
    # Fix: the original built NUM_ROWS_QUERY via a redundant "'%s' % query",
    # which adds nothing and would misbehave if the query itself contained
    # additional %-format directives paired with stray arguments. Use the
    # query string directly.
    with closing(dbconn.connect(dbconn.DbURL(dbname=dbname), unsetSearchPath=False)) as conn:
        result = dbconn.querySingleton(conn, query)
    if result != nrows:
        raise Exception('%d rows in query: %s. Expected row count = %d' % (result, query, nrows))
def has_expected_status(content, preferred_role, expected_status):
    """Return True iff the catalog status for (content, preferred_role)
    equals *expected_status*.

    Fix: wrap the connection in closing(). A bare "with conn" on a DB-API
    connection manages the transaction, not the connection lifetime, so the
    original leaked the connection; this also matches the closing() idiom
    used everywhere else in this file.
    """
    with closing(dbconn.connect(dbconn.DbURL(dbname="template1"), unsetSearchPath=False)) as conn:
        status = dbconn.querySingleton(
            conn,
            "SELECT status FROM gp_segment_configuration WHERE content = %s AND preferred_role = '%s'" % (content, preferred_role))
    return status == expected_status
def execute_sql_singleton(dbname, sql):
    """Execute *sql* against *dbname* and return its single scalar result,
    raising if the query produced no value."""
    with closing(dbconn.connect(dbconn.DbURL(dbname=dbname), unsetSearchPath=False)) as conn:
        result = dbconn.querySingleton(conn, sql)
    if result is None:
        raise Exception("error running query: %s" % sql)
    return result
def impl(context, tablename, dbname):
    """Assert that the root of partition table *tablename* has statistics
    recorded in pg_statistic.

    Fix: replace the manual try/finally conn.close() with the closing()
    context manager used by the rest of this file — same guarantee, one
    consistent idiom.
    """
    with closing(dbconn.connect(dbconn.DbURL(dbname=dbname), unsetSearchPath=False)) as conn:
        query = "select count(*) from pg_statistic where starelid='%s'::regclass;" % tablename
        num_tuples = dbconn.querySingleton(conn, query)
        if num_tuples == 0:
            raise Exception("Expected partition table %s to contain root statistics" % tablename)
def validate_mid_level_partitions(self, schema_name, table_name):
    """Reject mid-level partitions: gpreload supports only leaf partitions or
    the root partition table.

    Compares the table's partitionlevel with the max partitionlevel of its
    root (from self.parent_partition_map); a mismatch means the table sits
    in the middle of the hierarchy. A table that is its own root is accepted
    immediately.

    Fix: replace the manual try/finally conn.close() with closing(), matching
    the connection-handling idiom used elsewhere in this file; the lookup
    logic is unchanged.
    """
    partition_level, max_level = None, None
    with closing(dbconn.connect(dbconn.DbURL(dbname=self.database, port=self.port))) as conn:
        parent_schema, parent_table = self.parent_partition_map[(schema_name, table_name)]
        if (parent_schema, parent_table) == (schema_name, table_name):
            return
        try:
            max_level = dbconn.querySingleton(
                conn,
                """SELECT max(partitionlevel) FROM pg_partitions WHERE tablename='%s' AND schemaname='%s' """ % (parent_table, parent_schema))
        except Exception as e:
            # Best-effort: a failed lookup leaves max_level as None.
            logger.debug('Unable to get the maximum partition level for table %s: (%s)' % (table_name, str(e)))
        try:
            partition_level = dbconn.querySingleton(
                conn,
                """SELECT partitionlevel FROM pg_partitions WHERE partitiontablename='%s' AND partitionschemaname='%s' """ % (table_name, schema_name))
        except Exception as e:
            logger.debug('Unable to get the partition level for table %s: (%s)' % (table_name, str(e)))
        if partition_level != max_level:
            logger.error('Partition level of the table = %s, Max partition level = %s' % (partition_level, max_level))
            raise Exception('Mid Level partition %s.%s is not supported by gpreload. Please specify only leaf partitions or parent table name' % (schema_name, table_name))
def get_redistribute_status(self):
    """Return 0 if the most recent gpexpand.status row is
    'EXPANSION COMPLETE', else 1.

    Fix: the original never closed its connection — wrap it in closing()
    so the connection is released, consistent with the rest of the file.
    """
    sql = 'select status from gpexpand.status order by updated desc limit 1'
    dburl = dbconn.DbURL(dbname=self.database)
    with closing(dbconn.connect(dburl, encoding='UTF8', unsetSearchPath=False)) as conn:
        status = dbconn.querySingleton(conn, sql)
    return 0 if status == 'EXPANSION COMPLETE' else 1
def must_have_expected_status(content, preferred_role, expected_status):
    """Raise unless the catalog status for (content, preferred_role) equals
    *expected_status*.

    Fix: wrap the connection in closing(). A bare "with conn" on a DB-API
    connection only manages the transaction, so the original leaked the
    connection; this also matches the closing() idiom used throughout
    this file.
    """
    with closing(dbconn.connect(dbconn.DbURL(dbname="template1"), unsetSearchPath=False)) as conn:
        status = dbconn.querySingleton(
            conn,
            "SELECT status FROM gp_segment_configuration WHERE content = %s AND preferred_role = '%s'" % (content, preferred_role))
    if status != expected_status:
        raise Exception("Expected status for role %s to be %s, but it is %s" % (preferred_role, expected_status, status))
def check_row_count(context, tablename, dbname, nrows):
    """Assert that *tablename* in *dbname* contains exactly *nrows* rows.

    Exceptions deliberately bubble up so a missing table fails the test.
    """
    num_rows_query = 'select count(*) from %s' % tablename
    # After a standby activation, queries must target the promoted standby.
    if getattr(context, 'standby_was_activated', False) is True:
        dburl = dbconn.DbURL(dbname=dbname, port=context.standby_port,
                             hostname=context.standby_hostname)
    else:
        dburl = dbconn.DbURL(dbname=dbname)
    with closing(dbconn.connect(dburl, unsetSearchPath=False)) as conn:
        result = dbconn.querySingleton(conn, num_rows_query)
    if result != nrows:
        raise Exception('%d rows in table %s.%s, expected row count = %d' % (result, dbname, tablename, nrows))
def impl(context, expected_additional_entries):
    """Assert the cluster-wide backout-entry count grew by exactly
    *expected_additional_entries* since the snapshot taken earlier
    (context.original_config_history_backout_count)."""
    with closing(dbconn.connect(dbconn.DbURL(), unsetSearchPath=False)) as conn:
        actual_backout_entries = int(
            dbconn.querySingleton(
                conn,
                "SELECT count(*) FROM gp_configuration_history WHERE "
                "gp_configuration_history.desc LIKE "
                "'gprecoverseg: segment config for backout%%';"))
    expected_total_entries = int(context.original_config_history_backout_count) + int(expected_additional_entries)
    if actual_backout_entries != expected_total_entries:
        # Fix: format the already-normalized expected_total_entries. The
        # original re-added the raw values here without int() coercion, which
        # raises TypeError (or silently concatenates) when the behave step
        # passes expected_additional_entries as a string.
        raise Exception(
            "Expected configuration history table to have {} backout entries, found {}"
            .format(expected_total_entries, actual_backout_entries))
def validate_table(self, schema_name, table_name):
    """Raise ExceptionNoStackTraceNeeded if schema_name.table_name is absent
    from the catalog."""
    with closing(dbconn.connect(dbconn.DbURL(dbname=self.database, port=self.port))) as conn:
        match_count = dbconn.querySingleton(
            conn,
            """SELECT count(*) FROM pg_class, pg_namespace WHERE pg_namespace.nspname = '{schema}' AND pg_class.relname = '{table}'""".format(schema=schema_name, table=table_name))
        if not match_count:
            raise ExceptionNoStackTraceNeeded(
                'Table {schema}.{table} does not exist'.format(schema=schema_name, table=table_name))
def impl(context, contents):
    """For each given content id, verify that both the primary and mirror
    data directories in the catalog match the ones recorded earlier in
    context.original_seg_info."""
    for content in contents.split(','):
        with closing(dbconn.connect(dbconn.DbURL(dbname='template1'), unsetSearchPath=False)) as conn:
            for role in (ROLE_PRIMARY, ROLE_MIRROR):
                actual_datadir = dbconn.querySingleton(
                    conn,
                    "SELECT datadir FROM gp_segment_configuration WHERE preferred_role='{}' AND "
                    "content = {};".format(role, content))
                expected_datadir = context.original_seg_info["{}_{}".format(content, role)].getSegmentDataDirectory()
                if expected_datadir != actual_datadir:
                    raise Exception(
                        "Expected datadir {}, got {} for content {}".format(
                            expected_datadir, actual_datadir, content))