class GpdbVerifyRegressionTests(unittest.TestCase):
    """Regression tests for the GpdbVerify utility wrapper.

    Each test drives one public entry point of GpdbVerify against a
    scratch `gptest` database created in setUp and dropped in tearDown.
    """

    def __init__(self, methodName):
        # One shared verifier instance per test-case object.
        self.gpv = GpdbVerify()
        super(GpdbVerifyRegressionTests, self).__init__(methodName)

    def setUp(self):
        # Fresh scratch database for every test.
        PSQL.run_sql_command('create database gptest;', dbname='postgres')

    def tearDown(self):
        PSQL.run_sql_command('drop database gptest', dbname='postgres')

    def test_gpcheckcat(self):
        # Only the error code matters here; the other three results
        # (has-error flag, output, repair-script dir) are ignored.
        errcode, _haserr, _out, _repairdir = self.gpv.gpcheckcat()
        self.assertIn(errcode, (0, 1, 2))

    def test_gpcheckmirrorseg(self):
        ok, _fix_file = self.gpv.gpcheckmirrorseg()
        self.assertIn(ok, (True, False))

    def test_check_db_is_running(self):
        self.assertTrue(self.gpv.check_db_is_running())

    def test_run_repairscript(self):
        script = local_path('gpcheckcat_repair')
        outcome = self.gpv.run_repair_script(script)
        self.assertIn(outcome, (True, False))

    def test_ignore_extra_m(self):
        fixfile = local_path('fix_file')
        outcome = self.gpv.ignore_extra_m(fixfile)
        self.assertIn(outcome, (True, False))
class DbStateClass(MPPTestCase):
    """Cluster-health helper: checks segment sync state, catalog
    consistency (gpcheckcat) and mirror integrity."""

    def __init__(self, methodName, config=None):
        # Fall back to a default GPDBConfig when none is supplied.
        self.config = config if config is not None else GPDBConfig()
        self.gpverify = GpdbVerify(config=self.config)
        super(DbStateClass, self).__init__(methodName)

    def check_system(self):
        '''
        @summary: Check whether the system is up and sync. Exit out if not
        '''
        cmd = "select count(*) from gp_segment_configuration where content<> -1 ;"
        count_all = PSQL.run_sql_command(cmd, flags='-q -t', dbname='postgres')
        cmd = "select count(*) from gp_segment_configuration where content<> -1 and mode = 's' and status = 'u';"
        count_up_and_sync = PSQL.run_sql_command(cmd, flags='-q -t', dbname='postgres')
        # Every non-master segment must be up ('u') and in sync ('s').
        if count_all.strip() != count_up_and_sync.strip():
            raise Exception('The cluster is not in up/sync ............')
        tinctest.logger.info("\n Starting New Test: System is up and in sync .........")

    def check_catalog(self, dbname=None, alldb=True, online=False,
                      testname=None, outputFile=None, host=None, port=None):
        '''1. Run gpcheckcat'''
        result = self.gpverify.gpcheckcat(dbname=dbname, alldb=alldb,
                                          online=online, testname=testname,
                                          outputFile=outputFile, host=host,
                                          port=port)
        (errorCode, hasError, gpcheckcat_output, repairScriptDir) = result
        if errorCode != 0:
            raise Exception('GpCheckcat failed with errcode %s ' % (errorCode))

    def check_mirrorintegrity(self, master=False):
        '''Runs checkmirrorintegrity(default), check_mastermirrorintegrity(when master=True) '''
        checkmirror, fix_outfile = self.gpverify.gpcheckmirrorseg(master=master)
        if not checkmirror:
            self.fail('Checkmirrorseg failed. Fix file location : %s' % fix_outfile)
        tinctest.logger.info('Successfully completed integrity check')

    def run_validation(self):
        '''
        1. gpcheckcat
        2. checkmirrorintegrity
        3. check_mastermirrorintegrity
        '''
        self.check_catalog()
        self.check_mirrorintegrity()
        # Master standby integrity only applies when a master mirror exists.
        if self.config.has_master_mirror():
            self.check_mirrorintegrity(master=True)
class GpdbVerifyRegressionTests(unittest.TestCase):
    """Regression tests for the GpdbVerify utility wrapper.

    Covers gpcheckcat, mirror-segment checks, repair-script execution,
    and the day-old-output-file cleanup behaviour. Each test uses a
    scratch `gptest` database created in setUp and dropped in tearDown.
    """

    def __init__(self, methodName):
        self.gpv = GpdbVerify()
        super(GpdbVerifyRegressionTests, self).__init__(methodName)

    def setUp(self):
        PSQL.run_sql_command('create database gptest;', dbname='postgres')

    def tearDown(self):
        PSQL.run_sql_command('drop database gptest', dbname='postgres')

    def test_gpcheckcat(self):
        (a, b, c, d) = self.gpv.gpcheckcat()
        # Error code: 0 = clean, 1/2 = known non-fatal outcomes.
        self.assertIn(a, (0, 1, 2))

    def test_gpcheckmirrorseg(self):
        (res, fix_file) = self.gpv.gpcheckmirrorseg()
        self.assertIn(res, (True, False))

    def test_check_db_is_running(self):
        self.assertTrue(self.gpv.check_db_is_running())

    def test_run_repairscript(self):
        repair_script = local_path('gpcheckcat_repair')
        res = self.gpv.run_repair_script(repair_script)
        self.assertIn(res, (True, False))

    def test_ignore_extra_m(self):
        fix_file = local_path('fix_file')
        res = self.gpv.ignore_extra_m(fix_file)
        self.assertIn(res, (True, False))

    def test_cleanup_old_file(self):
        # Fabricate a timestamp far enough in the past (>1 day) that the
        # cleanup routine should remove the file.
        old_time = int(time.strftime("%Y%m%d%H%M%S")) - 1005000
        old_file = local_path('checkmirrorsegoutput_%s' % old_time)
        # BUGFIX: close the handle instead of leaking it; an open handle
        # can also block deletion on some platforms.
        open(old_file, 'w').close()
        self.gpv.cleanup_day_old_out_files(local_path(''))
        self.assertFalse(os.path.isfile(old_file))

    def test_not_cleanup_todays_file(self):
        # A file stamped with the current time must survive the cleanup.
        new_file = local_path('checkmirrorsegoutput_%s'
                              % time.strftime("%Y%m%d%H%M%S"))
        # BUGFIX: close the handle instead of leaking it.
        open(new_file, 'w').close()
        self.gpv.cleanup_day_old_out_files(local_path(''))
        self.assertTrue(os.path.isfile(new_file))
class GpdbVerifyRegressionTests(unittest.TestCase):
    """Regression tests for the GpdbVerify utility wrapper.

    Exercises catalog checking, mirror-segment verification, repair
    scripts, and cleanup of day-old output files against a scratch
    `gptest` database (created per test, dropped afterwards).
    """

    def __init__(self, methodName):
        self.gpv = GpdbVerify()
        super(GpdbVerifyRegressionTests, self).__init__(methodName)

    def setUp(self):
        PSQL.run_sql_command('create database gptest;', dbname='postgres')

    def tearDown(self):
        PSQL.run_sql_command('drop database gptest', dbname='postgres')

    def test_gpcheckcat(self):
        (a, b, c, d) = self.gpv.gpcheckcat()
        # Error code: 0 = clean, 1/2 = known non-fatal outcomes.
        self.assertIn(a, (0, 1, 2))

    def test_gpcheckmirrorseg(self):
        (res, fix_file) = self.gpv.gpcheckmirrorseg()
        self.assertIn(res, (True, False))

    def test_check_db_is_running(self):
        self.assertTrue(self.gpv.check_db_is_running())

    def test_run_repairscript(self):
        repair_script = local_path('gpcheckcat_repair')
        res = self.gpv.run_repair_script(repair_script)
        self.assertIn(res, (True, False))

    def test_ignore_extra_m(self):
        fix_file = local_path('fix_file')
        res = self.gpv.ignore_extra_m(fix_file)
        self.assertIn(res, (True, False))

    def test_cleanup_old_file(self):
        # Fabricate a timestamp >1 day in the past so the cleanup
        # routine is expected to delete the file.
        old_time = int(time.strftime("%Y%m%d%H%M%S")) - 1005000
        old_file = local_path('checkmirrorsegoutput_%s' % old_time)
        # BUGFIX: close the handle instead of leaking it; an open handle
        # can also block deletion on some platforms.
        open(old_file, 'w').close()
        self.gpv.cleanup_day_old_out_files(local_path(''))
        self.assertFalse(os.path.isfile(old_file))

    def test_not_cleanup_todays_file(self):
        # A file stamped with the current time must survive the cleanup.
        new_file = local_path('checkmirrorsegoutput_%s'
                              % time.strftime("%Y%m%d%H%M%S"))
        # BUGFIX: close the handle instead of leaking it.
        open(new_file, 'w').close()
        self.gpv.cleanup_day_old_out_files(local_path(''))
        self.assertTrue(os.path.isfile(new_file))
class DbStateClass(MPPTestCase):
    """Health checks for a GPDB cluster: segment up/sync state,
    catalog consistency via gpcheckcat, and mirror integrity."""

    def __init__(self, methodName, config=None):
        # Use the supplied config when given, otherwise build a default one.
        if config is None:
            config = GPDBConfig()
        self.config = config
        self.gpverify = GpdbVerify(config=self.config)
        super(DbStateClass, self).__init__(methodName)

    def check_system(self):
        '''
        @summary: Check whether the system is up and sync. Exit out if not
        '''
        total_sql = "select count(*) from gp_segment_configuration where content<> -1 ;"
        synced_sql = "select count(*) from gp_segment_configuration where content<> -1 and mode = 's' and status = 'u';"
        total = PSQL.run_sql_command(total_sql, flags='-q -t', dbname='postgres')
        synced = PSQL.run_sql_command(synced_sql, flags='-q -t', dbname='postgres')
        # All non-master segments must report mode 's' and status 'u'.
        if total.strip() == synced.strip():
            tinctest.logger.info(
                "\n Starting New Test: System is up and in sync .........")
        else:
            raise Exception('The cluster is not in up/sync ............')

    def check_catalog(self, dbname=None, alldb=True, online=False,
                      testname=None, outputFile=None, host=None, port=None):
        '''1. Run gpcheckcat'''
        errorCode, _hasError, _output, _repairDir = self.gpverify.gpcheckcat(
            dbname=dbname, alldb=alldb, online=online, testname=testname,
            outputFile=outputFile, host=host, port=port)
        if errorCode != 0:
            raise Exception('GpCheckcat failed with errcode %s ' % errorCode)

    def check_mirrorintegrity(self, master=False):
        '''Runs checkmirrorintegrity(default), check_mastermirrorintegrity(when master=True) '''
        passed, fix_outfile = self.gpverify.gpcheckmirrorseg(master=master)
        if not passed:
            self.fail('Checkmirrorseg failed. Fix file location : %s'
                      % fix_outfile)
        tinctest.logger.info('Successfully completed integrity check')

    def run_validation(self):
        '''
        1. gpcheckcat
        2. checkmirrorintegrity
        3. check_mastermirrorintegrity
        '''
        self.check_catalog()
        self.check_mirrorintegrity()
        # Only check the master's mirror when one is configured.
        if self.config.has_master_mirror():
            self.check_mirrorintegrity(master=True)