Example #1
def __init__(self):
    self.stdby = StandbyVerify()
    self.runmixin = StandbyRunMixin()
    self.runmixin.createdb(dbname='walrepl')
    self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
    self.config = GPDBConfig()
    self.pgutil = GpUtility()
    self.host = socket.gethostname()
Example #2
def __init__(self, methodName):
    self.gpact = GpactivateStandby()
    self.gpinit = GpinitStandby()
    self.runmixin = StandbyRunMixin()
    self.runmixin.createdb(dbname=self.dbname)
    self.bkup_timestamp = ""
    self.gphome = os.environ.get('GPHOME')
    self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
    self.pgport = os.environ.get('PGPORT')
    self.host = socket.gethostname()
    self.standby_loc = os.path.split(self.mdd)[0] + self.standby_dirname
    super(BkupRestore, self).__init__(methodName)
Example #3
    def __init__(self):
        self.gpinit = GpinitStandby()
        self.pgutil = GpUtility()
        self.runmixin = StandbyRunMixin()
        self.runmixin.createdb(dbname='walrepl')
        self.gphome = os.environ.get('GPHOME')
        self.pgport = os.environ.get('PGPORT')
        self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
        self.config = GPDBConfig()
        self.host = socket.gethostname()

        dburl = dbconn.DbURL()
        gparray = GpArray.initFromCatalog(dburl, utility=True)
        self.numcontent = gparray.getNumSegmentContents()
        self.orig_master = gparray.master
Example #4
File: __init__.py Project: 50wu/gpdb
def __init__(self,methodName):
    self.gpact = GpactivateStandby()
    self.gpinit = GpinitStandby()
    self.runmixin = StandbyRunMixin()
    self.runmixin.createdb(dbname=self.dbname)
    self.bkup_timestamp = ""
    self.gphome = os.environ.get('GPHOME')
    self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
    self.pgport = os.environ.get('PGPORT')
    self.host = socket.gethostname()
    self.standby_loc = os.path.split(self.mdd)[0]+self.standby_dirname
    super(BkupRestore,self).__init__(methodName)
Example #5
File: verify.py Project: 50wu/gpdb
def __init__(self):
    self.stdby = StandbyRunMixin()
Example #6
File: verify.py Project: 50wu/gpdb
class StandbyVerify(object):
    '''Class for standby verification 
       Disclaimer: Some of these may repeat with the mpp/lib version'''
    def __init__(self):
        self.stdby = StandbyRunMixin()
        

    def _run_remote_command(self, host, command):
        rmt_cmd = "gpssh -h %s -e '%s' " % (host, command)
        cmd = Command(name='Running a remote command', cmdStr = rmt_cmd)
        cmd.run(validateAfter=False)
        result = cmd.get_results()
        tinctest.logger.info('%s\n%s' %(rmt_cmd, result.stdout))
        return result.stdout

    def get_pg_stat_replication(self):
        '''Returns the pg_stat_replication result as a list'''
        if self.stdby.wait_for_walsender() == 0:
            raise WalReplException('Standby Replication has not started')

        with DbConn(utility=True, dbname='template1') as conn:
            return conn.execute("""
                SELECT
                    procpid,
                    usesysid,
                    usename,
                    application_name,
                    client_addr,
                    client_port,
                    backend_start,
                    state,
                    sent_location,
                    write_location,
                    flush_location,
                    replay_location,
                    sync_priority,
                    sync_state
                FROM
                    pg_stat_replication
             """)

    def check_gp_segment_config(self):
        ''' Check for the new entry in gp_segment_configuration'''
        sm_count = PSQL.run_sql_command("select count(*) from gp_segment_configuration where content='-1' and role='m';", flags='-q -t', dbname='postgres')
        if int(sm_count.strip()) != 1:
            return False
        tinctest.logger.info('A new entry is added for standby in gp_segment_configuration')
        return True

    def check_pg_stat_replication(self):
        '''Check the state and sync_state from pg_stat_replication '''
        for i in walrepl.polling(max_try=20, interval=0.5):
            res = self.get_pg_stat_replication()
            if len(res) == 0:
                continue
            elif res[0].state == 'streaming' and res[0].sync_state == 'sync':
                tinctest.logger.info('pg_stat_replication is updated with the information')
                return True
            else:
                continue 
        return False

    def check_standby_processes(self):
        '''Check if all the standby processes are present '''

        # Get hostname and data directory of standby, if any.
        # We could use gparray, but for now let's stay away from gppylib
        with DbConn(dbname='postgres') as conn:
            results = conn.execute("""
                SELECT hostname, fselocation
                FROM gp_segment_configuration
                INNER JOIN pg_filespace_entry ON dbid = fsedbid
                WHERE fsefsoid = 3052 AND content = -1 AND role = 'm'
                """)
            # If standby is not configured, there must not be any standby processes.
            if len(results) == 0:
                return False
            host = results[0].hostname
            datadir = results[0].fselocation

        # We look for these processes that are spawned from standby postmaster.
        # They should have postmaster pid as ppid.  We minimize remote operation
        # cost by getting ps output once, and search for these strings from the
        # output lines using regexp.
        process_list  = ['master logger process', 'startup process', 'wal receiver process']
        target_process = '(' + '|'.join(process_list) + ')'
        postmaster_pid = walrepl.get_postmaster_pid(datadir, host)
        # If the postmaster does not exist, child processes are not present.
        if postmaster_pid == -1:
            return False

        # Make it string for the later comparison
        postmaster_pid = str(postmaster_pid)
        cmd = SmartCmd('ps -e -o ppid,command | grep [p]ostgres', host=host)
        cmd.run()
        standby_processes = []
        for line in cmd.get_results().stdout.splitlines(True):
            line = line.strip()
            (ppid, command) = re.split(r'\s+', line, 1)
            if ppid == postmaster_pid and re.search(target_process, command):
                standby_processes.append(line)

        # If we found more or less than expected, we don't know.
        if len(standby_processes) != len(process_list):
            return False
        tinctest.logger.info('All the standby processes are present at standby host')
        return True
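
The three checks in this class are normally combined to decide whether a standby is healthy. The sketch below shows one way to chain them; it is not part of the original file, the import path is an assumption, and only methods defined in this example are used.

import tinctest
from mpp.gpdb.tests.storage.walrepl.lib.verify import StandbyVerify  # assumed import path

def verify_standby_is_healthy():
    '''Run the three StandbyVerify checks and log each result.'''
    stdby = StandbyVerify()
    checks = {
        'gp_segment_configuration entry': stdby.check_gp_segment_config(),
        'pg_stat_replication streaming/sync': stdby.check_pg_stat_replication(),
        'standby postmaster processes': stdby.check_standby_processes(),
    }
    for name, ok in checks.items():
        tinctest.logger.info('%s: %s' % (name, 'PASS' if ok else 'FAIL'))
    return all(checks.values())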
Example #7
File: verify.py Project: kevinwangl/gpdb
def __init__(self):
    self.stdby = StandbyRunMixin()
Example #8
File: verify.py Project: kevinwangl/gpdb
class StandbyVerify(object):
    '''Class for standby verification 
       Disclaimer: Some of these may repeat with the mpp/lib version'''
    def __init__(self):
        self.stdby = StandbyRunMixin()

    def _run_remote_command(self, host, command):
        rmt_cmd = "gpssh -h %s -e '%s' " % (host, command)
        cmd = Command(name='Running a remote command', cmdStr=rmt_cmd)
        cmd.run(validateAfter=False)
        result = cmd.get_results()
        tinctest.logger.info('%s\n%s' % (rmt_cmd, result.stdout))
        return result.stdout

    def get_pg_stat_replication(self):
        '''Returns the pg_stat_replication result as a list'''
        if self.stdby.wait_for_walsender() == 0:
            raise WalReplException('Standby Replication has not started')

        with DbConn(utility=True, dbname='template1') as conn:
            return conn.execute("""
                SELECT
                    procpid,
                    usesysid,
                    usename,
                    application_name,
                    client_addr,
                    client_port,
                    backend_start,
                    state,
                    sent_location,
                    write_location,
                    flush_location,
                    replay_location,
                    sync_priority,
                    sync_state
                FROM
                    pg_stat_replication
             """)

    def check_gp_segment_config(self):
        ''' Check for the new entry in gp_segment_configuration'''
        sm_count = PSQL.run_sql_command(
            "select count(*) from gp_segment_configuration where content='-1' and role='m';",
            flags='-q -t',
            dbname='postgres')
        if int(sm_count.strip()) != 1:
            return False
        tinctest.logger.info(
            'A new entry is added for standby in gp_segment_configuration')
        return True

    def check_pg_stat_replication(self):
        '''Check the state and sync_state from pg_stat_replication '''
        for i in walrepl.polling(max_try=20, interval=0.5):
            res = self.get_pg_stat_replication()
            if len(res) == 0:
                continue
            elif res[0].state == 'streaming' and res[0].sync_state == 'sync':
                tinctest.logger.info(
                    'pg_stat_replication is updated with the information')
                return True
            else:
                continue
        return False

    def check_standby_processes(self):
        '''Check if all the standby processes are present '''

        # Get hostname and data directory of standby, if any.
        # We could use gparray, but for now let's stay away from gppylib
        with DbConn(dbname='postgres') as conn:
            results = conn.execute("""
                SELECT hostname, datadir
                FROM gp_segment_configuration
                WHERE content = -1 AND role = 'm'
                """)
            # If standby is not configured, there must not be any standby processes.
            if len(results) == 0:
                return False
            host = results[0].hostname
            datadir = results[0].datadir

        # We look for these processes that are spawned from standby postmaster.
        # They should have postmaster pid as ppid.  We minimize remote operation
        # cost by getting ps output once, and search for these strings from the
        # output lines using regexp.
        process_list = [
            'master logger process', 'startup process', 'wal receiver process'
        ]
        target_process = '(' + '|'.join(process_list) + ')'
        postmaster_pid = walrepl.get_postmaster_pid(datadir, host)
        # If the postmaster does not exist, child processes are not present.
        if postmaster_pid == -1:
            return False

        # Make it string for the later comparison
        postmaster_pid = str(postmaster_pid)
        cmd = SmartCmd('ps -e -o ppid,command | grep [p]ostgres', host=host)
        cmd.run()
        standby_processes = []
        for line in cmd.get_results().stdout.splitlines(True):
            line = line.strip()
            (ppid, command) = re.split(r'\s+', line, 1)
            if ppid == postmaster_pid and re.search(target_process, command):
                standby_processes.append(line)

        # If we found more or less than expected, we don't know.
        if len(standby_processes) != len(process_list):
            return False
        tinctest.logger.info(
            'All the standby processes are present at standby host')
        return True
Example #9
class GpactivateStandby(object):
    '''Class for gpactivatestandby operations '''

    standby_port = '5656'
    db_name = 'walrepl'

    def __init__(self):
        self.gpinit = GpinitStandby()
        self.pgutil = GpUtility()
        self.runmixin = StandbyRunMixin()
        self.runmixin.createdb(dbname='walrepl')
        self.gphome = os.environ.get('GPHOME')
        self.pgport = os.environ.get('PGPORT')
        self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
        self.config = GPDBConfig()
        self.host = socket.gethostname()

        dburl = dbconn.DbURL()
        gparray = GpArray.initFromCatalog(dburl, utility=True)
        self.numcontent = gparray.getNumSegmentContents()
        self.orig_master = gparray.master

    def run_remote(self, standbyhost, rmt_cmd, pgport='', standbydd=''):
        '''Runs remote command and returns rc, result '''
        export_cmd = "source %s/greenplum_path.sh;export PGPORT=%s;export MASTER_DATA_DIRECTORY=%s" % (
            self.gphome, pgport, standbydd)
        remote_cmd = "gpssh -h %s -e '%s; %s'" % (standbyhost, export_cmd,
                                                  rmt_cmd)
        cmd = Command(name='Running Remote command', cmdStr='%s' % remote_cmd)
        tinctest.logger.info(" %s" % cmd)
        cmd.run(validateAfter=False)
        result = cmd.get_results()
        return result.rc, result.stdout

    def activate(self, option=''):
        ''' Stop the master and activate current standby to master'''
        standby_host = self.get_current_standby()
        standby_port = self.get_standby_port()
        standby_loc = self.get_standby_dd()

        self.run_remote(self.host,
                        'gpstop -aim',
                        pgport=self.pgport,
                        standbydd=self.mdd)

        gpactivate_cmd = 'gpactivatestandby -a -d %s %s' % (standby_loc,
                                                            option)
        (rc, result) = self.run_remote(standby_host,
                                       gpactivate_cmd,
                                       pgport=standby_port,
                                       standbydd=standby_loc)
        tinctest.logger.info(
            'Result without force option to activate standby %s' % result)
        if (rc != 0) and result.find('Force activation required') != -1:
            tinctest.logger.info(
                'activating standby failed, try force activation...')
            gpactivate_cmd = 'gpactivatestandby -a -f -d %s %s' % (standby_loc,
                                                                   option)
            (rc, result) = self.run_remote(standby_host,
                                           gpactivate_cmd,
                                           pgport=standby_port,
                                           standbydd=standby_loc)
            if (rc != 0):
                tinctest.logger.error('Force activating standby failed!')
                return False
        tinctest.logger.info('standby activated, host value %s' % standby_host)
        return True

    def remove_standby(self):
        return self.gpinit.run(option='-r')

    def failback_to_original_master(self):
        # Check if master is running.
        bashCmd = (
            self.gphome
        ) + '/bin/pg_ctl status -D $MASTER_DATA_DIRECTORY | grep \'pg_ctl: server is running\''
        cmd = Command(name='Running cmd %s' % bashCmd,
                      cmdStr="source %s/greenplum_path.sh; %s" %
                      (self.gphome, bashCmd))
        try:
            cmd.run()
        except Exception as e:
            tinctest.logger.error("Error running command %s\n" % e)
            return

        result = cmd.get_results()
        out = result.stdout
        if not out:
            tinctest.logger.info('Start the old master again ...')
            master = gp.MasterStart("Starting orig Master",
                                    self.orig_master.datadir,
                                    self.orig_master.port,
                                    self.orig_master.dbid, 0, self.numcontent,
                                    None, None, None)
            master.run(validateAfter=True)
            result = master.get_results()
            tinctest.logger.info('orig Master started result : %s' %
                                 result.stdout)
            if result.rc != 0:
                raise WalReplException(
                    'Unable to start original master process')
            Command('gpinitstandby -ra', 'gpinitstandby -ra').run()
            # failing back to old master, it takes a little bit to prepare the cluster ready for connection
            if os.path.exists(local_path('drop_filespace.sql')):
                PSQL.run_sql_file(local_path('drop_filespace.sql'),
                                  dbname=self.db_name)
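
For orientation, a promote-and-failback cycle built from the methods above might look like the following sketch. The import path is an assumption; the constructor reads GPHOME, PGPORT and MASTER_DATA_DIRECTORY from the environment, so a running cluster with a configured standby is assumed.

from mpp.gpdb.tests.storage.walrepl.gpactivatestandby import GpactivateStandby  # assumed import path

gpact = GpactivateStandby()

# Stop the current master and promote the configured standby.
if gpact.activate():
    # ... validate the promoted master here ...
    # Restart the original master and clean up the standby configuration.
    gpact.failback_to_original_master()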
Example #10
class GpinitStandby(object):
    '''Class for gpinitstandby operations 
       Disclaimer: Some of these may repeat with the mpp/lib version'''
    def __init__(self):
        self.stdby = StandbyVerify()
        self.runmixin = StandbyRunMixin()
        self.runmixin.createdb(dbname='walrepl')
        self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
        self.config = GPDBConfig()
        self.pgutil = GpUtility()
        self.host = socket.gethostname()

    def run(self, option=''):
        '''Runs gpinitstandby and returns True if successful'''
        gpinitstandby_cmd = 'gpinitstandby -a %s' % option
        cmd = Command(name='Running Gpinitstandby',
                      cmdStr="%s" % gpinitstandby_cmd)
        tinctest.logger.info(" %s" % cmd)
        cmd.run(validateAfter=False)
        result = cmd.get_results()
        if result.rc != 0:
            return False
        return True

    def verify_gpinitstandby(self, primary_pid):
        '''Verify the presence of standby in recovery mode '''
        if (self.stdby.check_gp_segment_config()) and (
                self.stdby.check_pg_stat_replication()) and (
                    self.stdby.check_standby_processes()
                ) and self.compare_primary_pid(primary_pid):
            return True
        return False

    def get_masterhost(self):
        std_sql = "select hostname from gp_segment_configuration where content=-1 and role='p';"
        master_host = PSQL.run_sql_command(std_sql,
                                           flags='-q -t',
                                           dbname='postgres')
        return master_host.strip()

    def get_standbyhost(self):
        std_sql = "select hostname from gp_segment_configuration where content='-1' and role='m';"
        standby_host = PSQL.run_sql_command(std_sql,
                                            flags='-q -t',
                                            dbname='postgres')
        return standby_host.strip()

    def get_filespace_location(self):
        fs_sql = "select fselocation from pg_filespace_entry where fselocation like '%fs_walrepl_a%' and fsedbid=1;"
        filespace_loc = PSQL.run_sql_command(fs_sql,
                                             flags='-q -t',
                                             dbname='postgres')
        return filespace_loc.strip()

    def get_standbyhostnode(self):
        '''
        Function used to obtain the hostname of one of the segment nodes in order to use it as the standby master node
        @return : returns the hostname of the segment node which can be used as the standby master node
        '''
        hostlist = self.config.get_hosts()
        standby = ''
        for host in hostlist:
            if host.strip() != self.host:
                standby = host.strip()
        if len(standby) > 0:
            return standby
        else:
            tinctest.logger.error(
                'No segment host other than master available to have remote standby'
            )

    def get_primary_pid(self):
        pid = self.pgutil.get_pid_by_keyword(pgport=os.environ.get('PGPORT'),
                                             keyword=self.mdd)
        if int(pid) == -1:
            raise WalReplException(
                'Unable to get pid of primary master process')
        else:
            return int(pid)

    def compare_primary_pid(self, initial_pid):
        final_pid = self.get_primary_pid()
        if initial_pid == final_pid:
            return True
        return False

    def create_dir_on_standby(self, standby, location):
        fs_cmd = "gpssh -h %s -e 'rm -rf %s; mkdir -p %s' " % (
            standby, location, location)
        cmd = Command(
            name='Make directory on standby before running the command',
            cmdStr=fs_cmd)
        tinctest.logger.info('%s' % cmd)
        cmd.run(validateAfter=True)
        result = cmd.get_results()
        if result.rc != 0:
            raise WalReplException('Unable to create directory on standby')
        else:
            return True

    def initstand_by_with_default(self):
        master_host = self.get_masterhost()
        gp_cmd = "/bin/bash -c 'gpinitstandby -s %s'" % (master_host)
        cmd = Command(name='Running the command', cmdStr=gp_cmd)
        tinctest.logger.info('%s' % cmd)
        cmd.run(validateAfter=False)
        sleep(2)
        result = cmd.get_results()
        lines = result.stdout.splitlines()
        for line in lines:
            if 'Data directory already exists' in line:
                return True
        return False

    def init_with_prompt(self, filespace_loc):
        standby = self.get_standbyhostnode()
        gp_cmd = "/bin/bash -c 'gpinitstandby -s %s -a'" % (standby)
        logfile = open(local_path('install2.log'), 'w')

        child = pexpect.spawn(gp_cmd, timeout=400)
        child.logfile = logfile
        sleep(5)
        check = child.expect(
            ['.* Enter standby filespace location for filespace.*', ' '])
        child.sendline(filespace_loc)

        sleep(10)
        check = child.expect(['.*Successfully created standby master.*'])
        if check != 0:
            tinctest.logger.error('gpinitstandby failed')
            return False
        child.close()
        return True
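
A hedged usage sketch of this class: record the primary postmaster pid, add a standby on another segment host, then verify the catalog entry, replication state and standby processes. The import path is an assumption; only methods defined above are used.

from mpp.gpdb.tests.storage.walrepl.gpinitstandby import GpinitStandby  # assumed import path

gpinit = GpinitStandby()
primary_pid = gpinit.get_primary_pid()       # pid of the running master postmaster
standby_host = gpinit.get_standbyhostnode()  # pick a segment host other than the master

# Create the standby, then confirm it is registered, streaming, and that the
# primary postmaster was not restarted along the way.
if gpinit.run(option='-s %s' % standby_host):
    assert gpinit.verify_gpinitstandby(primary_pid)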
Example #11
class BkupRestore(MPPTestCase):

    dbname = 'bkdb'
    standby_port = '5433'
    standby_dirname = '_newstandby'

    def __init__(self, methodName):
        self.gpact = GpactivateStandby()
        self.gpinit = GpinitStandby()
        self.runmixin = StandbyRunMixin()
        self.runmixin.createdb(dbname=self.dbname)
        self.bkup_timestamp = ""
        self.gphome = os.environ.get('GPHOME')
        self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
        self.pgport = os.environ.get('PGPORT')
        self.host = socket.gethostname()
        self.standby_loc = os.path.split(self.mdd)[0] + self.standby_dirname
        super(BkupRestore, self).__init__(methodName)

    def createdb(self):
        PSQL.run_sql_command('Drop database %s;Create database %s;' %
                             (self.dbname, self.dbname),
                             dbname='postgres')

    def run_backup(self):
        #Cleanup db_dumps folder before running backup
        cleanup_cmd = "gpssh -h %s -e 'rm -rf /tmp/db_dumps'" % (self.host)
        cmd = Command('cleanup', cleanup_cmd)
        cmd.run(validateAfter=False)
        gpc_cmd = 'gpcrondump -a -x %s -u /tmp' % self.dbname
        cmd = Command(name='Run gpcrondump', cmdStr=gpc_cmd)
        cmd.run(validateAfter=True)
        result = cmd.get_results()
        if result.rc != 0:
            return False
        else:
            self.bkup_timestamp = self.get_timestamp(result.stdout)
            return True

    def validate_timestamp(self, ts):
        try:
            int_ts = int(ts)
        except Exception as e:
            raise Exception('Timestamp is not valid %s' % ts)

        if len(ts) != 14:
            raise Exception('Timestamp is invalid %s' % ts)

    def get_timestamp(self, result):
        for line in result.splitlines():
            if 'Timestamp key = ' in line:
                log_msg, delim, timestamp = line.partition('=')
                ts = timestamp.strip()
                self.validate_timestamp(ts)
                return ts
        raise Exception('Timestamp key not found')

    def run_restore(self):
        gpr_cmd = 'gpdbrestore -t %s -a -u /tmp' % self.bkup_timestamp
        (rc, result) = self.run_remote(self.host,
                                       gpr_cmd,
                                       pgport=self.standby_port,
                                       standbydd=self.standby_loc)
        if rc != 0:
            return False
        return True

    def create_standby(self, local=True):
        ''' Create a standby '''
        gputil.create_dir(self.host, self.standby_loc)
        gputil.clean_dir(self.host, self.standby_loc)
        self.gpinit.run(option='-P %s -s %s -F pg_system:%s' %
                        (self.standby_port, self.host, self.standby_loc))

    def run_remote(self, standbyhost, rmt_cmd, pgport='', standbydd=''):
        '''Runs remote command and returns rc, result '''
        export_cmd = "source %s/greenplum_path.sh;export PGPORT=%s;export MASTER_DATA_DIRECTORY=%s" % (
            self.gphome, pgport, standbydd)
        remote_cmd = "gpssh -h %s -e '%s;%s' " % (standbyhost, export_cmd,
                                                  rmt_cmd)
        cmd = Command(name='Running Remote command', cmdStr='%s' % remote_cmd)
        tinctest.logger.info(" %s" % cmd)
        cmd.run(validateAfter=False)
        result = cmd.get_results()
        return result.rc, result.stdout

    def run_workload(self, dir, verify=False):
        tinctest.logger.info("Running workload ...")
        load_path = local_path(dir) + os.sep
        for file in os.listdir(load_path):
            if file.endswith(".sql"):
                out_file = file.replace(".sql", ".out")
                PSQL.run_sql_file(sql_file=load_path + file,
                                  dbname=self.dbname,
                                  port=self.pgport,
                                  out_file=load_path + out_file)
        if verify == True:
            self.validate_sql_files(load_path)

    def validate_sql_files(self, load_path):
        for file in os.listdir(load_path):
            if file.endswith(".out"):
                out_file = file
                ans_file = file.replace('.out', '.ans')
                if os.path.exists(load_path + ans_file):
                    assert Gpdiff.are_files_equal(load_path + out_file,
                                                  load_path + ans_file)
                else:
                    raise Exception("No .ans file exists for %s " % out_file)

    def failback(self):
        gputil.failback_to_original_master(self.mdd, self.host,
                                           self.standby_loc, self.standby_port)
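
As a rough illustration of how these helpers fit together, a hypothetical test method on this class could drive the full backup/activate/restore cycle as sketched below; the workload directory names are placeholders, not part of the original suite.

    def test_backup_restore_on_standby(self):
        '''Hypothetical flow: back up on the master, promote the standby, restore there.'''
        self.createdb()
        self.run_workload('load_sql')                 # placeholder directory of .sql files
        assert self.run_backup()                      # gpcrondump; records self.bkup_timestamp

        self.create_standby()                         # gpinitstandby onto self.standby_loc
        self.gpact.activate()                         # promote the standby to act as master
        assert self.run_restore()                     # gpdbrestore using the saved timestamp

        self.run_workload('verify_sql', verify=True)  # placeholder; compares .out against .ans
        self.failback()                               # return to the original master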
Example #12
class GpactivateStandby(object):
    '''Class for gpactivatestandby operations '''
    
    standby_port = '5656'
    db_name = 'walrepl'

    def __init__(self):
        self.gpinit = GpinitStandby()
        self.pgutil = GpUtility()
        self.runmixin = StandbyRunMixin()
        self.runmixin.createdb(dbname='walrepl')
        self.gphome = os.environ.get('GPHOME')
        self.pgport = os.environ.get('PGPORT')
        self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
        self.config = GPDBConfig()
        self.host = socket.gethostname()

        dburl = dbconn.DbURL()
        gparray = GpArray.initFromCatalog(dburl, utility=True)
        self.numcontent = gparray.getNumSegmentContents()
        self.orig_master = gparray.master

    def run_remote(self, standbyhost, rmt_cmd, pgport = '', standbydd = ''):
        '''Runs remote command and returns rc, result '''
        export_cmd = "source %s/greenplum_path.sh;export PGPORT=%s;export MASTER_DATA_DIRECTORY=%s" % (self.gphome, pgport, standbydd) 
        remote_cmd = "gpssh -h %s -e '%s; %s'" % (standbyhost, export_cmd, rmt_cmd)
        cmd = Command(name='Running Remote command', cmdStr='%s' % remote_cmd)
        tinctest.logger.info(" %s" % cmd)
        cmd.run(validateAfter=False)
        result = cmd.get_results()
        return result.rc,result.stdout


    def activate(self, option=''):
        ''' Stop the master and activate current standby to master'''
        standby_host = self.get_current_standby() 
        standby_port = self.get_standby_port()
        standby_loc = self.get_standby_dd()

        self.run_remote(self.host, 'gpstop -aim', pgport=self.pgport, standbydd=self.mdd)

        gpactivate_cmd = 'gpactivatestandby -a -d %s %s' %(standby_loc, option)
        (rc, result) = self.run_remote(standby_host, gpactivate_cmd, pgport = standby_port, standbydd=standby_loc)
        tinctest.logger.info('Result without force option to activate standby %s'%result)
        if (rc != 0) and result.find('Force activation required') != -1:
            tinctest.logger.info('activating standby failed, try force activation...')
            gpactivate_cmd = 'gpactivatestandby -a -f -d %s %s' %(standby_loc, option)
            (rc, result) = self.run_remote(standby_host, gpactivate_cmd, pgport = standby_port, standbydd=standby_loc)
            if (rc != 0):
                tinctest.logger.error('Force activating standby failed!')
                return False
        tinctest.logger.info('standby activated, host value %s' % standby_host)
        return True 

    def remove_standby(self):
        return self.gpinit.run(option='-r')

    def failback_to_original_master(self):
        # Check if master is running.
        bashCmd = (self.gphome)+'/bin/pg_ctl status -D $MASTER_DATA_DIRECTORY | grep \'pg_ctl: server is running\''
        cmd = Command(name='Running cmd %s'%bashCmd, cmdStr="source %s/greenplum_path.sh; %s" % (self.gphome,bashCmd))
        try:
            cmd.run()
        except Exception as e:
            tinctest.logger.error("Error running command %s\n" % e)
            return
        
        result = cmd.get_results()
        out = result.stdout
        if not out:
            tinctest.logger.info('Start the old master again ...')
            master = gp.MasterStart("Starting orig Master", self.orig_master.datadir, self.orig_master.port, self.orig_master.dbid, 0, self.numcontent, None, None, None)
            master.run(validateAfter=True)
            result = master.get_results()
            tinctest.logger.info ('orig Master started result : %s' % result.stdout)
            if result.rc != 0:
                raise WalReplException('Unable to start original master process')
            Command('gpinitstandby -ra', 'gpinitstandby -ra').run()
            # failing back to old master, it takes a little bit to prepare the cluster ready for connection
            if os.path.exists(local_path('drop_filespace.sql')):
                PSQL.run_sql_file(local_path('drop_filespace.sql'), dbname=self.db_name)
Example #13
class GpinitStandby(object):
    '''Class for gpinitstandby operations 
       Disclaimer: Some of these may repeat with the mpp/lib version'''
    def __init__(self):
        self.stdby = StandbyVerify()
        self.runmixin = StandbyRunMixin()
        self.runmixin.createdb(dbname='walrepl')        
        self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
        self.config = GPDBConfig()
        self.pgutil = GpUtility()
        self.host = socket.gethostname()
 
    def run(self, option = ''):
        '''Runs gpinitstandby and returns True if successful'''
        gpinitstandby_cmd = 'gpinitstandby -a %s' % option
        cmd = Command(name='Running Gpinitstandby', cmdStr="%s" % gpinitstandby_cmd)
        tinctest.logger.info(" %s" % cmd)
        cmd.run(validateAfter=False)
        result = cmd.get_results()
        if result.rc != 0:
            return False
        return True

    def verify_gpinitstandby(self, primary_pid):  
        '''Verify the presence of standby in recovery mode '''
        if (self.stdby.check_gp_segment_config()) and (self.stdby.check_pg_stat_replication()) and (self.stdby.check_standby_processes())and self.compare_primary_pid(primary_pid) :
            return True
        return False

    def get_masterhost(self):
        std_sql = "select hostname from gp_segment_configuration where content=-1 and role='p';"
        master_host = PSQL.run_sql_command(std_sql, flags = '-q -t', dbname= 'postgres')
        return master_host.strip()

    def get_standbyhost(self):
        std_sql = "select hostname from gp_segment_configuration where content='-1' and role='m';"
        standby_host = PSQL.run_sql_command(std_sql, flags = '-q -t', dbname= 'postgres')
        return standby_host.strip()

    def get_filespace_location(self):
        fs_sql = "select fselocation from pg_filespace_entry where fselocation like '%fs_walrepl_a%' and fsedbid=1;"
        filespace_loc = PSQL.run_sql_command(fs_sql, flags = '-q -t', dbname= 'postgres')
        return filespace_loc.strip()

    def get_standbyhostnode(self):
        '''
        Function used to obtain the hostname of one of the segment nodes in order to use it as the standby master node
        @return : returns the hostname of the segment node which can be used as the standby master node
        '''
        hostlist = self.config.get_hosts()
        standby = ''
        for host in hostlist:
            if host.strip() != self.host:
                standby = host.strip()
        if len(standby) > 0 :
            return standby
        else:
            tinctest.logger.error('No segment host other than master available to have remote standby')

    def get_primary_pid(self):
        pid = self.pgutil.get_pid_by_keyword(pgport=os.environ.get('PGPORT'), keyword=self.mdd)
        if int(pid) == -1:
            raise WalReplException('Unable to get pid of primary master process')
        else:
            return int(pid)

    def compare_primary_pid(self, initial_pid):
        final_pid = self.get_primary_pid()
        if initial_pid == final_pid :
            return True
        return False

    def create_dir_on_standby(self, standby, location):
        fs_cmd = "gpssh -h %s -e 'rm -rf %s; mkdir -p %s' " % (standby, location, location)
        cmd = Command(name='Make directory on standby before running the command', cmdStr=fs_cmd)
        tinctest.logger.info('%s' % cmd)
        cmd.run(validateAfter=True)
        result = cmd.get_results()
        if result.rc != 0:
            raise WalReplException('Unable to create directory on standby')
        else:
            return True
      
    def initstand_by_with_default(self):
        master_host = self.get_masterhost()
        gp_cmd =  "/bin/bash -c 'gpinitstandby -s %s'" % (master_host)
        logfile = open(local_path('install.log'),'w')

        child = pexpect.spawn(gp_cmd, timeout=400)
        child.logfile = logfile
        sleep(2)
        check = child.expect(['.* Enter standby filespace location for filespace pg_system .*', ' '])
        if check != 0:
            child.close()

        l_file = open(local_path('install.log'),'r')
        lines = l_file.readlines()
        for line in lines:
            if 'default: NA' in line:
                return True
        return False

    def init_with_prompt(self,filespace_loc):
        standby = self.get_standbyhostnode() 
        gp_cmd =  "/bin/bash -c 'gpinitstandby -s %s -a'" % (standby)
        logfile = open(local_path('install2.log'),'w')

        child = pexpect.spawn(gp_cmd, timeout=400)
        child.logfile = logfile
        sleep(5)
        check = child.expect(['.* Enter standby filespace location for filespace.*', ' '])
        child.sendline(filespace_loc)

        sleep(10)
        check = child.expect(['.*Successfully created standby master.*'])
        if check != 0:
            tinctest.logger.error('gpinitstandby failed')
            return False
        child.close()
        return True
Example #14
File: __init__.py Project: 50wu/gpdb
class BkupRestore(MPPTestCase):
    
    dbname = 'bkdb'
    standby_port = '5433'
    standby_dirname = '_newstandby'

    def __init__(self,methodName):
        self.gpact = GpactivateStandby()
        self.gpinit = GpinitStandby()
        self.runmixin = StandbyRunMixin()
        self.runmixin.createdb(dbname=self.dbname)
        self.bkup_timestamp = ""
        self.gphome = os.environ.get('GPHOME')
        self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
        self.pgport = os.environ.get('PGPORT')
        self.host = socket.gethostname()
        self.standby_loc = os.path.split(self.mdd)[0]+self.standby_dirname
        super(BkupRestore,self).__init__(methodName)

    def createdb(self):
        PSQL.run_sql_command('Drop database %s;Create database %s;' %(self.dbname, self.dbname), dbname='postgres')

    def run_backup(self):
        #Cleanup db_dumps folder before running backup
        cleanup_cmd = "gpssh -h %s -e 'rm -rf /tmp/db_dumps'" % (self.host)
        cmd = Command('cleanup', cleanup_cmd)
        cmd.run(validateAfter=False)
        gpc_cmd = 'gpcrondump -a -x %s -u /tmp' % self.dbname
        cmd = Command(name='Run gpcrondump', cmdStr=gpc_cmd)
        cmd.run(validateAfter=True)
        result = cmd.get_results()
        if result.rc != 0:
            return False
        else:
            self.bkup_timestamp = self.get_timestamp(result.stdout)
            return True

    def validate_timestamp(self, ts):
        try:
            int_ts = int(ts)
        except Exception as e:
            raise Exception('Timestamp is not valid %s' % ts)

        if len(ts) != 14:
            raise Exception('Timestamp is invalid %s' % ts) 

    def get_timestamp(self, result):
        for line in result.splitlines():
            if 'Timestamp key = ' in line:
                log_msg, delim, timestamp = line.partition('=') 
                ts = timestamp.strip()
                self.validate_timestamp(ts)
                return ts
        raise Exception('Timestamp key not found')

    def run_restore(self):
        gpr_cmd = 'gpdbrestore -t %s -a -u /tmp' % self.bkup_timestamp
        (rc, result) = self.run_remote(self.host, gpr_cmd, pgport=self.standby_port, standbydd=self.standby_loc)
        if rc != 0:
            return False
        return True

    def create_standby(self, local=True):
        ''' Create a standby '''
        gputil.create_dir(self.host,self.standby_loc)
        gputil.clean_dir(self.host,self.standby_loc)
        self.gpinit.run(option = '-P %s -s %s -F pg_system:%s' % (self.standby_port, self.host, self.standby_loc))
        
    def run_remote(self, standbyhost, rmt_cmd, pgport = '', standbydd = ''):
        '''Runs remote command and returns rc, result '''
        export_cmd = "source %s/greenplum_path.sh;export PGPORT=%s;export MASTER_DATA_DIRECTORY=%s" % (self.gphome, pgport, standbydd)
        remote_cmd = "gpssh -h %s -e '%s;%s' " % (standbyhost, export_cmd, rmt_cmd)
        cmd = Command(name='Running Remote command', cmdStr='%s' % remote_cmd)
        tinctest.logger.info(" %s" % cmd)
        cmd.run(validateAfter=False)
        result = cmd.get_results()
        return result.rc,result.stdout

    def run_workload(self, dir, verify = False):
        tinctest.logger.info("Running workload ...")
        load_path = local_path(dir) + os.sep
        for file in os.listdir(load_path):
            if file.endswith(".sql"):
                out_file = file.replace(".sql", ".out")
                PSQL.run_sql_file(sql_file = load_path + file, dbname = self.dbname, port = self.pgport, out_file = load_path + out_file)
        if verify == True:
            self.validate_sql_files(load_path)

    def validate_sql_files(self, load_path):
        for file in os.listdir(load_path):
            if file.endswith(".out"):
                out_file = file
                ans_file = file.replace('.out' , '.ans')
                if os.path.exists(load_path + ans_file):
                    assert Gpdiff.are_files_equal(load_path + out_file, load_path + ans_file)
                else:
                    raise Exception("No .ans file exists for %s " % out_file)

    def failback(self):
        gputil.failback_to_original_master(self.mdd, self.host, self.standby_loc, self.standby_port)