Example #1
    def run(self):
        if self.build:
            BuildGppkg(self.build).run()
            return

        #Check for RPM and Solaris OS
        if curr_platform == SUNOS:
            raise ExceptionNoStackTraceNeeded(
                'gppkg is not supported on Solaris')

        try:
            cmd = Command(name='Check for rpm', cmdStr='rpm --version')
            cmd.run(validateAfter=True)
            results = cmd.get_results().stdout.strip()
            rpm_version_string = results.split(' ')[-1]

            if not rpm_version_string.startswith('4.'):
                raise ExceptionNoStackTraceNeeded(
                    'gppkg requires rpm version 4.x')

        except ExecutionError, ex:
            results = ex.cmd.get_results().stderr.strip()
            if len(results) != 0 and 'not found' in results:
                raise ExceptionNoStackTraceNeeded(
                    'gppkg requires RPM to be available in PATH')
Example #2
    def drop_database(dbname, retries = 5, sleep_interval = 5):
        """
        Execute dropdb against the given database.

        @type dbname: string
        @param dbname: Name of the database to be deleted

        @type retries: integer
        @param retries: Number of attempts to drop the database.

        @type sleep_interval: integer
        @param sleep_interval: Time in seconds between retry attempts

        @rtype: boolean
        @return: True if successful, False otherwise

        @raise PSQLException: When the database does not exist
        """
        # TBD: Use shell when available
        if not PSQL.database_exists(dbname):
            tinctest.logger.error("Database %s does not exist." %dbname)
            raise PSQLException('Database %s does not exist' %dbname)
        cmd = Command(name='drop database', cmdStr='dropdb %s' %(dbname))
        tinctest.logger.debug("Dropping database: %s" %cmd)
        count = 0
        while count < retries:
            cmd.run(validateAfter = False)
            result = cmd.get_results()
            tinctest.logger.debug("Output - %s" %result)
            if result.rc == 0 and not result.stderr:
                return True
            time.sleep(sleep_interval)
            count += 1
        return False
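
A minimal usage sketch for the helper above, assuming it is exposed as PSQL.drop_database (an assumption based on its call to PSQL.database_exists); the database name is illustrative.

try:
    if not PSQL.drop_database('tinc_scratch_db', retries=3, sleep_interval=2):
        tinctest.logger.warning('dropdb kept failing; manual cleanup may be needed')
except PSQLException:
    pass  # database did not exist in the first place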
Example #3
def run_gppkg(pgport, gphome, mdd, loc, options="--install"):
    gppkg_cmd = "export PGPORT=%s; export MASTER_DATA_DIRECTORY=%s; source %s/greenplum_path.sh; gppkg %s %s" % (
        pgport, mdd, gphome, options, loc)
    cmd = Command(name="Run gppkg", cmdStr=gppkg_cmd)
    cmd.run(validateAfter=True)
    result = cmd.get_results()
    return result.stdout
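
A usage sketch for run_gppkg; the package path is illustrative and the GPHOME, MASTER_DATA_DIRECTORY, and PGPORT environment variables are assumed to be set.

import os

out = run_gppkg(pgport=os.environ.get('PGPORT', '5432'),
                gphome=os.environ['GPHOME'],
                mdd=os.environ['MASTER_DATA_DIRECTORY'],
                loc='/tmp/sample.gppkg',      # illustrative package archive
                options='--install')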
Example #4
    def check_gpfdist_process(self, wait=60, port=None, raise_assert=True):
        """
        Check for the gpfdist process
        Wait at least 60s until gpfdist starts, else raise an exception
        @var wait: wait at least 60s for gpfdist
        @var port: Port Number
        @var raise_assert: raise gpfdist error by default
        """
        if port is None:
            port = self.port
        process_started = False
        count = 0
        while (not process_started and count < wait):
            cmd_str = "%s -ef | grep \"gpfdist -p %s\" | grep -v grep" % (
                self.ps_command, port)
            cmd = Command(name='check for gpfdist', cmdStr=cmd_str)
            cmd.run()

            content = cmd.get_results().stdout
            if len(content) > 0:
                if content.find("gpfdist -p %s" % port) > 0:
                    process_started = self.is_gpfdist_connected(port)
                    if process_started:
                        return True
            count = count + 1
            time.sleep(1)
        if raise_assert:
            raise GpfdistError("Could not start gpfdist process")
        else:
            print "Could not start gpfdist process"
Example #5
    def execute(self):
        entry = ValidateVerificationEntry(token=self.token).run()
        if not entry['verdone']:
            raise WrongStateError(
                "Only finished verification tasks may be cleaned up.")

        path = os.path.join(get_masterdatadir(), 'pg_verify', self.token)
        Command('cleanup', 'rm -rf %s' % path).run(validateAfter=True)
        #RemoveTree(path).run()

        to_clean = ValidateVerification(content=entry['vercontent'],
                                        primaries_only=False).run()
        pool = WorkerPool(min(len(to_clean), self.batch_default))
        for seg in to_clean:
            host = seg.getSegmentHostName()
            path = os.path.join(seg.getSegmentDataDirectory(), 'pg_verify',
                                "*%s*" % self.token)
            cmd = Command('cleanup', 'rm -f %s' % path, remoteHost=host)
            pool.addCommand(cmd)

        logger.info('Waiting for clean commands to complete...')
        pool.wait_and_printdots(len(to_clean))

        for cmd in pool.getCompletedItems():
            res = cmd.get_results()
            if not res.wasSuccessful():
                logger.error('Failed to send cleanup on %s' % cmd.host)
                logger.error('Error: %s' % res.stderr)
                raise CleanVerificationError()
        RemoveVerificationEntry(token=self.token).run()
        logger.info('Verification %s has been cleaned.' % self.token)
Example #6
def get_info():
    """
    Get the current platform
    @return: type platform of the current system
    @rtype : String
    """
    myos = platform.system()
    if myos == "Darwin":
        return 'OSX'
    elif myos == "Linux":
        if os.path.exists("/etc/SuSE-release"):
            return 'SUSE'
        elif os.path.exists("/etc/redhat-release"):
            cmd_str = "cat /etc/redhat-release"
            cmd = Command("run cat for RHEL version", cmd_str)
            cmd.run()
            result = cmd.get_results()
            msg = result.stdout
            if msg.find("5") != -1:
                return 'RHEL5'
            else:
                return 'RHEL6'
    elif myos == "SunOS":
        return 'SOL'
    return None
Example #7
def delete_rpm(name):
    """
    Delete RPM package
    @param name: package name
    """
    rpm_cmd = 'sudo rpm -e %s' % (name)
    cmd = Command(name='delete rpm', cmdStr=rpm_cmd)
    cmd.run(validateAfter=True)
Example #8
 def execute(self):
     cmd = Command(name=self.__class__.__name__,
                   cmdStr=self.cmd_str,
                   ctxt=REMOTE,
                   remoteHost=self.host)
     cmd.run(validateAfter=True)
     # TODO! If exception is raised remotely, there's no stdout, thereby causing a pickling error.
     return pickle.loads(cmd.get_results().stdout)
Example #9
 def test_failed_gpinitsystem(self):
     cmd = Command(name='create folder', cmdStr='mkdir -p %s' % (self.mdd))
     cmd.run()
     try:
         self.gpdbinit.run()
     except Exception:
         # gpinitsystem is expected to fail against this setup
         pass
     else:
         self.fail("gpinitsystem was expected to fail but succeeded")
Example #10
 def test_gpdb_gpinitsystem(self):
     try:
         self.gpdbinit.run()
         cmd = Command(name='run gpstop',
                       cmdStr='export MASTER_DATA_DIRECTORY=%s; gpstop -a' %
                       (self.mdd))
         cmd.run()
     except:
         self.fail("Gpinitsystem Failed")
Example #11
def run_command(cmd_str):
    cmd = Command("Local Command", cmd_str)
    cmd.run(validateAfter=False)  # don't raise on failure; rc is checked below
    results = cmd.get_results()

    if results.rc != 0:
        return results.stderr.strip()
    else:
        return results.stdout.strip()
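
A brief usage sketch; the command string is illustrative. The helper returns trimmed stdout on success and trimmed stderr on failure, so callers that need the exit code should use Command directly.

kernel_info = run_command('uname -a')   # stdout on success, stderr text on failure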
Example #12
    def execute(self):
        timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
        config_backup_file = "gp_master_config_files_%s.tar" % timestamp
        if self.backup_dir is not None:
            path = os.path.join(self.backup_dir, DUMP_DIR, DUMP_DATE,
                                config_backup_file)
        else:
            path = os.path.join(self.master_datadir, DUMP_DIR, DUMP_DATE,
                                config_backup_file)
        logger.info("Dumping master config files")
        Command("Dumping master configuration files", "tar cf %s %s/*.conf" %
                (path, self.master_datadir)).run(validateAfter=True)
        if self.ddboost:
            abspath = path
            relpath = os.path.join(DUMP_DIR, DUMP_DATE, config_backup_file)
            logger.debug('Copying %s to DDBoost' % abspath)
            cmd = Command(
                'DDBoost copy of %s' % abspath,
                'gpddboost --copyToDDBoost --from-file=%s --to-file=%s' %
                (abspath, relpath))
            cmd.run(validateAfter=True)

        logger.info("Dumping segment config files")
        gparray = GpArray.initFromCatalog(dbconn.DbURL(port=self.master_port),
                                          utility=True)
        primaries = [
            seg for seg in gparray.getDbList()
            if seg.isSegmentPrimary(current_role=True)
        ]
        for seg in primaries:
            config_backup_file = "gp_segment_config_files_0_%d_%s.tar" % (
                seg.getSegmentDbId(), timestamp)
            if self.backup_dir is not None:
                path = os.path.join(self.backup_dir, DUMP_DIR, DUMP_DATE,
                                    config_backup_file)
            else:
                path = os.path.join(seg.getSegmentDataDirectory(), DUMP_DIR,
                                    DUMP_DATE, config_backup_file)
            host = seg.getSegmentHostName()
            Command("Dumping segment config files",
                    "tar cf %s %s/*.conf" %
                    (path, seg.getSegmentDataDirectory()),
                    ctxt=REMOTE,
                    remoteHost=host).run(validateAfter=True)
            if self.ddboost:
                abspath = path
                relpath = os.path.join(DUMP_DIR, DUMP_DATE, config_backup_file)
                logger.debug('Copying %s to DDBoost' % abspath)
                cmd = Command(
                    'DDBoost copy of %s' % abspath,
                    'gpddboost --copyToDDBoost --from-file=%s --to-file=%s' %
                    (abspath, relpath),
                    ctxt=REMOTE,
                    remoteHost=host)
                cmd.run(validateAfter=True)
Example #13
def get_package_name(name):
    """
    Get RPM package name
    @param dir: directory
    @param name: rpm packagge 
    """
    rpm_cmd = 'rpm -qp %s' % (name)
    cmd = Command(name='get rpm package name', cmdStr=rpm_cmd)
    cmd.run(validateAfter=True)
    result = cmd.get_results()
    return result.stdout
Example #14
def run_shell_command(cmdstr, cmdname='shell command'):
    cmd = Command(cmdname, cmdstr)
    tinctest.logger.info('Executing command: %s : %s' % (cmdname, cmdstr))
    cmd.run()
    result = cmd.get_results()
    tinctest.logger.info('Finished command execution with return code %s ' %
                         (str(result.rc)))
    tinctest.logger.debug('stdout: ' + result.stdout)
    tinctest.logger.debug('stderr: ' + result.stderr)
    if result.rc != 0:
        return False
    return True
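
A usage sketch showing the boolean contract of run_shell_command; the command string is illustrative.

if not run_shell_command('mkdir -p /tmp/tinc_work', cmdname='create work dir'):
    raise Exception('could not create the working directory')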
Example #15
    def execute(self):
        real_filename = os.path.join(
            self.master_datadir, DUMP_DIR, self.restore_timestamp[0:8],
            "%s%s" % (MASTER_DBDUMP_PREFIX, self.restore_timestamp))
        fake_filename = os.path.join(
            self.master_datadir, DUMP_DIR, self.fake_timestamp[0:8],
            "%s%s" % (MASTER_DBDUMP_PREFIX, self.fake_timestamp))
        real_file, fake_file = None, None
        try:
            if self.compress:
                real_filename += '.gz'
                fake_file = gzip.open(fake_filename + '.gz', 'w')
            else:
                fake_file = open(fake_filename, 'w')

            cmd = Command(
                'Copying master dump file from DDBoost',
                'gpddboost --readFile --from-file=%s | head -n 20' %
                (real_filename))

            cmd.run(validateAfter=True)
            line_list = cmd.get_results().stdout.splitlines()
            # TODO: copy over data among the first 20 lines that begin with 'SET'. Why 20? See gpdbrestore.sh:1025.
            # e.g.
            #  1 --
            #  2 -- Greenplum Database database dump
            #  3 --
            #  4
            #  5 SET statement_timeout = 0;
            #  6 SET client_encoding = 'UTF8';
            #  7 SET standard_conforming_strings = off;
            #  8 SET check_function_bodies = false;
            #  9 SET client_min_messages = warning;
            # 10 SET escape_string_warning = off;
            # 11
            # 12 SET default_with_oids = false;
            # 13
            # 14 --
            # 15 -- Name: SCHEMA public; Type: COMMENT; Schema: -; Owner: ashwin
            # 16 --
            # 17
            # 18 COMMENT ON SCHEMA public IS 'Standard public schema';
            # 19
            # 20
            for lineno, line in enumerate(line_list, 1):
                if line.startswith("SET"):
                    fake_file.write(line + '\n')
                if lineno > 20:
                    break
        except Exception, e:
            logger.exception('Master dump file build failed.')
            raise ExceptionNoStackTraceNeeded(
                'Master dump file build failed, review log file for details')
Example #16
def install_rpm(location, dir, name):
    """
    Install RPM package
    @param location: location of rpm packages
    @param dir: install directory
    @param name: package name
    @return: package name
    """
    package_location = location + '/' + name
    rpm_cmd = 'sudo rpm --force --prefix %s -i %s' % (dir, package_location)
    cmd = Command(name='install rpm', cmdStr=rpm_cmd)
    cmd.run(validateAfter=True)

    return get_package_name(package_location)
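
A sketch of the install/remove round trip using install_rpm together with delete_rpm from Example #7; the location, prefix directory, and file name are illustrative.

pkg = install_rpm('/tmp/rpms', '/usr/local/sample', 'sample-1.0.x86_64.rpm')
delete_rpm(pkg.strip())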
Example #17
 def run(self, option=' '):
     '''
     @type option: string
     @param option: gprecoverseg option (-F or -r)
     '''
     if option not in ('-F', '-r', ' '):
         raise GpRecoversegException('Not a valid option with gprecoverseg')
     rcvr_cmd = 'gprecoverseg -a %s' % option
     cmd = Command(name='Run gprecoverseg', cmdStr='source %s/greenplum_path.sh;%s' % (self.gphome, rcvr_cmd))
     tinctest.logger.info("Running gprecoverseg : %s" % cmd)
     cmd.run(validateAfter=True)
     result = cmd.get_results()
     if result.rc != 0 or result.stderr:
         return False
     return True
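
A usage sketch, assuming self.gprec holds an instance of the wrapper class this method belongs to (the attribute name is an assumption).

if not self.gprec.run(option='-F'):
    raise GpRecoversegException('full recovery failed')
if not self.gprec.run(option='-r'):
    raise GpRecoversegException('rebalancing failed')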
Example #18
 def inject_fault(self, y=None, f=None, r='mirror', seg_id=None, H='ALL', m='async', sleeptime=None, o=None, p=None, outfile=None):
     '''
     PURPOSE :
         Inject the fault using gpfaultinjector
     @param
         y : suspend/resume/reset/panic/fault
         f : Name of the fault
         outfile : output of the command is placed in this file
         rest_of_them : same as in gpfaultinjector help
     '''
     if (not y) or (not f):
         raise Exception("Need a value for type and name to continue")

     if not os.getenv('MASTER_DATA_DIRECTORY'):
         raise Exception('MASTER_DATA_DIRECTORY environment variable is not set.')

     fault_cmd = "gpfaultinjector -f %s -m %s -y %s" % (f, m, y)
     if seg_id:
         fault_cmd = fault_cmd + " -s %s" % seg_id
     if sleeptime:
         fault_cmd = fault_cmd + " -z %s" % sleeptime
     if o:
         fault_cmd = fault_cmd + " -o %s" % o
     if p:
         fault_cmd = fault_cmd + " -p %s" % p
     if seg_id is None:
         fault_cmd = fault_cmd + " -H %s -r %s" % (H, r)
     if sleeptime:
         fault_cmd = fault_cmd + " --sleep_time_s %s" % sleeptime
     if outfile is not None:
         fault_cmd = fault_cmd + " > " + outfile

     cmd = Command('fault_command', fault_cmd)
     cmd.run()
     result = cmd.get_results()
     if result.rc != 0 and y != 'status':
         ok = False
         out = result.stderr
     else:
         ok = True
         out = result.stdout

     if not ok and y != 'status':
         raise Exception("Failed to inject fault %s to %s" % (f, y))
     else:
         tinctest.logger.info('Injected fault %s ' % fault_cmd)
         return (ok, out)
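
A usage sketch from within the owning class; the fault name is illustrative and the flow follows only what the docstring documents.

ok, out = self.inject_fault(y='suspend', f='filerep_consumer', r='mirror')
tinctest.logger.info('suspend output: %s' % out)
ok, out = self.inject_fault(y='reset', f='filerep_consumer', r='mirror')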
Example #19
 def tearDown(self):
     """
     The method that sub-classes must implement as a cleanup for the database
     Typically called when setup fails midway
     
     @return: Nothing if drop db succeeded.  Raises TINCDatagenException if dropdb failed
     """
     output = PSQL.run_sql_command(
         "select 'command_found_' || datname from pg_database where datname like '"
         + self.db_name + "'")
     if 'command_found_' + self.db_name in output:
         cmd = Command('dropdb', "dropdb " + self.db_name)
         cmd.run(validateAfter=False)  # report failure via TINCDatagenException below
         result = cmd.get_results()
         if result.rc != 0:
             raise TINCDatagenException('dropdb failed')
Example #20
 def execute(self):
     logger.info("Sending mail to %s" % ",".join(self.to_addrs))
     cmd = "/bin/mailx" if curr_platform == SUNOS else findCmdInPath('mail')
     Command(
         'Sending email', 'echo "%s" | %s -s "%s" %s' %
         (self.message, cmd, self.subject, " ".join(self.to_addrs))).run(
             validateAfter=True)
Example #21
    def is_port_released(self, port=None):
        """
        Check whether the port is released after stopping gpfdist
        @var port: Port Number
        @return: True or False
        """
        if port is None:
            port = self.port
        cmd_str = "netstat -an |grep '*.%s'" % (port)
        cmd = Command(name='check gpfdist is released', cmdStr=cmd_str)
        cmd.run()

        results = cmd.get_results()
        if len(results.stdout) > 0 and results.rc == 1:
            return False
        return True
Example #22
    def run(self):
        """
        Run gpinitsystem
        rc=0, gpinitsystem has no warning(s) or error(s)
        rc=1, gpinitsystem has warning(s) but no error(s)
        rc=2, gpinitsystem has error(s)
        """
        self.create_datadir()
        cmd = Command(name='run gpinitsystem', cmdStr='source %s/%s; gpinitsystem -a -c %s' %\
              (self.dir, self.source_path, self.config_file))
        cmd.run()
        result = cmd.get_results()

        if result.rc > 1:
            msg = "stdout:%s\nstderr:%s" % (result.stdout, result.stderr)
            raise GpinitsystemError("gpinitsystem failed (%d): %s" %
                                    (result.rc, msg))

        logger.debug("Successfully ran gpinitsystem ...")
Example #23
 def setUp(self):
     """
     The method that sub-classes must implement to setup a particular database.
     
     @rtype: boolean
     @return: True if db is already present; False if it was not present and a new db was created
              Raises TINCDatagenException if db creation failed
     """
     # Assume setup is done if db exists
     output = PSQL.run_sql_command(
         "select 'command_found_' || datname from pg_database where datname like '"
         + self.db_name + "'")
     if 'command_found_' + self.db_name in output:
         return True
     cmd = Command('createdb', "createdb " + self.db_name)
     cmd.run(validateAfter=False)  # report failure via TINCDatagenException below
     result = cmd.get_results()
     if result.rc != 0:
         raise TINCDatagenException('createdb failed')
     return False
Example #24
    def stop(self, wait=60, port=None):
        """
        kill the gpfdist process
        @var wait: wait at least 60s for gpfdist
        @var port: Port Number
        @note: previously call cdbfast.GPDFIST.killGpfdist
        """
        if port is None:
            port = self.port
        cmd_str = '%s -ef | grep "gpfdist -p %s" | grep -v grep | awk \'{print $2}\' | xargs kill 2>&1 > /dev/null' % (
            self.ps_command, port)
        cmd = Command(name='stop gpfdist', cmdStr=cmd_str)
        cmd.run()

        is_released = False
        count = 0
        while (not is_released and count < wait):
            is_released = self.is_port_released()
            count = count + 1
            time.sleep(1)
Example #25
 def __init__(self,
              out_file,
              ans_file,
              gp_ignore=True,
              ignore_header=True,
              ignore_plans=False,
              match_sub=[]):
     cmd_str = 'gpdiff.pl -w -B -I NOTICE:'
     if ignore_header:
         cmd_str += ' -I GP_IGNORE -gpd_ignore_headers'
     elif gp_ignore:
         cmd_str += ' -I GP_IGNORE'
     cmd_str += ' -gpd_init %s/global_init_file' % (os.path.abspath(
         os.path.dirname(__file__)))
     if ignore_plans:
         cmd_str += ' -gpd_ignore_plans'
     if match_sub:
         cmd_str += ' -gpd_init '
         cmd_str += ' -gpd_init '.join(match_sub)
     cmd_str += ' %s %s' % (out_file, ans_file)
     Command.__init__(self, 'run gpdiff', cmd_str)
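
A usage sketch, assuming the constructor above belongs to a Command subclass (called Gpdiff here for illustration); run() and get_results() come from the base Command class used throughout these examples, and gpdiff.pl is assumed to exit non-zero on a mismatch.

diff_cmd = Gpdiff('query01.out', 'query01.ans', ignore_plans=True)
diff_cmd.run()
if diff_cmd.get_results().rc != 0:
    tinctest.logger.error('query01.out does not match query01.ans')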
Example #26
    def is_gpfdist_connected(self, port=None):
        """
        Check gpfdist by connecting after starting process
        @var port: Port Number
        @return: True or False
        """
        if port is None:
            port = self.port

        url = "http://%s:%s" % (self.hostname, port)
        if self.secure:
            url = url.replace("http:", "https:") + " -k"

        cmd_str = "curl %s" % (url)
        cmd = Command(name='check gpfdist is connected', cmdStr=cmd_str)
        cmd.run()

        content = cmd.get_results().stderr
        if content.find("couldn't") >= 0 or content.find(
                "Failed to connect") >= 0:
            return False
        return True
Example #27
 def execute(self):
     logger.info("Commencing pg_catalog dump")
     if self.backup_dir is not None:
         global_file = os.path.join(
             self.backup_dir, DUMP_DIR, self.timestamp[0:8],
             "%s%s" % (GLOBAL_PREFIX, self.timestamp))
     else:
         global_file = os.path.join(
             self.master_datadir, DUMP_DIR, self.timestamp[0:8],
             "%s%s" % (GLOBAL_PREFIX, self.timestamp))
     Command('Dump global objects', "pg_dumpall -g --gp-syntax > %s" %
             global_file).run(validateAfter=True)
     if self.ddboost:
         abspath = global_file
         relpath = os.path.join(DUMP_DIR, self.timestamp[0:8],
                                "%s%s" % (GLOBAL_PREFIX, self.timestamp))
         logger.debug('Copying %s to DDBoost' % abspath)
         cmd = Command(
             'DDBoost copy of %s' % abspath,
             'gpddboost --copyToDDBoost --from-file=%s --to-file=%s' %
             (abspath, relpath))
         cmd.run(validateAfter=True)
Example #28
    def create_database(dbname):
        """
        Create a database with the given database name.

        @type dbname: string
        @param dbname: Name of the database to be created

        @rtype: boolean
        @return: True if successful, False otherwise

        @raise PSQLException: When the database already exists.
        """
        # TBD: Use shell when available
        if PSQL.database_exists(dbname):
            raise PSQLException("Database %s already exists" %dbname)
        cmd = Command(name='create database', cmdStr='createdb %s' %(dbname))
        tinctest.logger.debug("Creating database: %s" %cmd)
        cmd.run(validateAfter = False)
        result = cmd.get_results()
        tinctest.logger.debug("Output - %s" %result)
        if result.rc != 0 or result.stderr:
            return False
        return True
Example #29
    def drop_caches(remoteHost='localhost'):
        """
        Refresh the system caches
        rc=0, drop caches successfully with no warning(s) or error(s)
        rc=1, drop caches successfully with warning(s) but no error(s)
        rc=2, drop caches with error(s), raise TINCSystemException

        @type remoteHost: string
        @param remoteHost: Host name of the machine where drop caches should be executed
        """
        cmdStr = "echo 'echo 3  > /proc/sys/vm/drop_caches' |sudo -s"
        cmd = Command('drop caches',
                      cmdStr,
                      ctxt=REMOTE,
                      remoteHost=remoteHost)
        cmd.run()
        result = cmd.get_results()
        if result.rc > 1:
            msg = "drop caches failed with rc=%s and stderr=%s" % \
                    (result.rc, result.stderr)
            tinctest.logger.warning(msg)
            raise TINCSystemException(msg)
        tinctest.logger.info("drop caches success with %s" % result)
Example #30
    def start(self, options="", port=None, raise_assert=True, ssl=None):
        """
        start hosting the data
        @var options: gpfdist options
        @var port: Port Number
        @var raise_assert: raise gpfdist error by default
        @var ssl: enable ssl
        @note: previously call cdbfast.GPDFIST.startGpfdist
        """
        if port is None:
            port = self.port
        else:
            port = str(port)
        if ssl is None:
            ssl = ""
        else:
            self.secure = True
            self.ssl_cert = ssl
            ssl = "--ssl %s" % self.ssl_cert

        cmd_str = "gpfdist -p %s %s %s > /dev/null &" % (port, options, ssl)
        cmd = Command("Run gpfdist", cmdStr=cmd_str)
        cmd.run()
        return self.check_gpfdist_process(port=port, raise_assert=raise_assert)
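
A lifecycle sketch combining start(), check_gpfdist_process(), and stop() from the gpfdist examples; it is written from inside a test that already owns a gpfdist helper object (self.gpfdist), which is an assumption, as are the data directory and port.

if self.gpfdist.start(options='-d /tmp/data', port='8081', raise_assert=False):
    # ... run external-table queries against the running gpfdist ...
    self.gpfdist.stop(port='8081')
else:
    tinctest.logger.error('gpfdist did not start on port 8081')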