Exemplo n.º 1
0
 def execute(self):
     """Ship self.operation to a remote host for execution and return its result.

     The local executable name and the operation are pickled and fed on stdin
     to gpoperation.py on the remote host; the remote stdout is unpickled as
     the result. If the remote side returned an exception object, it is
     re-raised locally.
     """
     exec_name = os.path.split(sys.argv[0])[-1]
     payload = pickle.dumps(exec_name) + pickle.dumps(self.operation)
     remote_cmd = Command('pickling an operation',
                          '$GPHOME/sbin/gpoperation.py',
                          ctxt=REMOTE,
                          remoteHost=self.host,
                          stdin=payload)
     remote_cmd.run(validateAfter=True)
     output = remote_cmd.get_results().stdout
     logger.debug(output)
     # NOTE(review): pickle.loads on remote output is unsafe if the remote
     # host is not fully trusted.
     result = pickle.loads(output)
     self.operation.ret = result
     if isinstance(result, Exception):
         raise result
     return result
Exemplo n.º 2
0
    def execute(self):
        """
        Clean up all artifacts of a finished verification task.

        Validates that the verification identified by self.token is done,
        removes the master-side pg_verify directory for the token, fans out
        per-segment cleanup commands through a worker pool, and finally
        removes the verification entry itself.

        @raise WrongStateError: if the verification task is not finished
        @raise CleanVerificationError: if any per-segment cleanup command fails
        """
        entry = ValidateVerificationEntry(token=self.token).run()
        if not entry['verdone']:
            raise WrongStateError(
                "Only finished verification tasks may be cleaned up.")

        # Remove the master's pg_verify artifacts for this token.
        path = os.path.join(get_masterdatadir(), 'pg_verify', self.token)
        Command('cleanup', 'rm -rf %s' % path).run(validateAfter=True)
        #RemoveTree(path).run()

        # Fan out removal of per-segment artifacts whose names contain the
        # token, one remote command per segment.
        to_clean = ValidateVerification(content=entry['vercontent'],
                                        primaries_only=False).run()
        pool = WorkerPool(min(len(to_clean), self.batch_default))
        for seg in to_clean:
            host = seg.getSegmentHostName()
            path = os.path.join(seg.getSegmentDataDirectory(), 'pg_verify',
                                "*%s*" % self.token)
            cmd = Command('cleanup', 'rm -f %s' % path, remoteHost=host)
            pool.addCommand(cmd)

        logger.info('Waiting for clean commands to complete...')
        pool.wait_and_printdots(len(to_clean))

        # A single failed remote cleanup aborts before the verification
        # entry is removed, so the task can be retried.
        for cmd in pool.getCompletedItems():
            res = cmd.get_results()
            if not res.wasSuccessful():
                logger.error('Failed to send cleanup on %s' % cmd.host)
                logger.error('Error: %s' % res.stderr)
                raise CleanVerificationError()
        RemoveVerificationEntry(token=self.token).run()
        logger.info('Verification %s has been cleaned.' % self.token)
Exemplo n.º 3
0
def get_info():
    """
    Get the current platform.

    @return: platform tag of the current system: 'OSX', 'SUSE', 'RHEL5',
             'RHEL6', 'SOL', or None when the platform is not recognized
    @rtype : String
    """
    import re

    myos = platform.system()
    if myos == "Darwin":
        return 'OSX'
    elif myos == "Linux":
        if os.path.exists("/etc/SuSE-release"):
            return 'SUSE'
        elif os.path.exists("/etc/redhat-release"):
            cmd = Command("run cat for RHEL version",
                          "cat /etc/redhat-release")
            cmd.run()
            msg = cmd.get_results().stdout
            # Fixed: the old check looked for the substring "5" anywhere in
            # the release line, so strings such as "CentOS release 6.5" were
            # misclassified as RHEL5. Parse the major version number instead.
            match = re.search(r"release (\d+)", msg)
            if match is not None and match.group(1) == "5":
                return 'RHEL5'
            return 'RHEL6'
    elif myos == "SunOS":
        return 'SOL'
    return None
Exemplo n.º 4
0
    def run(self):
        """
        Entry point: either build a gppkg (when a build target was given) or
        verify the environment can run gppkg (non-Solaris, rpm 4.x on PATH).

        @raise ExceptionNoStackTraceNeeded: on Solaris, on a non-4.x rpm,
            or when rpm is not available in PATH
        """
        if self.build:
            BuildGppkg(self.build).run()
            return

        #Check for RPM and Solaris OS
        if curr_platform == SUNOS:
            raise ExceptionNoStackTraceNeeded(
                'gppkg is not supported on Solaris')

        try:
            cmd = Command(name='Check for rpm', cmdStr='rpm --version')
            cmd.run(validateAfter=True)
            # "rpm --version" output ends with the version number; take the
            # last space-separated token as the version string.
            results = cmd.get_results().stdout.strip()
            rpm_version_string = results.split(' ')[-1]

            if not rpm_version_string.startswith('4.'):
                raise ExceptionNoStackTraceNeeded(
                    'gppkg requires rpm version 4.x')

        except ExecutionError, ex:
            # NOTE(review): only a "not found" failure is surfaced here; any
            # other ExecutionError is silently swallowed -- confirm intended.
            results = ex.cmd.get_results().stderr.strip()
            if len(results) != 0 and 'not found' in results:
                raise ExceptionNoStackTraceNeeded(
                    'gppkg requires RPM to be available in PATH')
Exemplo n.º 5
0
    def drop_database(dbname, retries = 5, sleep_interval = 5):
        """
        Execute dropdb against the given database.

        @type dbname: string
        @param dbname: Name of the database to be deleted

        @type retries: integer
        @param retries: Number of attempts to drop the database.

        @type sleep_interval: integer
        @param sleep_interval: Time in seconds between retry attempts

        @rtype: boolean
        @return: True if successful, False otherwise

        @raise PSQLException: When the database does not exist
        """
        # TBD: Use shell when available
        if not PSQL.database_exists(dbname):
            tinctest.logger.error("Database %s does not exist." %dbname)
            raise PSQLException('Database %s does not exist' %dbname)
        cmd = Command(name='drop database', cmdStr='dropdb %s' %(dbname))
        tinctest.logger.debug("Dropping database: %s" %cmd)
        # Retry dropdb up to `retries` times; success requires both a zero
        # return code and an empty stderr.
        count = 0
        while count < retries:
            cmd.run(validateAfter = False)
            result = cmd.get_results()
            tinctest.logger.debug("Output - %s" %result)
            if result.rc == 0 and not result.stderr:
                return True
            time.sleep(sleep_interval)
            count += 1
        return False
Exemplo n.º 6
0
    def check_gpfdist_process(self, wait=60, port=None, raise_assert=True):
        """
        Check for the gpfdist process.
        Polls roughly once per second until gpfdist is found and accepting
        connections, or until `wait` attempts have been made.

        @var wait: maximum number of one-second polling attempts
        @var port: Port Number (defaults to self.port)
        @var raise_assert: raise GpfdistError on timeout (default); otherwise
                           just print a message
        """
        if port is None:
            port = self.port
        process_started = False
        count = 0
        while (not process_started and count < wait):
            # Look for a "gpfdist -p <port>" process, excluding the grep itself.
            cmd_str = "%s -ef | grep \"gpfdist -p %s\" | grep -v grep" % (
                self.ps_command, port)
            cmd = Command(name='check for gpfdist', cmdStr=cmd_str)
            cmd.run()

            content = cmd.get_results().stdout
            if len(content) > 0:
                # NOTE(review): find() > 0 misses a match at index 0; ps
                # output normally has a prefix so this works, but >= 0 would
                # be safer -- confirm before changing.
                if content.find("gpfdist -p %s" % port) > 0:
                    # Process exists; also confirm it accepts connections.
                    process_started = self.is_gpfdist_connected(port)
                    if process_started:
                        return True
            count = count + 1
            time.sleep(1)
        if raise_assert:
            raise GpfdistError("Could not start gpfdist process")
        else:
            print "Could not start gpfdist process"
Exemplo n.º 7
0
def run_gppkg(pgport, gphome, mdd, loc, options="--install"):
    """Run gppkg on *loc* with the given options and return its stdout.

    Sets PGPORT and MASTER_DATA_DIRECTORY and sources greenplum_path.sh
    before invoking gppkg, all in a single shell command.
    """
    env_setup = "export PGPORT=%s; export MASTER_DATA_DIRECTORY=%s; source %s/greenplum_path.sh" % (
        pgport, mdd, gphome)
    full_cmd_str = "%s; gppkg %s %s" % (env_setup, options, loc)
    cmd = Command(name="Run gppkg", cmdStr=full_cmd_str)
    cmd.run(validateAfter=True)
    return cmd.get_results().stdout
Exemplo n.º 8
0
 def execute(self):
     """Run self.cmd_str on self.host and unpickle its stdout as the result."""
     remote = Command(name=self.__class__.__name__,
                      cmdStr=self.cmd_str,
                      ctxt=REMOTE,
                      remoteHost=self.host)
     remote.run(validateAfter=True)
     # TODO! If exception is raised remotely, there's no stdout, thereby causing a pickling error.
     return pickle.loads(remote.get_results().stdout)
Exemplo n.º 9
0
 def execute(self):
     """
     Collect the list of existing backup-set dates and drop the current
     DUMP_DATE from it, leaving only the old backup sets.

     With ddboost enabled the dates are listed remotely via gpddboost;
     otherwise they are read from the local master dump directory.
     """
     # NOTE(review): dburl and old_dates are not used further in this
     # excerpt -- the method body may be truncated here; verify upstream.
     dburl = dbconn.DbURL(port=self.master_port)
     if self.ddboost:
         cmd = Command('List directories in DDBoost db_dumps dir',
                       'gpddboost --listDir --dir=db_dumps/ | grep ^[0-9]')
         cmd.run(validateAfter=False)
         rc = cmd.get_results().rc
         if rc != 0:
             logger.info("Cannot find old backup sets to remove on DDboost")
             return
         old_dates = cmd.get_results().stdout.splitlines()
     else:
         old_dates = ListFiles(os.path.join(self.master_datadir,
                                            DUMP_DIR)).run()
     # The current dump date must not be treated as an old backup set.
     try:
         old_dates.remove(DUMP_DATE)
     except ValueError, e:  # DUMP_DATE was not found in old_dates
         pass
Exemplo n.º 10
0
def run_command(cmd_str):
    """Run *cmd_str* locally; return stripped stdout on success, stripped
    stderr on failure."""
    cmd = Command("Local Command", cmd_str)
    cmd.run(validateAfter=True)
    res = cmd.get_results()
    # NOTE(review): validateAfter=True presumably raises on failure, which
    # would make the non-zero branch unreachable -- confirm Command semantics.
    if res.rc == 0:
        return res.stdout.strip()
    return res.stderr.strip()
Exemplo n.º 11
0
def get_package_name(name):
    """
    Get the RPM package name for a package file.

    @param name: path to the rpm package file (passed to "rpm -qp")
    @return: stdout of "rpm -qp", i.e. the package name
    """
    rpm_cmd = 'rpm -qp %s' % (name)
    cmd = Command(name='get rpm package name', cmdStr=rpm_cmd)
    cmd.run(validateAfter=True)
    result = cmd.get_results()
    return result.stdout
Exemplo n.º 12
0
def run_shell_command(cmdstr, cmdname='shell command'):
    """Execute *cmdstr* in a shell, logging its output; True iff rc == 0."""
    cmd = Command(cmdname, cmdstr)
    tinctest.logger.info('Executing command: %s : %s' % (cmdname, cmdstr))
    cmd.run()
    result = cmd.get_results()
    tinctest.logger.info('Finished command execution with return code %s ' %
                         (str(result.rc)))
    tinctest.logger.debug('stdout: ' + result.stdout)
    tinctest.logger.debug('stderr: ' + result.stderr)
    return result.rc == 0
Exemplo n.º 13
0
    def execute(self):
        real_filename = os.path.join(
            self.master_datadir, DUMP_DIR, self.restore_timestamp[0:8],
            "%s%s" % (MASTER_DBDUMP_PREFIX, self.restore_timestamp))
        fake_filename = os.path.join(
            self.master_datadir, DUMP_DIR, self.fake_timestamp[0:8],
            "%s%s" % (MASTER_DBDUMP_PREFIX, self.fake_timestamp))
        real_file, fake_file = None, None
        try:
            if self.compress:
                real_filename += '.gz'
                fake_file = gzip.open(fake_filename + '.gz', 'w')
            else:
                fake_file = open(fake_filename, 'w')

            cmd = Command(
                'Copying master dump file from DDBoost',
                'gpddboost --readFile --from-file=%s | head -n 20' %
                (real_filename))

            cmd.run(validateAfter=True)
            line_list = cmd.get_results().stdout.splitlines()
            # TODO: copy over data among the first 20 lines that begin with 'SET'. Why 20? See gpdbrestore.sh:1025.
            # e.g.
            #  1 --
            #  2 -- Greenplum Database database dump
            #  3 --
            #  4
            #  5 SET statement_timeout = 0;
            #  6 SET client_encoding = 'UTF8';
            #  7 SET standard_conforming_strings = off;
            #  8 SET check_function_bodies = false;
            #  9 SET client_min_messages = warning;
            # 10 SET escape_string_warning = off;
            # 11
            # 12 SET default_with_oids = false;
            # 13
            # 14 --
            # 15 -- Name: SCHEMA public; Type: COMMENT; Schema: -; Owner: ashwin
            # 16 --
            # 17
            # 18 COMMENT ON SCHEMA public IS 'Standard public schema';
            # 19
            # 20
            for line in line_list:
                if line.startswith("SET"):
                    fake_file.write(line)
                if lineno > 20:
                    break
        except Exception, e:
            logger.exception('Master dump file build failed.')
            raise ExceptionNoStackTraceNeeded(
                'Master dump file build failed, review log file for details')
Exemplo n.º 14
0
 def run(self,option=' '):
     '''
     Run gprecoverseg with the given option.

     @type option: string
     @param option: gprecoverseg option (-F or -r)
     @return: True when gprecoverseg exits 0 with empty stderr, else False
     @raise GpRecoversegException: when option is not -F, -r or blank
     '''
     valid_options = ('-F', '-r', ' ')
     if option not in valid_options:
         raise GpRecoversegException('Not a valid option with gprecoverseg')
     recovery_cmd = 'gprecoverseg -a  %s' % option
     cmd = Command(name='Run gprecoverseg',
                   cmdStr='source %s/greenplum_path.sh;%s' % (self.gphome, recovery_cmd))
     tinctest.logger.info("Running gprecoverseg : %s" % cmd)
     cmd.run(validateAfter=True)
     result = cmd.get_results()
     return result.rc == 0 and not result.stderr
Exemplo n.º 15
0
 def inject_fault(self, y = None, f = None, r ='mirror', seg_id = None, H='ALL', m ='async', sleeptime = None, o =None, p=None, outfile=None):
     '''
     PURPOSE :
         Inject the fault using gpfaultinjector
     @param
         y : suspend/resume/reset/panic/fault
         f : Name of the fault
         outfile : output of the command is placed in this file
         rest_of_them : same as in gpfaultinjector help
     @return: tuple (ok, out) where ok is True on success and out is the
              command's stdout (stderr on failure)
     @raise Exception: when y or f is missing, MASTER_DATA_DIRECTORY is
              unset, or the injection fails (and y != 'status')
     '''
     if (not y) or (not f) :
         raise Exception("Need a value for type and name to continue")

     if(not os.getenv('MASTER_DATA_DIRECTORY')):
          raise Exception('MASTER_DATA_DIRECTORY environment variable is not set.')


     fault_cmd = "gpfaultinjector  -f %s -m %s -y %s " % (f, m, y )
     if seg_id :
         fault_cmd = fault_cmd + " -s %s" % seg_id
     if sleeptime :
         fault_cmd = fault_cmd + " -z %s" % sleeptime
     if o:
         fault_cmd = fault_cmd + " -o %s" % o
     if p :
         fault_cmd = fault_cmd + " -p %s" % p
     # Without an explicit segment, target all hosts/roles per H and r.
     if seg_id is None :
         fault_cmd = fault_cmd + " -H %s -r %s" % (H, r)
     # NOTE(review): sleeptime is passed twice (-z above and --sleep_time_s
     # here) -- confirm gpfaultinjector expects both flags.
     if sleeptime :
         fault_cmd = fault_cmd + " --sleep_time_s %s " % sleeptime
     if outfile !=  None:
         fault_cmd = fault_cmd + ">" +outfile

     cmd = Command('fault_command', fault_cmd)
     cmd.run()
     result = cmd.get_results()
     # A non-zero rc is tolerated for 'status' queries.
     if result.rc != 0 and  y != 'status':
         ok = False
         out = result.stderr
     else:
         ok =  True
         out = result.stdout

     # NOTE(review): ok can only be False when y != 'status', so the extra
     # y check here is redundant but harmless.
     if not ok and y != 'status':
         raise Exception("Failed to inject fault %s to %s" % (f,y))
     else:
         tinctest.logger.info('Injected fault %s ' % fault_cmd)
         return (ok,out)
Exemplo n.º 16
0
    def is_port_released(self, port=None):
        """
        Check whether the port is released after stopping gpfdist
        @var port: Port Number (defaults to self.port)
        @return: True or False
        """
        port_to_check = self.port if port is None else port
        cmd = Command(name='check gpfdist is released',
                      cmdStr="netstat -an |grep '*.%s'" % (port_to_check))
        cmd.run()

        results = cmd.get_results()
        # NOTE(review): non-empty stdout with rc == 1 is treated as "still
        # bound"; confirm this matches the local netstat/grep behavior.
        if results.rc == 1 and len(results.stdout) > 0:
            return False
        return True
Exemplo n.º 17
0
 def tearDown(self):
     """
     The method that sub-classes must implement as a cleanup for the database
     Typically called when setup fails midway

     @return: Nothing if drop db succeeded.  Raises TINCDatagenException if dropdb failed
     """
     marker = 'command_found_' + self.db_name
     output = PSQL.run_sql_command(
         "select 'command_found_' || datname from pg_database where datname like '"
         + self.db_name + "'")
     # Nothing to drop when the database is absent.
     if marker not in output:
         return
     cmd = Command('dropdb', "dropdb " + self.db_name)
     cmd.run(validateAfter=True)
     if cmd.get_results().rc != 0:
         raise TINCDatagenException('dropdb failed')
Exemplo n.º 18
0
    def run(self):
        """
        Run gpinitsystem
        rc=0, gpinitsystem has no warning(s) or error(s)
        rc=1, gpinitsystem has warning(s) but no error(s)
        rc=2, gpinitsystem has error(s)
        """
        self.create_datadir()
        init_cmd_str = 'source %s/%s; gpinitsystem -a -c %s' % (
            self.dir, self.source_path, self.config_file)
        cmd = Command(name='run gpinitsystem', cmdStr=init_cmd_str)
        cmd.run()
        result = cmd.get_results()

        # rc == 1 (warnings only) is tolerated; anything above is an error.
        if result.rc > 1:
            details = "stdout:%s\nstderr:%s" % (result.stdout, result.stderr)
            raise GpinitsystemError("gpinitsystem failed (%d): %s" %
                                    (result.rc, details))

        logger.debug("Successfully ran gpinitsystem ...")
Exemplo n.º 19
0
 def setUp(self):
     """
     The method that sub-classes must implement to setup a particular database.

     @rtype: boolean
     @return: True if db is already present; False if it is not present a new db was created
              Raises TINCDatagenException if db creation failed
     """
     # Assume setup is done if db exists
     marker = 'command_found_' + self.db_name
     output = PSQL.run_sql_command(
         "select 'command_found_' || datname from pg_database where datname like '"
         + self.db_name + "'")
     if marker in output:
         return True
     cmd = Command('createdb', "createdb " + self.db_name)
     cmd.run(validateAfter=True)
     if cmd.get_results().rc != 0:
         raise TINCDatagenException('createdb failed')
     return False
Exemplo n.º 20
0
    def is_gpfdist_connected(self, port=None):
        """
        Check gpfdist by connecting after starting process
        @var port: Port Number (defaults to self.port)
        @return: True or False
        """
        target_port = self.port if port is None else port

        url = "http://%s:%s" % (self.hostname, target_port)
        if self.secure:
            # Use https and tell curl to skip certificate verification.
            url = url.replace("http:", "https:") + " -k"

        cmd = Command(name='check gpfdist is connected',
                      cmdStr="curl %s" % (url))
        cmd.run()

        # curl reports connection failures on stderr.
        stderr_text = cmd.get_results().stderr
        failed = ("couldn't" in stderr_text
                  or "Failed to connect" in stderr_text)
        return not failed
Exemplo n.º 21
0
    def drop_caches(remoteHost='localhost'):
        """
        Refresh the system caches
        rc=0, drop caches successfully with no warning(s) or error(s)
        rc=1, drop caches successfully with warning(s) but no error(s)
        rc=2, drop caches with error(s), raise TINCSystemException

        @type remoteHost: string
        @param remoteHost: Host name of the machine where drop caches should be executed
        """
        cmd = Command('drop caches',
                      "echo 'echo 3  > /proc/sys/vm/drop_caches' |sudo -s",
                      ctxt=REMOTE,
                      remoteHost=remoteHost)
        cmd.run()
        result = cmd.get_results()
        # rc of 0 or 1 counts as success (warnings are tolerated).
        if result.rc <= 1:
            tinctest.logger.info("drop caches success with %s" % result)
            return
        msg = "drop caches failed with rc=%s and stderr=%s" % \
                (result.rc, result.stderr)
        tinctest.logger.warning(msg)
        raise TINCSystemException(msg)
Exemplo n.º 22
0
    def create_database(dbname):
        """
        Create a database with the given database name.

        @type dbname: string
        @param dbname: Name of the database to be created

        @rtype: boolean
        @return: True if successful, False otherwise

        @raise PSQLException: When the database already exists.
        """
        # TBD: Use shell when available
        if PSQL.database_exists(dbname):
            raise PSQLException("Database %s already exists" %dbname)
        # Fixed: the Command label used to read 'drop database', which made
        # logs for a failing createdb call misleading.
        cmd = Command(name='create database', cmdStr='createdb %s' %(dbname))
        tinctest.logger.debug("Creating database: %s" %cmd)
        cmd.run(validateAfter=False)
        result = cmd.get_results()
        tinctest.logger.debug("Output - %s" %result)
        # Success requires both rc == 0 and an empty stderr.
        if result.rc != 0 or result.stderr:
            return False
        return True
Exemplo n.º 23
0
    def execute(self):
        """
        List the (schema, table, owner) triples contained in a master dump.

        Validates self.restore_timestamp, then scans the dump's
        "SET search_path", "-- Data for Name:" and "COPY" lines to recover
        each dumped table with its schema and owner. The dump text is read
        either from DDBoost (pre-filtered via grep) or from the local dump
        file (gzip-aware).

        @return: list of (schema, table, owner) tuples
        """
        (restore_timestamp, restore_db, compress) = ValidateTimestamp(
            master_datadir=self.master_datadir,
            candidate_timestamp=self.restore_timestamp,
            ddboost=self.ddboost).run()

        def parse_dump_lines(lines):
            # Shared line parser -- the DDBoost and local code paths used to
            # carry two copies of this logic (the old TODO asked for exactly
            # this refactor). `lines` may be a list or a file object; the
            # parsing only relies on prefixes, so trailing newlines are fine.
            ret = []
            schema = ''
            owner = ''
            for line in lines:
                if line.startswith("SET search_path = "):
                    line = line[len("SET search_path = "):]
                    if ", pg_catalog;" in line:
                        schema = line[:line.index(", pg_catalog;")]
                    else:
                        schema = "pg_catalog"
                elif line.startswith("-- Data for Name: "):
                    owner = line[line.index("; Owner: ") + 9:].rstrip()
                elif line.startswith("COPY "):
                    # Strip the trailing "(columns) FROM stdin;" part,
                    # honoring quoted table names.
                    table = line[5:]
                    if table.rstrip().endswith(") FROM stdin;"):
                        if table.startswith("\""):
                            table = table[:table.index("\" (") + 1]
                        else:
                            table = table[:table.index(" (")]
                    else:
                        table = table[:table.index(" FROM stdin;")]
                    ret.append((schema, table.rstrip(), owner))
            return ret

        dump_file = os.path.join(
            self.master_datadir, DUMP_DIR, restore_timestamp[0:8],
            "%s%s" % (MASTER_DBDUMP_PREFIX, restore_timestamp))
        if compress:
            dump_file += '.gz'

        if self.ddboost:
            from_file = os.path.join(
                DUMP_DIR, restore_timestamp[0:8],
                "%s%s" % (MASTER_DBDUMP_PREFIX, restore_timestamp))
            if compress:
                from_file += '.gz'
                cmd = Command(
                    'DDBoost copy of master dump file',
                    'gpddboost --readFile --from-file=%s | gunzip | grep -e "SET search_path = " -e "-- Data for Name: " -e "COPY "'
                    % (from_file))
            else:
                cmd = Command(
                    'DDBoost copy of master dump file',
                    'gpddboost --readFile --from-file=%s | grep -e "SET search_path = " -e "-- Data for Name: " -e "COPY "'
                    % (from_file))
            cmd.run(validateAfter=True)
            return parse_dump_lines(cmd.get_results().stdout.splitlines())
        else:
            if compress:
                f = gzip.open(dump_file, 'r')
            else:
                f = open(dump_file, 'r')
            try:
                return parse_dump_lines(f)
            finally:
                f.close()
Exemplo n.º 24
0
                "Could not detect one of the supported products (gpdb, gpdb or postgres) in your environment. Make sure your environment is set correctly."
            )
            raise MPPTestCaseException(
                "Could not detect one of the supported products (gpdb, gpdb or postgres) in your environment. Make sure your environment is set correctly."
            )

        match_object = re.search("\((.+)\)", version_string_information)
        database_match = match_object.group(0)

        if "HAWQ" in database_match:
            self.product = 'gpdb'
            # Replace version_string_information to point to gpdb-version
            version_command = Command(name='get gpdb-version',
                                      cmdStr='postgres --gpdb-version')
            version_command.run(validateAfter=True)
            version_string_information = version_command.get_results().stdout
            tinctest.logger.info(
                "DUT is detected to be gpdb. Version string: %s" %
                version_string_information)
        elif "Greenplum Database" in database_match:
            tinctest.logger.info(
                "DUT is detected to be gpdb. Version string: %s" %
                version_string_information)
            self.product = 'gpdb'
        elif "PostgreSQL" in database_match:
            tinctest.logger.info(
                "DUT is detected to be postgres. Version string: %s" %
                version_string_information)
            self.product = 'postgres'
        else:
            tinctest.logger.critical(
Exemplo n.º 25
0
        except UnexpectedRowsError, e:
            logger.exception(
                "Disk space queries have failed. Cannot estimate disk space needed for dump."
            )
            raise ExceptionNoStackTraceNeeded(
                "Cannot estimate disk space needed for dump. Use -b to override this check."
            )
        finally:
            if conn is not None:
                conn.close()
        if self.compress:
            needed_space = needed_space / COMPRESSION_FACTOR

        cmd = Command('Querying free disk space', 'df -k %s' % self.datadir)
        cmd.run()
        lines = cmd.get_results().stdout.split('\n')
        columns = lines[1].split()
        free_space = int(columns[3])

        if free_space == 0 or (free_space - needed_space
                               ) / free_space < self.free_space_percent / 100:
            logger.error("Disk space: [Need: %dK, Free %dK]" %
                         (needed_space, free_space))
            raise NotEnoughDiskSpace(free_space, needed_space)
        logger.info("Disk space: [Need: %dK, Free %dK]" %
                    (needed_space, free_space))


class NotEnoughDiskSpace(Exception):
    """Raised when estimated dump space exceeds the available free space."""

    def __init__(self, free_space, needed_space):
        # Record both figures so callers can report the shortfall.
        self.free_space = free_space
        self.needed_space = needed_space
0
    def execute(self):
        """
        Build and run the gp_dump command line for this dump request.

        Assembles gp_dump options from this operation's settings (ddboost,
        catalog clearing, compression, encoding, schema, include/exclude
        tables, extra output options), invokes gp_dump, and reports timing
        plus the exit status.

        @return: dict with keys 'timestamp_start', 'time_start', 'time_end'
                 and 'exit_status'
        """
        # ValidateDumpDatabase returns the (validated) exclude list, which
        # replaces the original.
        self.exclude_dump_tables = ValidateDumpDatabase(
            dump_database=self.dump_database,
            dump_schema=self.dump_schema,
            include_dump_tables=self.include_dump_tables,
            exclude_dump_tables=self.exclude_dump_tables,
            backup_dir=self.backup_dir,
            report_dir=self.report_dir,
            free_space_percent=self.free_space_percent,
            compress=self.compress,
            batch_default=self.batch_default,
            master_datadir=self.master_datadir,
            master_port=self.master_port).run()

        # Resolve dump and report target directories; explicit overrides
        # fall back to defaults under DUMP_DIR/DUMP_DATE.
        if self.backup_dir is not None:
            dump_path = os.path.join(self.backup_dir, DUMP_DIR, DUMP_DATE)
        else:
            dump_path = os.path.join(DUMP_DIR, DUMP_DATE)
        if self.report_dir is not None:
            report_path = os.path.join(self.report_dir, DUMP_DIR, DUMP_DATE)
        else:
            report_path = os.path.join(self.master_datadir, DUMP_DIR,
                                       DUMP_DATE)
        dump_line = "gp_dump -p %d -U %s --gp-d=%s --gp-r=%s --gp-s=p" % (
            self.master_port, getUserName(), dump_path, report_path)
        if self.ddboost:
            dump_line += " --ddboost"
        if self.clear_catalog_dumps:
            dump_line += " -c"
        if self.compress:
            logger.info("Adding compression parameter")
            dump_line += " --gp-c"
        if self.encoding is not None:
            logger.info("Adding encoding %s" % self.encoding)
            dump_line += " --encoding=%s" % self.encoding
        """
        AK: Some ridiculous escaping here. I apologize.
        These options get passed-through gp_dump to gp_dump_agent.
        Commented out lines use escaping that would be reasonable, if gp_dump escaped properly.
        """
        if self.dump_schema is not None:
            logger.info("Adding schema name %s" % self.dump_schema)
            dump_line += " -n \"\\\"%s\\\"\"" % self.dump_schema
            #dump_line += " -n \"%s\"" % self.dump_schema
        dump_line += " %s" % self.dump_database
        for dump_table in self.include_dump_tables:
            schema, table = dump_table.split('.')
            dump_line += " --table=\"\\\"%s\\\"\".\"\\\"%s\\\"\"" % (schema,
                                                                     table)
            #dump_line += " --table=\"%s\".\"%s\"" % (schema, table)
        for dump_table in self.exclude_dump_tables:
            schema, table = dump_table.split('.')
            dump_line += " --exclude-table=\"\\\"%s\\\"\".\"\\\"%s\\\"\"" % (
                schema, table)
            #dump_line += " --exclude-table=\"%s\".\"%s\"" % (schema, table)
        for opt in self.output_options:
            dump_line += " %s" % opt
        logger.info("Dump command line %s" % dump_line)
        logger.info("Starting dump process")
        start = datetime.now()
        cmd = Command('Invoking gp_dump', dump_line)
        cmd.run()
        rc = cmd.get_results().rc
        # INJECT_GP_DUMP_FAILURE overrides the real exit code when set --
        # presumably a fault-injection test hook; confirm before relying on it.
        if INJECT_GP_DUMP_FAILURE is not None:
            rc = INJECT_GP_DUMP_FAILURE
        if rc != 0:
            logger.warn("Dump process returned exit code %d" % rc)
        else:
            logger.info("Dump process returned exit code 0")
        end = datetime.now()
        return {
            'timestamp_start': start.strftime("%Y%m%d%H%M%S"),
            'time_start': start.strftime("%H:%M:%S"),
            'time_end': end.strftime("%H:%M:%S"),
            'exit_status': rc
        }