Example #1
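A parseargs helper that seeds connection defaults from PGUSER, PGHOST, and PGPORT (falling back to unix.getUserName(), unix.getLocalHostname(), and 5432), builds an optparse-based parser, validates the port, and switches gplog between verbose and quiet output.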
def parseargs(args):
    global logger

    pguser = os.environ.get("PGUSER") or unix.getUserName()
    pghost = os.environ.get("PGHOST") or unix.getLocalHostname()
    pgport = os.environ.get("PGPORT") or 5432

    parser = OptParser(option_class=OptChecker)
    parser.remove_option('-h')
    parser.add_option('-?', '--help', '-h', action='store_true', default=False)
    parser.add_option('-n', '--host', default=pghost)
    parser.add_option('-p', '--port', default=pgport)
    parser.add_option('-u', '--username', default=pguser)
    parser.add_option('-w', '--password', default=False, action='store_true')
    parser.add_option('-v', '--verbose', default=False, action='store_true')
    parser.add_option('-q', '--quiet', default=True, action='store_true')

    (options, args) = parser.parse_args()

    if options.help:
        print __doc__
        sys.exit(1)
    try:
        options.port = int(options.port)
    except:
        logger.error("Invalid PORT: '%s'" % options.port)
        sys.exit(1)

    if options.verbose:
        gplog.enable_verbose_logging()
    elif options.quiet:
        gplog.quiet_stdout_logging()

    return options
Example #2
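A test harness entry point: it swaps the test arguments onto sys.argv, registers a test execution-context factory, sets up tool logging with the local hostname and user name, builds the parser and command from the supplied factories, and records the exit code or any validation error in the test output.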
def test_main( testName, newProgramArgs, createOptionParserFn, createCommandFn, extraOutputGenerators, expectedOutput) :
    global gTestOutput

    # update args
    previousArgs = sys.argv
    sys.argv = []
    sys.argv.append(getProgramName())
    sys.argv.extend(newProgramArgs)

    # register command factory
    setExecutionContextFactory(TestExecutionContextFactory())

    commandObject=None
    parser = None

    startTest(testName)

    try:
        gplog.setup_tool_logging(gProgramName,unix.getLocalHostname(),unix.getUserName(),nonuser=False)

        parser = createOptionParserFn()
        (options, args) = parser.parse_args()
        gplog.enable_verbose_logging()

        commandObject = createCommandFn(options, args)
        exitCode = commandObject.run()

        testOutput("sys.exit %s" % exitCode)

    except ProgramArgumentValidationException, e:
        testOutput( "Validation error: %s" % e.getMessage())
Example #3
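The same test harness as Example #2, with the long signature and call sites wrapped across lines.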
def test_main(testName, newProgramArgs, createOptionParserFn, createCommandFn,
              extraOutputGenerators, expectedOutput):
    global gTestOutput

    # update args
    previousArgs = sys.argv
    sys.argv = []
    sys.argv.append(getProgramName())
    sys.argv.extend(newProgramArgs)

    # register command factory
    setExecutionContextFactory(TestExecutionContextFactory())

    commandObject = None
    parser = None

    startTest(testName)

    try:
        gplog.setup_tool_logging(gProgramName,
                                 unix.getLocalHostname(),
                                 unix.getUserName(),
                                 nonuser=False)

        parser = createOptionParserFn()
        (options, args) = parser.parse_args()
        gplog.enable_verbose_logging()

        commandObject = createCommandFn(options, args)
        exitCode = commandObject.run()

        testOutput("sys.exit %s" % exitCode)

    except ProgramArgumentValidationException, e:
        testOutput("Validation error: %s" % e.getMessage())
Example #4
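A verbatim copy of the parseargs helper from Example #1.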
def parseargs(args):
    global logger

    pguser = os.environ.get("PGUSER") or unix.getUserName()
    pghost = os.environ.get("PGHOST") or unix.getLocalHostname()
    pgport = os.environ.get("PGPORT") or 5432

    parser = OptParser(option_class=OptChecker)
    parser.remove_option('-h')
    parser.add_option('-?', '--help', '-h', action='store_true', default=False)
    parser.add_option('-n', '--host', default=pghost)
    parser.add_option('-p', '--port', default=pgport)
    parser.add_option('-u', '--username', default=pguser)
    parser.add_option('-w', '--password', default=False, action='store_true')
    parser.add_option('-v', '--verbose', default=False, action='store_true')
    parser.add_option('-q', '--quiet', default=True, action='store_true')

    (options, args) = parser.parse_args()

    if options.help:
        print __doc__
        sys.exit(1)
    try:
        options.port = int(options.port)
    except:
        logger.error("Invalid PORT: '%s'" % options.port)
        sys.exit(1)

    if options.verbose:
        gplog.enable_verbose_logging()
    elif options.quiet:
        gplog.quiet_stdout_logging()

    return options
Example #5
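Appends pg_hba.conf entries on every primary segment so its mirror can open replication connections, using resolved hostnames when hba_hostnames is set and per-address CIDR entries otherwise, then reloads each primary via a remote Command.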
    def config_primaries_for_replication(self, gpArray):
        logger.info(
            "Starting to modify pg_hba.conf on primary segments to allow replication connections"
        )
        replicationStr = ". {0}/greenplum_path.sh; echo 'host  replication {1} samenet trust {2}' >> {3}/pg_hba.conf; pg_ctl -D {3} reload"

        try:
            for segmentPair in gpArray.getSegmentList():
                allow_pair_hba_line_entries = []
                if self.__options.hba_hostnames:
                    mirror_hostname, _, _ = socket.gethostbyaddr(
                        segmentPair.mirrorDB.getSegmentHostName())
                    hba_line_entry = "\nhost all {0} {1} trust".format(
                        unix.getUserName(), mirror_hostname)
                    allow_pair_hba_line_entries.append(hba_line_entry)
                else:
                    mirror_ips = unix.InterfaceAddrs.remote(
                        'get mirror ips',
                        segmentPair.mirrorDB.getSegmentHostName())
                    for ip in mirror_ips:
                        cidr_suffix = '/128' if ':' in ip else '/32'
                        cidr = ip + cidr_suffix
                        hba_line_entry = "\nhost all {0} {1} trust".format(
                            unix.getUserName(), cidr)
                        allow_pair_hba_line_entries.append(hba_line_entry)
                cmdStr = replicationStr.format(
                    os.environ["GPHOME"], unix.getUserName(),
                    " ".join(allow_pair_hba_line_entries),
                    segmentPair.primaryDB.datadir)
                logger.debug(cmdStr)
                cmd = Command(name="append to pg_hba.conf",
                              cmdStr=cmdStr,
                              ctxt=base.REMOTE,
                              remoteHost=segmentPair.primaryDB.hostname)
                cmd.run(validateAfter=True)

        except Exception, e:
            logger.error(
                "Failed while modifying pg_hba.conf on primary segments to allow replication connections: %s"
                % str(e))
            raise
Example #6
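A standalone helper that builds the pg_hba.conf lines for a primary/mirror pair: a samehost replication entry for single-host development, plus host and replication entries keyed either on resolved hostnames or on CIDR-suffixed interface addresses.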
def create_entries(primary_hostname, mirror_hostname, hba_hostnames):
    # Start with an empty string so that the later .join prepends a newline to the first entry
    entries = ['']
    # Add the samehost replication entry to support single-host development
    entries.append('host replication {username} samehost trust'.format(
        username=unix.getUserName()))
    if hba_hostnames:
        mirror_hostname, _, _ = socket.gethostbyaddr(mirror_hostname)
        entries.append("host all {username} {hostname} trust".format(
            username=unix.getUserName(), hostname=mirror_hostname))
        entries.append("host replication {username} {hostname} trust".format(
            username=unix.getUserName(), hostname=mirror_hostname))
        primary_hostname, _, _ = socket.gethostbyaddr(primary_hostname)
        if mirror_hostname != primary_hostname:
            entries.append(
                "host replication {username} {hostname} trust".format(
                    username=unix.getUserName(), hostname=primary_hostname))
    else:
        segment_pair_ips = gp.IfAddrs.list_addrs(mirror_hostname)
        for ip in segment_pair_ips:
            cidr_suffix = '/128' if ':' in ip else '/32'
            cidr = ip + cidr_suffix
            hba_line_entry = "host all {username} {cidr} trust".format(
                username=unix.getUserName(), cidr=cidr)
            entries.append(hba_line_entry)
        if mirror_hostname != primary_hostname:
            segment_pair_ips.extend(gp.IfAddrs.list_addrs(primary_hostname))
        for ip in segment_pair_ips:
            cidr_suffix = '/128' if ':' in ip else '/32'
            cidr = ip + cidr_suffix
            hba_line_entry = "host replication {username} {cidr} trust".format(
                username=unix.getUserName(), cidr=cidr)
            entries.append(hba_line_entry)
    return entries
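
A minimal usage sketch for the helper above (the hostnames and pg_hba.conf path are hypothetical, and gp.IfAddrs must be able to reach the hosts): it appends the joined entries to a primary's pg_hba.conf locally, which is what Examples #5 and #17 do remotely through a Command.

# Hypothetical hosts and data directory -- illustration only.
entries = create_entries("primary-host.example", "mirror-host.example",
                         hba_hostnames=False)

# create_entries() seeds the list with '' so this join starts with a newline,
# matching how the remote callers append to an existing pg_hba.conf.
with open("/data/primary/gpseg0/pg_hba.conf", "a") as hba:
    hba.write("\n".join(entries) + "\n")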
Example #7
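A simpler variant of Example #5 that appends a single samenet replication entry to each primary's pg_hba.conf and reloads it.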
    def config_primaries_for_replication(self, gpArray):
        logger.info("Starting to modify pg_hba.conf on primary segments to allow replication connections")
        replicationStr = ". {0}/greenplum_path.sh; echo 'host  replication {1} samenet trust' >> {2}/pg_hba.conf; pg_ctl -D {2} reload"

        try:
            for segmentPair in gpArray.getSegmentList():
                cmdStr = replicationStr.format(os.environ["GPHOME"], unix.getUserName(), segmentPair.primaryDB.datadir)
                logger.debug(cmdStr)
                cmd = Command(name="append to pg_hba.conf", cmdStr=cmdStr, ctxt=base.REMOTE, remoteHost=segmentPair.primaryDB.hostname)
                cmd.run(validateAfter=True)

        except Exception, e:
            logger.error("Failed while modifying pg_hba.conf on primary segments to allow replication connections: %s" % str(e))
            raise
Example #8
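A segment-recovery entry point: it parses options, sets up tool logging in the requested log directory, deserializes the --confinfo payload into recovery entries, builds the command list, and runs it through a WorkerPool sized by --batch-size, exiting non-zero on any error.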
    def main(self, file_name, get_cmd_list):
        pool = None
        logger = None
        try:
            options = self.parseargs()
            exec_name = os.path.split(file_name)[-1]
            logger = gplog.setup_tool_logging(exec_name,
                                              unix.getLocalHostname(),
                                              unix.getUserName(),
                                              logdir=options.logfileDirectory)

            if not options.confinfo:
                raise Exception('Missing --confinfo argument.')

            if options.batch_size <= 0:
                logger.warn('batch_size was less than zero.  Setting to 1.')
                options.batch_size = 1

            if options.verbose:
                gplog.enable_verbose_logging()

            # TODO: should we output the name of the exact file?
            logger.info("Starting recovery with args: %s" %
                        ' '.join(sys.argv[1:]))

            seg_recovery_info_list = recoveryinfo.deserialize_recovery_info_list(
                options.confinfo)
            if len(seg_recovery_info_list) == 0:
                raise Exception(
                    'No segment configuration values found in --confinfo argument'
                )

            cmd_list = get_cmd_list(seg_recovery_info_list,
                                    options.forceoverwrite, logger)

            pool = WorkerPool(
                numWorkers=min(options.batch_size, len(cmd_list)))
            self.run_cmd_list(cmd_list, logger, options, pool)

            sys.exit(0)
        except Exception as e:
            if logger:
                logger.error(str(e))
            print(e, file=sys.stderr)
            sys.exit(1)
        finally:
            if pool:
                pool.haltWork()
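
run_cmd_list itself is not shown; as a rough sketch of the gppylib WorkerPool pattern this entry point relies on (method names are my assumption about the library, not taken from this listing), queued commands are joined and checked before the pool is halted:

pool = WorkerPool(numWorkers=4)
try:
    for host in ("sdw1", "sdw2"):  # illustrative hostnames
        pool.addCommand(Command(name="check uptime", cmdStr="uptime",
                                ctxt=base.REMOTE, remoteHost=host))
    pool.join()           # wait for every queued command to finish
    pool.check_results()  # raise if any command failed
finally:
    pool.haltWork()       # stop the worker threads, as the example above does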
Example #9
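The shared simple_main_locked driver: it registers the configuration, filesystem, OS, and fault-prober providers, parses options, configures tool or helper-tool logging with quiet/verbose handling, logs the startup arguments, and runs the command object, recording its exit status.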
def simple_main_locked(createOptionParserFn, createCommandFn, mainOptions):
    """
    Not to be called externally -- use simple_main instead
    """
    logger = gplog.get_default_logger()

    configurationInterface.registerConfigurationProvider(
        configurationImplGpdb.GpConfigurationProviderUsingGpdbCatalog())
    fileSystemInterface.registerFileSystemProvider(
        fileSystemImplOs.GpFileSystemProviderUsingOs())
    osInterface.registerOsProvider(osImplNative.GpOsProviderUsingNative())
    faultProberInterface.registerFaultProber(
        faultProberImplGpdb.GpFaultProberImplGpdb())

    commandObject = None
    parser = None

    forceQuiet = mainOptions is not None and mainOptions.get(
        "forceQuietOutput")
    options = None

    if mainOptions is not None and mainOptions.get("programNameOverride"):
        global gProgramName
        gProgramName = mainOptions.get("programNameOverride")
    suppressStartupLogMessage = mainOptions is not None and mainOptions.get(
        "suppressStartupLogMessage")

    useHelperToolLogging = mainOptions is not None and mainOptions.get(
        "useHelperToolLogging")
    nonuser = True if mainOptions is not None and mainOptions.get(
        "setNonuserOnToolLogger") else False
    exit_status = 1

    try:
        execname = getProgramName()
        hostname = unix.getLocalHostname()
        username = unix.getUserName()

        parser = createOptionParserFn()
        (options, args) = parser.parse_args()

        if useHelperToolLogging:
            gplog.setup_helper_tool_logging(execname, hostname, username)
        else:
            gplog.setup_tool_logging(execname,
                                     hostname,
                                     username,
                                     logdir=options.ensure_value(
                                         "logfileDirectory", None),
                                     nonuser=nonuser)

        if forceQuiet:
            gplog.quiet_stdout_logging()
        else:
            if options.ensure_value("verbose", False):
                gplog.enable_verbose_logging()
            if options.ensure_value("quiet", False):
                gplog.quiet_stdout_logging()

        if options.ensure_value("masterDataDirectory", None) is not None:
            options.master_data_directory = os.path.abspath(
                options.masterDataDirectory)

        if not suppressStartupLogMessage:
            logger.info("Starting %s with args: %s" %
                        (gProgramName, ' '.join(sys.argv[1:])))

        commandObject = createCommandFn(options, args)
        exitCode = commandObject.run()
        exit_status = exitCode

    except ProgramArgumentValidationException, e:
        if e.shouldPrintHelp():
            parser.print_help()
        logger.error("%s: error: %s" % (gProgramName, e.getMessage()))
        exit_status = 2
Example #10
A syncmaster log watcher: close() releases the watched log handles, and the __main__ block sets up gpAdminLogs tool logging, runs SyncmasterWatcher over the directory given on the command line, and exits with the monitor result.
    def close(self):
        """
        Closes all handles to the logs we're watching.
        """
        for h in self.handles.values():
            h.close()
        self.handles = {}



if __name__ == '__main__':

    # setup gpAdminLogs logging
    execname = os.path.split(sys.argv[0])[-1]
    hostname = unix.getLocalHostname()
    username = unix.getUserName()
    setup_tool_logging(execname, hostname, username)
    logger = get_default_logger()

    # watch syncmaster logs
    if len(sys.argv) > 2 and sys.argv[2] == 'debug':
        logger.info("Checking standby master status")
    watcher = SyncmasterWatcher( sys.argv[1] )
    rc = watcher.monitor_logs()
    watcher.close()

    # report final status
    # logger.info("exiting with %s" % rc)
    sys.exit( rc )
Example #11
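An execute() method that validates the dump database, assembles a gp_dump command line (port, user, dump and report paths, optional catalog clearing, compression, encoding, and heavily escaped schema/table filters), runs it, and returns start and end times along with the exit status.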
    def execute(self):
        self.exclude_dump_tables = ValidateDumpDatabase(
            dump_database=self.dump_database,
            dump_schema=self.dump_schema,
            include_dump_tables=self.include_dump_tables,
            exclude_dump_tables=self.exclude_dump_tables,
            include_dump_tables_file=self.include_dump_tables_file[0],
            exclude_dump_tables_file=self.exclude_dump_tables_file[0],
            backup_dir=self.backup_dir,
            report_dir=self.report_dir,
            free_space_percent=self.free_space_percent,
            compress=self.compress,
            batch_default=self.batch_default,
            master_datadir=self.master_datadir,
            master_port=self.master_port).run()

        if self.backup_dir is not None:
            dump_path = os.path.join(self.backup_dir, DUMP_DIR, DUMP_DATE)
        else:
            dump_path = os.path.join(DUMP_DIR, DUMP_DATE)
        if self.report_dir is not None:
            report_path = os.path.join(self.report_dir, DUMP_DIR, DUMP_DATE)
        else:
            report_path = os.path.join(self.master_datadir, DUMP_DIR,
                                       DUMP_DATE)
        dump_line = "gp_dump -p %d -U %s --gp-d=%s --gp-r=%s --gp-s=p" % (
            self.master_port, getUserName(), dump_path, report_path)
        if self.clear_catalog_dumps:
            dump_line += " -c"
        if self.compress:
            logger.info("Adding compression parameter")
            dump_line += " --gp-c"
        if self.encoding is not None:
            logger.info("Adding encoding %s" % self.encoding)
            dump_line += " --encoding=%s" % self.encoding
        """
        AK: Some ridiculous escaping here. I apologize.
        These options get passed-through gp_dump to gp_dump_agent.
        Commented out lines use escaping that would be reasonable, if gp_dump escaped properly.
        """
        if self.dump_schema is not None:
            logger.info("Adding schema name %s" % self.dump_schema)
            dump_line += " -n \"\\\"%s\\\"\"" % self.dump_schema
            #dump_line += " -n \"%s\"" % self.dump_schema
        dump_line += " %s" % self.dump_database
        for dump_table in self.include_dump_tables:
            schema, table = dump_table.split('.')
            dump_line += " --table=\"\\\"%s\\\"\".\"\\\"%s\\\"\"" % (schema,
                                                                     table)
            #dump_line += " --table=\"%s\".\"%s\"" % (schema, table)
        for dump_table in self.exclude_dump_tables:
            schema, table = dump_table.split('.')
            dump_line += " --exclude-table=\"\\\"%s\\\"\".\"\\\"%s\\\"\"" % (
                schema, table)
            #dump_line += " --exclude-table=\"%s\".\"%s\"" % (schema, table)
        if self.include_dump_tables_file[0] is not None:
            dump_line += " --table-file=%s" % self.include_dump_tables_file
        if self.exclude_dump_tables_file[0] is not None:
            dump_line += " --exclude-table-file=%s" % self.exclude_dump_tables_file
        for opt in self.output_options:
            dump_line += " %s" % opt
        logger.info("Dump command line %s" % dump_line)
        logger.info("Starting dump process")
        start = datetime.now()
        cmd = Command('Invoking gp_dump', dump_line)
        cmd.run()
        rc = cmd.get_results().rc
        if INJECT_GP_DUMP_FAILURE is not None:
            rc = INJECT_GP_DUMP_FAILURE
        if rc != 0:
            logger.warn("Dump process returned exit code %d" % rc)
        else:
            logger.info("Dump process returned exit code 0")
        end = datetime.now()
        return {
            'timestamp_start': start.strftime("%Y%m%d%H%M%S"),
            'time_start': start.strftime("%H:%M:%S"),
            'time_end': end.strftime("%H:%M:%S"),
            'exit_status': rc
        }
Example #12
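The same dump operation as Example #11 in its original, wider formatting.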
    def execute(self):
        self.exclude_dump_tables = ValidateDumpDatabase(dump_database = self.dump_database,
                                                        dump_schema = self.dump_schema,
                                                        include_dump_tables = self.include_dump_tables,
                                                        exclude_dump_tables = self.exclude_dump_tables,
                                                        include_dump_tables_file = self.include_dump_tables_file[0],
                                                        exclude_dump_tables_file = self.exclude_dump_tables_file[0],
                                                        backup_dir = self.backup_dir,
                                                        report_dir = self.report_dir,
                                                        free_space_percent = self.free_space_percent,
                                                        compress = self.compress,
                                                        batch_default = self.batch_default,
                                                        master_datadir = self.master_datadir,
                                                        master_port = self.master_port).run()

        if self.backup_dir is not None:
            dump_path = os.path.join(self.backup_dir, DUMP_DIR, DUMP_DATE) 
        else:
            dump_path = os.path.join(DUMP_DIR, DUMP_DATE)
        if self.report_dir is not None:
            report_path = os.path.join(self.report_dir, DUMP_DIR, DUMP_DATE)
        else:
            report_path = os.path.join(self.master_datadir, DUMP_DIR, DUMP_DATE)
        dump_line = "gp_dump -p %d -U %s --gp-d=%s --gp-r=%s --gp-s=p" % (self.master_port, getUserName(), dump_path, report_path) 
        if self.clear_catalog_dumps:
            dump_line += " -c"
        if self.compress:
            logger.info("Adding compression parameter")
            dump_line += " --gp-c"
        if self.encoding is not None:
            logger.info("Adding encoding %s" % self.encoding)
            dump_line += " --encoding=%s" % self.encoding

        """
        AK: Some ridiculous escaping here. I apologize.
        These options get passed-through gp_dump to gp_dump_agent.
        Commented out lines use escaping that would be reasonable, if gp_dump escaped properly.
        """
        if self.dump_schema is not None:
            logger.info("Adding schema name %s" % self.dump_schema)
            dump_line += " -n \"\\\"%s\\\"\"" % self.dump_schema
            #dump_line += " -n \"%s\"" % self.dump_schema
        dump_line += " %s" % self.dump_database
        for dump_table in self.include_dump_tables:
            schema, table = dump_table.split('.')
            dump_line += " --table=\"\\\"%s\\\"\".\"\\\"%s\\\"\"" % (schema, table)
            #dump_line += " --table=\"%s\".\"%s\"" % (schema, table)
        for dump_table in self.exclude_dump_tables:
            schema, table = dump_table.split('.')
            dump_line += " --exclude-table=\"\\\"%s\\\"\".\"\\\"%s\\\"\"" % (schema, table)
            #dump_line += " --exclude-table=\"%s\".\"%s\"" % (schema, table)
        if self.include_dump_tables_file[0] is not None:
            dump_line += " --table-file=%s" % self.include_dump_tables_file
        if self.exclude_dump_tables_file[0] is not None:
            dump_line += " --exclude-table-file=%s" % self.exclude_dump_tables_file
        for opt in self.output_options:
            dump_line += " %s" % opt
        logger.info("Dump command line %s" % dump_line)
        logger.info("Starting dump process")
        start = datetime.now()
        cmd = Command('Invoking gp_dump', dump_line)
        cmd.run()   
        rc = cmd.get_results().rc
        if INJECT_GP_DUMP_FAILURE is not None:
            rc = INJECT_GP_DUMP_FAILURE
        if rc != 0:
            logger.warn("Dump process returned exit code %d" % rc)
        else:
            logger.info("Dump process returned exit code 0")        
        end = datetime.now()
        return {'timestamp_start': start.strftime("%Y%m%d%H%M%S"),
                'time_start': start.strftime("%H:%M:%S"),
                'time_end': end.strftime("%H:%M:%S"),
                'exit_status': rc}
Example #13
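Another copy of the simple_main_locked driver from Example #9; this version carries the note that test_main in testUtils.py must be kept in sync with its logic.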
def simple_main_locked(createOptionParserFn, createCommandFn, mainOptions):
    """
    Not to be called externally -- use simple_main instead
    """
    logger = gplog.get_default_logger()

    configurationInterface.registerConfigurationProvider(
        configurationImplGpdb.GpConfigurationProviderUsingGpdbCatalog())
    fileSystemInterface.registerFileSystemProvider(fileSystemImplOs.GpFileSystemProviderUsingOs())
    osInterface.registerOsProvider(osImplNative.GpOsProviderUsingNative())
    faultProberInterface.registerFaultProber(faultProberImplGpdb.GpFaultProberImplGpdb())

    commandObject = None
    parser = None

    forceQuiet = mainOptions is not None and mainOptions.get("forceQuietOutput")
    options = None

    if mainOptions is not None and mainOptions.get("programNameOverride"):
        global gProgramName
        gProgramName = mainOptions.get("programNameOverride")
    suppressStartupLogMessage = mainOptions is not None and mainOptions.get("suppressStartupLogMessage")

    useHelperToolLogging = mainOptions is not None and mainOptions.get("useHelperToolLogging")
    nonuser = True if mainOptions is not None and mainOptions.get("setNonuserOnToolLogger") else False
    exit_status = 1

    # NOTE: if this logic is changed then also change test_main in testUtils.py
    try:
        execname = getProgramName()
        hostname = unix.getLocalHostname()
        username = unix.getUserName()

        parser = createOptionParserFn()
        (options, args) = parser.parse_args()

        if useHelperToolLogging:
            gplog.setup_helper_tool_logging(execname, hostname, username)
        else:
            gplog.setup_tool_logging(execname, hostname, username,
                                     logdir=options.ensure_value("logfileDirectory", None), nonuser=nonuser)

        if forceQuiet:
            gplog.quiet_stdout_logging()
        else:
            if options.ensure_value("verbose", False):
                gplog.enable_verbose_logging()
            if options.ensure_value("quiet", False):
                gplog.quiet_stdout_logging()

        if options.ensure_value("masterDataDirectory", None) is not None:
            options.master_data_directory = os.path.abspath(options.masterDataDirectory)

        if not suppressStartupLogMessage:
            logger.info("Starting %s with args: %s" % (gProgramName, ' '.join(sys.argv[1:])))

        commandObject = createCommandFn(options, args)
        exitCode = commandObject.run()
        exit_status = exitCode

    except ProgramArgumentValidationException, e:
        if e.shouldPrintHelp():
            parser.print_help()
        logger.error("%s: error: %s" % (gProgramName, e.getMessage()))
        exit_status = 2
Example #14
The syncmaster watcher again: the tail of monitor_logs() reports a healthy syncmaster and returns 0, close() releases the log handles, and the __main__ block sets up tool logging, runs the watcher, and exits with its result.
        # syncmaster is running and there are no obvious errors in the log
        logger.info("syncmaster appears ok, pid %s" % pid)
        return 0

    def close(self):
        """
        Closes all handles to the logs we're watching.
        """
        for h in self.handles.values():
            h.close()
        self.handles = {}


if __name__ == '__main__':

    # setup gpAdminLogs logging
    execname = os.path.split(sys.argv[0])[-1]
    hostname = unix.getLocalHostname()
    username = unix.getUserName()
    setup_tool_logging(execname, hostname, username)
    logger = get_default_logger()

    # watch syncmaster logs
    watcher = SyncmasterWatcher(sys.argv[1])
    rc = watcher.monitor_logs()
    watcher.close()

    # report final status
    logger.info("exiting with %s" % rc)
    sys.exit(rc)
Example #15
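A later revision of simple_main_locked: Python 3 exception syntax, coordinatorDataDirectory in place of masterDataDirectory, and explicit handlers for validation errors, no-stack-trace errors, user aborts, execution errors, generic exceptions, and KeyboardInterrupt before exiting with the recorded status.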
def simple_main_locked(createOptionParserFn, createCommandFn, mainOptions):
    """
    Not to be called externally -- use simple_main instead
    """
    logger = gplog.get_default_logger()

    configurationInterface.registerConfigurationProvider(
        configurationImplGpdb.GpConfigurationProviderUsingGpdbCatalog())
    fileSystemInterface.registerFileSystemProvider(fileSystemImplOs.GpFileSystemProviderUsingOs())
    osInterface.registerOsProvider(osImplNative.GpOsProviderUsingNative())
    faultProberInterface.registerFaultProber(faultProberImplGpdb.GpFaultProberImplGpdb())

    commandObject = None
    parser = None

    forceQuiet = mainOptions is not None and mainOptions.get("forceQuietOutput")
    options = None

    if mainOptions is not None and mainOptions.get("programNameOverride"):
        global gProgramName
        gProgramName = mainOptions.get("programNameOverride")
    suppressStartupLogMessage = mainOptions is not None and mainOptions.get("suppressStartupLogMessage")

    useHelperToolLogging = mainOptions is not None and mainOptions.get("useHelperToolLogging")
    nonuser = True if mainOptions is not None and mainOptions.get("setNonuserOnToolLogger") else False
    exit_status = 1

    try:
        execname = getProgramName()
        hostname = unix.getLocalHostname()
        username = unix.getUserName()

        parser = createOptionParserFn()
        (options, args) = parser.parse_args()

        if useHelperToolLogging:
            gplog.setup_helper_tool_logging(execname, hostname, username)
        else:
            gplog.setup_tool_logging(execname, hostname, username,
                                     logdir=options.ensure_value("logfileDirectory", None), nonuser=nonuser)

        if forceQuiet:
            gplog.quiet_stdout_logging()
        else:
            if options.ensure_value("verbose", False):
                gplog.enable_verbose_logging()
            if options.ensure_value("quiet", False):
                gplog.quiet_stdout_logging()

        if options.ensure_value("coordinatorDataDirectory", None) is not None:
            options.coordinator_data_directory = os.path.abspath(options.coordinatorDataDirectory)

        if not suppressStartupLogMessage:
            logger.info("Starting %s with args: %s" % (gProgramName, ' '.join(sys.argv[1:])))

        commandObject = createCommandFn(options, args)
        exitCode = commandObject.run()
        exit_status = exitCode

    except ProgramArgumentValidationException as e:
        if e.shouldPrintHelp():
            parser.print_help()
        logger.error("%s: error: %s" % (gProgramName, e.getMessage()))
        exit_status = 2
    except ExceptionNoStackTraceNeeded as e:
        logger.error("%s error: %s" % (gProgramName, e))
        exit_status = 2
    except UserAbortedException as e:
        logger.info("User abort requested, Exiting...")
        exit_status = 4
    except ExecutionError as e:
        logger.fatal("Error occurred: %s\n Command was: '%s'\n"
                     "rc=%d, stdout='%s', stderr='%s'" % \
                     (e.summary, e.cmd.cmdStr, e.cmd.results.rc, e.cmd.results.stdout,
                      e.cmd.results.stderr))
        exit_status = 2
    except Exception as e:
        if options is None:
            logger.exception("%s failed.  exiting...", gProgramName)
        else:
            if options.ensure_value("verbose", False):
                logger.exception("%s failed.  exiting...", gProgramName)
            else:
                logger.fatal("%s failed. (Reason='%s') exiting..." % (gProgramName, e))
        exit_status = 2
    except KeyboardInterrupt:
        exit_status = 2
    finally:
        if commandObject:
            commandObject.cleanup()
    sys.exit(exit_status)
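
For context, a rough sketch of the two factories a tool would supply to this driver (all names below are illustrative, not from the source; per the docstring the public entry point is simple_main, which is not shown in this listing):

def create_option_parser():
    # Same OptParser/OptChecker pair used by the parseargs examples above.
    parser = OptParser(option_class=OptChecker)
    parser.add_option('-v', '--verbose', action='store_true', default=False)
    parser.add_option('-q', '--quiet', action='store_true', default=False)
    return parser

class NoopCommand:
    """A do-nothing command matching the run()/cleanup() contract used above."""
    def __init__(self, options, args):
        self.options, self.args = options, args

    def run(self):
        return 0  # becomes exit_status in simple_main_locked

    def cleanup(self):
        pass

# A tool would hand these to simple_main(create_option_parser, NoopCommand),
# which is expected (assumption) to take a lock and then call simple_main_locked.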
Example #16
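The parseargs method of a recovery helper run by gprecoverseg and gpaddmirrors: it defines --confinfo, --batch-size, --force-overwrite, --log-dir, and --era, validates the required options, sets up tool logging, and deserializes the recovery info list.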
    def parseargs(self):
        parser = OptParser(option_class=OptChecker,
                           description=' '.join(self.description.split()),
                           version='%prog version $Revision: $')
        parser.set_usage(
            '%prog is a utility script used by gprecoverseg, and gpaddmirrors and is not intended to be run separately.'
        )
        parser.remove_option('-h')

        #TODO we may not need the verbose flag
        parser.add_option('-v',
                          '--verbose',
                          action='store_true',
                          help='debug output.',
                          default=False)
        parser.add_option('-c', '--confinfo', type='string')
        parser.add_option('-b',
                          '--batch-size',
                          type='int',
                          default=DEFAULT_SEGHOST_NUM_WORKERS,
                          metavar='<batch_size>')
        parser.add_option('-f',
                          '--force-overwrite',
                          dest='forceoverwrite',
                          action='store_true',
                          default=False)
        parser.add_option('-l',
                          '--log-dir',
                          dest="logfileDirectory",
                          type="string")
        parser.add_option(
            '',
            '--era',
            dest="era",
            help="coordinator era",
        )

        # Parse the command line arguments
        self.options, _ = parser.parse_args()

        if not self.options.confinfo:
            raise Exception('Missing --confinfo argument.')
        if not self.options.logfileDirectory:
            raise Exception('Missing --log-dir argument.')

        self.logger = gplog.setup_tool_logging(
            os.path.split(self.file_name)[-1],
            unix.getLocalHostname(),
            unix.getUserName(),
            logdir=self.options.logfileDirectory)

        if self.options.batch_size <= 0:
            self.logger.warn('batch_size was less than zero.  Setting to 1.')
            self.options.batch_size = 1

        if self.options.verbose:
            gplog.enable_verbose_logging()

        self.seg_recovery_info_list = recoveryinfo.deserialize_list(
            self.options.confinfo)
        if len(self.seg_recovery_info_list) == 0:
            raise Exception(
                'No segment configuration values found in --confinfo argument')
Example #17
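A newer config_primaries_for_replication that builds the pg_hba.conf entries inline (a samehost replication entry plus hostname- or CIDR-based host and replication lines for the mirror and, when different, the primary) and appends them remotely before reloading each primary.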
    def config_primaries_for_replication(self, gpArray):
        logger.info(
            "Starting to modify pg_hba.conf on primary segments to allow replication connections"
        )

        try:
            for segmentPair in gpArray.getSegmentList():
                # Start with an empty string so that the later .join prepends a newline to the first entry
                entries = ['']
                # Add the samehost replication entry to support single-host development
                entries.append(
                    'host  replication {username} samehost trust'.format(
                        username=unix.getUserName()))
                if self.__options.hba_hostnames:
                    mirror_hostname, _, _ = socket.gethostbyaddr(
                        segmentPair.mirrorDB.getSegmentHostName())
                    entries.append(
                        "host all {username} {hostname} trust".format(
                            username=unix.getUserName(),
                            hostname=mirror_hostname))
                    entries.append(
                        "host replication {username} {hostname} trust".format(
                            username=unix.getUserName(),
                            hostname=mirror_hostname))
                    primary_hostname, _, _ = socket.gethostbyaddr(
                        segmentPair.primaryDB.getSegmentHostName())
                    if mirror_hostname != primary_hostname:
                        entries.append(
                            "host replication {username} {hostname} trust".
                            format(username=unix.getUserName(),
                                   hostname=primary_hostname))
                else:
                    mirror_ips = gp.IfAddrs.list_addrs(
                        segmentPair.mirrorDB.getSegmentHostName())
                    for ip in mirror_ips:
                        cidr_suffix = '/128' if ':' in ip else '/32'
                        cidr = ip + cidr_suffix
                        hba_line_entry = "host all {username} {cidr} trust".format(
                            username=unix.getUserName(), cidr=cidr)
                        entries.append(hba_line_entry)
                    mirror_hostname = segmentPair.mirrorDB.getSegmentHostName()
                    segment_pair_ips = gp.IfAddrs.list_addrs(mirror_hostname)
                    primary_hostname = segmentPair.primaryDB.getSegmentHostName(
                    )
                    if mirror_hostname != primary_hostname:
                        segment_pair_ips.extend(
                            gp.IfAddrs.list_addrs(primary_hostname))
                    for ip in segment_pair_ips:
                        cidr_suffix = '/128' if ':' in ip else '/32'
                        cidr = ip + cidr_suffix
                        hba_line_entry = "host replication {username} {cidr} trust".format(
                            username=unix.getUserName(), cidr=cidr)
                        entries.append(hba_line_entry)
                cmdStr = ". {gphome}/greenplum_path.sh; echo '{entries}' >> {datadir}/pg_hba.conf; pg_ctl -D {datadir} reload".format(
                    gphome=os.environ["GPHOME"],
                    entries="\n".join(entries),
                    datadir=segmentPair.primaryDB.datadir)
                logger.debug(cmdStr)
                cmd = Command(name="append to pg_hba.conf",
                              cmdStr=cmdStr,
                              ctxt=base.REMOTE,
                              remoteHost=segmentPair.primaryDB.hostname)
                cmd.run(validateAfter=True)

        except Exception as e:
            logger.error(
                "Failed while modifying pg_hba.conf on primary segments to allow replication connections: %s"
                % str(e))
            raise

        else:
            logger.info(
                "Successfully modified pg_hba.conf on primary segments to allow replication connections"
            )
Example #18
A GPText index expunge tool: gptext_index_expunge() issues the expunge command to every Solr instance, main() locates the GPText schema, validates that Solr is up and the index exists, confirms with the user, and kicks off the expunge; module-level code sets up tool logging before calling main().
    logger.info('Starting expunge commands...')
    for host in hosts:
        hostname, port = host[0], host[1]
        cmd = Command("Expunging index %s on host %s port %d" % (index, hostname, port),
                      EXPUNGE_COMMAND % (hostname, port, index))
        cmd.run(validateAfter=True)
    return

def main():
    options = parseargs()
    database = find_gptext_schema()
    if not database:
        logger.error('Could not find schema %s.' % GPTEXT_SCHEMA)
        logger.error('Use the --database option to specify the database that')
        logger.error('contains the %s schema.' % GPTEXT_SCHEMA)
        sys.exit(1)
    url = dbconn.DbURL(dbname=database)
    validate_solr_up(url)
    validate_index_exists(url, options.index)
    hosts = get_solr_instances(url)
    if not userinput.ask_yesno(None, "\nContinue with GPText index '%s' expunge?" % options.index, 'N'):
        logger.error('User asked for termination')
        sys.exit(1)
    gptext_index_expunge(options.index, hosts)
    logger.info('Done')
    logger.info('To check the process use "ps -ef | grep curl" - it will show you all the expunge requests issued')

gplog.setup_tool_logging('gptext_expunge', getLocalHostname(), getUserName())
logger = gplog.get_default_logger()
main()
Example #19
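The tail of a benchmark driver for PL/Python, PL/Python3, and PL/R: for each language that is not skipped it prepares the test functions, adds the GPDB 5 query set when targeting that generation, and runs the workload; the __main__ block sets up tool logging with the local hostname and user name.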
        queries = QUERIES_PYTHON
        if options.gpdbgen == '5':
            queries += QUERIES_PYTHON_5
        if prepare_funcs("python", options.dbname, options.sqldir,
                         options.gpdbgen) < 0:
            return
        run(options.dbname, options.username, options.time, queries, "python")
    if not options.nopython3:
        queries = QUERIES_PYTHON
        if options.gpdbgen == '5':
            queries += QUERIES_PYTHON_5
        if prepare_funcs("python3", options.dbname, options.sqldir,
                         options.gpdbgen) < 0:
            return
        run(options.dbname, options.username, options.time, queries, "python3")
    if not options.nor:
        queries = QUERIES_R
        if options.gpdbgen == '5':
            queries += QUERIES_R_5
        if prepare_funcs("R", options.dbname, options.sqldir,
                         options.gpdbgen) < 0:
            return
        run(options.dbname, options.username, options.time, queries, "R")


if __name__ == '__main__':
    execname = os.path.split(__file__)[-1]
    gplog.setup_tool_logging(execname, getLocalHostname(), getUserName())
    logger = gplog.get_default_logger()
    main()