def main(argv):
    """CLI entry point: seed connection settings from the PG* environment
    variables, overlay parsed arguments, optionally redirect stdout to an
    output file, then run the analyze/vacuum utility and exit with its result.
    """
    # (db-key, environment variable, default) triples for the connection settings
    env_sources = (
        (config_constants.DB_NAME, 'PGDATABASE', None),
        (config_constants.DB_USER, 'PGUSER', None),
        (config_constants.DB_PASSWORD, 'PGPASSWORD', None),
        (config_constants.DB_HOST, 'PGHOST', None),
        (config_constants.DB_PORT, 'PGPORT', 5439),
    )
    args = {key: os.environ.get(env_name, default) for key, env_name, default in env_sources}

    # overlay the command-line arguments
    # NOTE(review): `parse_args` is passed to dict.update un-called, so it is
    # presumably a module-level mapping of parsed arguments — confirm upstream.
    args.update(parse_args)

    # bind stdout to the requested output file, if one was supplied
    output_target = args.get(config_constants.OUTPUT_FILE)
    if output_target is not None:
        sys.stdout = open(output_target, 'w')

    # invoke the main method of the utility and propagate its exit status
    result = analyze_vacuum.run_analyze_vacuum(**args)
    sys.exit(result if result is not None else 0)
def event_handler(event, context):
    """Lambda entry point: load configuration (from S3 or a local config.json),
    decrypt the database password via KMS, then run each requested Redshift
    utility and return the list of their results.

    :param event: Lambda event; may carry 'ConfigLocation' and 'ExecuteUtility'
    :param context: Lambda context (unused)
    :return: list of per-utility results, or None when nothing is configured
    """
    # resolve the AWS region, warning when falling back to the default
    current_region = 'us-east-1'
    if region_key not in os.environ:
        print("Warning - using default region %s" % current_region)
    else:
        current_region = os.environ[region_key]

    kms_connection = boto3.client('kms', region_name=current_region)

    # load the configuration file
    config_location = LOCAL_CONFIG
    if event is not None and 'ConfigLocation' in event:
        config_location = event['ConfigLocation']

    if config_location.startswith("s3://"):
        # load the configuration file from S3
        s3_client = boto3.client('s3', region_name=current_region)
        bucket = config_location.replace('s3://', '').split("/")[0]
        key = config_location.replace('s3://' + bucket + "/", '')
        obj = s3_client.get_object(Bucket=bucket, Key=key)
        config_body = obj['Body'].read()
        config = json.loads(config_body)
    elif config_location == LOCAL_CONFIG:
        # load from the local configuration
        if not os.path.isfile("config.json"):
            raise Exception("Unable to resolve local config.json file")
        else:
            # fix: use a context manager so the file handle is closed (was leaked)
            with open("config.json", 'r') as config_file:
                config = json.load(config_file)
            if config is None:
                raise Exception("No Configuration Found")
    else:
        raise Exception("Unsupported configuration location %s" % config_location)

    config_detail = config["configuration"]

    # convert the provided configuration into something that the utilities we're calling will understand
    config_detail = config_constants.normalise_config(config_detail)

    # fix: debug must be initialised — previously `if debug:` raised NameError
    # whenever the DEBUG flag was absent from the configuration
    debug = False
    if config_constants.DEBUG in config_detail and config_detail[config_constants.DEBUG]:
        debug = True
    if debug:
        print("Using Provided Configuration:")
        print(config_detail)

    # KMS crypto authorisation context
    auth_context = None
    if config_constants.KMS_AUTH_CONTEXT in config_detail:
        auth_context = config_detail[config_constants.KMS_AUTH_CONTEXT]
        # convert to json
        auth_context = json.loads(auth_context)
        if debug:
            print("Using Authorisation Context for decryption")
            print(auth_context)

    # resolve password
    encrypted_password = base64.b64decode(
        config_detail[config_constants.ENCRYPTED_PASSWORD])

    # check None before comparing, so a missing value cannot slip through
    if encrypted_password is not None and encrypted_password != "":
        if auth_context is not None:
            # decrypt the password using KMS
            use_password = kms_connection.decrypt(CiphertextBlob=encrypted_password,
                                                  EncryptionContext=auth_context)['Plaintext']
        else:
            # decrypt the password using KMS
            use_password = kms_connection.decrypt(CiphertextBlob=encrypted_password)['Plaintext']
    else:
        raise Exception(
            "Unable to run Utilities without a configured Password")

    config_detail[config_constants.DB_PASSWORD] = use_password

    # an explicit event request wins; otherwise fall back to the configured list
    run_utilities = []
    if event is not None and "ExecuteUtility" in event:
        if event["ExecuteUtility"] == COLUMN_ENCODING:
            run_utilities.append(COLUMN_ENCODING)
        elif event["ExecuteUtility"] == ANALYZE_VACUUM:
            run_utilities.append(ANALYZE_VACUUM)
        elif event["ExecuteUtility"] == ANALYZE:
            run_utilities.append(ANALYZE)
        elif event["ExecuteUtility"] == VACUUM:
            run_utilities.append(VACUUM)
        elif event["ExecuteUtility"] == MONITORING:
            run_utilities.append(MONITORING)
    elif 'utilities' in config:
        # run each utility, if requested
        if COLUMN_ENCODING in config["utilities"]:
            run_utilities.append(COLUMN_ENCODING)
        if ANALYZE_VACUUM in config["utilities"]:
            run_utilities.append(ANALYZE_VACUUM)
        if ANALYZE in config["utilities"]:
            run_utilities.append(ANALYZE)
        if VACUUM in config["utilities"]:
            run_utilities.append(VACUUM)
        if MONITORING in config["utilities"]:
            run_utilities.append(MONITORING)
    else:
        print("No Utilities configured to run. Exiting!")
        return

    results = []
    for util in run_utilities:
        if util == COLUMN_ENCODING:
            print("Running %s" % util)
            analyze_schema_compression.configure(**config_detail)
            encoding_result = analyze_schema_compression.run()
            results.append(encoding_result)
        elif util == ANALYZE_VACUUM:
            print("Running %s" % util)
            analyze_result = analyze_vacuum.run_analyze_vacuum(**config_detail)
            if analyze_result == 0:
                results.append("OK")
        elif util == ANALYZE:
            print("Running %s" % util)
            # turn on correct flag
            config_detail[config_constants.DO_ANALYZE] = True
            config_detail[config_constants.DO_VACUUM] = False
            analyze_result = analyze_vacuum.run_analyze_vacuum(**config_detail)
            if analyze_result == 0:
                results.append("OK")
        elif util == VACUUM:
            print("Running %s" % util)
            # turn on correct flag
            config_detail[config_constants.DO_ANALYZE] = False
            config_detail[config_constants.DO_VACUUM] = True
            analyze_result = analyze_vacuum.run_analyze_vacuum(**config_detail)
            if analyze_result == 0:
                results.append("OK")
        elif util == MONITORING:
            print("Running %s" % util)
            redshift_monitoring.monitor_cluster([config_detail, os.environ])

    print("Processing Complete")
    return results
def main(argv):
    """CLI entry point for the analyze/vacuum utility (positional-args variant).

    Reads connection defaults from PG* environment variables, overrides them
    from command-line flags, validates required parameters, then invokes
    analyze_vacuum.run_analyze_vacuum and exits with its result.
    """
    # connection defaults come from the standard PG* environment variables
    db = get_env_var('PGDATABASE', None)
    db_user = get_env_var('PGUSER', None)
    db_pwd = get_env_var('PGPASSWORD', None)
    db_host = get_env_var('PGHOST', None)
    db_port = get_env_var('PGPORT', 5439)
    schema_name = 'public'
    table_name = None
    blacklisted_tables = None
    debug = False
    do_execute = False
    query_slot_count = 1
    ignore_errors = False
    query_group = None
    analyze_flag = True
    vacuum_flag = True
    require_ssl = False

    supported_args = """db= db-user= db-pwd= db-host= db-port= schema-name= table-name= blacklisted-tables= require-ssl= debug= output-file= slot-count= ignore-errors= query_group= analyze-flag= vacuum-flag= vacuum-parameter= min-unsorted-pct= max-unsorted-pct= deleted-pct= stats-off-pct= max-table-size-mb= min-interleaved-skew= min-interleaved-cnt="""

    # extract the command line arguments
    try:
        optlist, remaining = getopt.getopt(argv[1:], "", supported_args.split())
    except getopt.GetoptError as err:
        print(str(err))
        usage()

    output_file = None

    # parse command line arguments
    for arg, value in optlist:
        if arg == "--db":
            if value == '' or value is None:
                usage()
            else:
                db = value
        elif arg == "--db-user":
            if value == '' or value is None:
                usage()
            else:
                db_user = value
        elif arg == "--db-pwd":
            if value == '' or value is None:
                usage()
            else:
                db_pwd = value
        elif arg == "--db-host":
            if value == '' or value is None:
                usage()
            else:
                db_host = value
        elif arg == "--db-port":
            if value != '' and value is not None:
                db_port = value
        elif arg == "--require-ssl":
            # fix: `value != None` -> `value is not None` (identity check for None)
            if value != '' and value is not None:
                if value.upper() == 'TRUE' or value == '1':
                    require_ssl = True
        elif arg == "--schema-name":
            if value != '' and value is not None:
                schema_name = value
        elif arg == "--table-name":
            if value != '' and value is not None:
                table_name = value
        elif arg == "--blacklisted-tables":
            if value != '' and value is not None:
                blacklisted_tables = value
        elif arg == "--debug":
            if value.upper() == 'TRUE':
                debug = True
            else:
                debug = False
        elif arg == "--output-file":
            output_file = value
        elif arg == "--ignore-errors":
            if value.upper() == 'TRUE':
                ignore_errors = True
            else:
                ignore_errors = False
        elif arg == "--slot-count":
            query_slot_count = int(value)
        elif arg == "--query_group":
            if value != '' and value is not None:
                query_group = value
        elif arg == "--vacuum-flag":
            if value.upper() == 'FALSE':
                vacuum_flag = False
        elif arg == "--analyze-flag":
            if value.upper() == 'FALSE':
                analyze_flag = False
        elif arg == "--vacuum-parameter":
            if value.upper() == 'SORT ONLY' or value.upper() == 'DELETE ONLY' or value.upper() == 'REINDEX':
                vacuum_parameter = value
            else:
                vacuum_parameter = 'FULL'
        elif arg == "--min-unsorted-pct":
            if value != '' and value is not None:
                min_unsorted_pct = value
        elif arg == "--max-unsorted-pct":
            if value != '' and value is not None:
                max_unsorted_pct = value
        elif arg == "--deleted-pct":
            if value != '' and value is not None:
                deleted_pct = value
        elif arg == "--stats-off-pct":
            if value != '' and value is not None:
                stats_off_pct = value
        elif arg == "--predicate-cols":
            if value.upper() == 'TRUE':
                predicate_cols = True
            else:
                predicate_cols = False
        elif arg == "--max-table-size-mb":
            if value != '' and value is not None:
                max_table_size_mb = value
        elif arg == "--min-interleaved-skew":
            if value != '' and value is not None:
                min_interleaved_skew = value
        elif arg == "--min-interleaved-cnt":
            if value != '' and value is not None:
                min_interleaved_cnt = value
        else:
            # fix: was `assert False, ...` followed by unreachable usage() —
            # asserts are stripped under `python -O`, so validation must not rely on them
            usage("Unsupported Argument " + arg)

    # Validate that we've got all the args needed
    if db is None:
        usage("Missing Parameter 'db'")
    if db_user is None:
        usage("Missing Parameter 'db-user'")
    if db_pwd is None:
        usage("Missing Parameter 'db-pwd'")
    if db_host is None:
        usage("Missing Parameter 'db-host'")
    if db_port is None:
        usage("Missing Parameter 'db-port'")

    if output_file is not None:
        sys.stdout = open(output_file, 'w')

    # invoke the main method of the utility
    result = analyze_vacuum.run_analyze_vacuum(db_host, db_port, db_user, db_pwd, db, query_group,
                                               query_slot_count, vacuum_flag, analyze_flag, schema_name,
                                               table_name, blacklisted_tables, ignore_errors, require_ssl)

    if result is not None:
        sys.exit(result)
    else:
        sys.exit(0)
def main(argv):
    """CLI entry point for the analyze/vacuum utility (kwargs/config_constants variant).

    Seeds connection settings from PG* environment variables, overrides them
    from command-line flags into the `args` dict keyed by config_constants,
    validates the required parameters, then invokes run_analyze_vacuum(**args).
    """
    supported_args = """db= db-user= db-pwd= db-host= db-port= schema-name= table-name= blacklisted-tables= suppress-cloudwatch= require-ssl= debug= output-file= slot-count= ignore-errors= query_group= analyze-flag= vacuum-flag= vacuum-parameter= min-unsorted-pct= max-unsorted-pct= stats-off-pct= predicate-cols= max-table-size-mb= min-interleaved-skew= min-interleaved-cnt="""

    # extract the command line arguments
    try:
        optlist, remaining = getopt.getopt(argv[1:], "", supported_args.split())
    except getopt.GetoptError as err:
        print(str(err))
        usage()

    # connection defaults come from the standard PG* environment variables
    args = {config_constants.DB_NAME: get_env_var('PGDATABASE', None),
            config_constants.DB_USER: get_env_var('PGUSER', None),
            config_constants.DB_PASSWORD: get_env_var('PGPASSWORD', None),
            config_constants.DB_HOST: get_env_var('PGHOST', None),
            config_constants.DB_PORT: get_env_var('PGPORT', 5439)}

    # parse command line arguments
    for arg, value in optlist:
        if arg == "--db":
            if value == '':
                usage()
            else:
                # fix: store under the shared constant (was the literal key 'db'),
                # consistent with every other option below
                args[config_constants.DB_NAME] = value
        elif arg == "--db-user":
            if value == '':
                usage()
            else:
                args[config_constants.DB_USER] = value
        elif arg == "--db-pwd":
            if value == '':
                usage()
            else:
                args[config_constants.DB_PASSWORD] = value
        elif arg == "--db-host":
            if value != '':
                args[config_constants.DB_HOST] = value
        elif arg == "--db-port":
            if value != '' and value is not None:
                args[config_constants.DB_PORT] = int(value)
        elif arg == "--require-ssl":
            if value != '' and value is not None:
                if value.upper() == 'TRUE' or value == '1':
                    args[config_constants.SSL] = True
                else:
                    args[config_constants.SSL] = False
        elif arg == "--schema-name":
            if value != '' and value is not None:
                args[config_constants.SCHEMA_NAME] = value
        elif arg == "--table-name":
            if value != '' and value is not None:
                args[config_constants.TABLE_NAME] = value
        elif arg == "--blacklisted-tables":
            if value != '' and value is not None:
                args[config_constants.BLACKLISTED_TABLES] = value
        elif arg == "--debug":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.DEBUG] = True
        elif arg == "--output-file":
            # open the supplied file path and bind it to stdout
            sys.stdout = open(value, 'w')
        elif arg == "--ignore-errors":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.IGNORE_ERRORS] = True
        elif arg == "--slot-count":
            args[config_constants.QUERY_SLOT_COUNT] = int(value)
        elif arg == "--query_group":
            if value != '' and value is not None:
                args[config_constants.QUERY_GROUP] = value
        elif arg == "--vacuum-flag":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.DO_VACUUM] = True
            else:
                args[config_constants.DO_VACUUM] = False
        elif arg == "--analyze-flag":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.DO_ANALYZE] = True
            else:
                args[config_constants.DO_ANALYZE] = False
        elif arg == "--vacuum-parameter":
            if value.upper() == 'SORT ONLY' or value.upper() == 'DELETE ONLY' or value.upper() == 'REINDEX':
                args[config_constants.VACUUM_PARAMETER] = value
            else:
                # fix: default branch used the literal key 'vacuum_parameter';
                # use the same constant as the branch above so the default applies
                args[config_constants.VACUUM_PARAMETER] = 'FULL'
        elif arg == "--min-unsorted-pct":
            if value != '' and value is not None:
                args[config_constants.MIN_UNSORTED_PCT] = value
        elif arg == "--max-unsorted-pct":
            if value != '' and value is not None:
                args[config_constants.MAX_UNSORTED_PCT] = value
        elif arg == "--stats-off-pct":
            if value != '' and value is not None:
                args[config_constants.STATS_OFF_PCT] = value
        elif arg == "--predicate-cols":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.PREDICATE_COLS] = True
            else:
                args[config_constants.PREDICATE_COLS] = False
        elif arg == "--suppress-cloudwatch":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.SUPPRESS_CLOUDWATCH] = True
            else:
                args[config_constants.SUPPRESS_CLOUDWATCH] = False
        elif arg == "--max-table-size-mb":
            if value != '' and value is not None:
                args[config_constants.MAX_TBL_SIZE_MB] = value
        elif arg == "--min-interleaved-skew":
            if value != '' and value is not None:
                args[config_constants.MIN_INTERLEAVED_SKEW] = value
        elif arg == "--min-interleaved-cnt":
            if value != '' and value is not None:
                args[config_constants.MIN_INTERLEAVED_COUNT] = value
        else:
            usage("Unsupported Argument " + arg)

    # Validate that we've got all the args needed
    # fix: the connection keys are always seeded above, so `key not in args`
    # could never fire — a missing environment variable leaves None, test for that
    if args.get(config_constants.DB_NAME) is None:
        usage("Missing Parameter 'db'")
    if args.get(config_constants.DB_USER) is None:
        usage("Missing Parameter 'db-user'")
    if args.get(config_constants.DB_PASSWORD) is None:
        usage("Missing Parameter 'db-pwd'")
    if args.get(config_constants.DB_HOST) is None:
        usage("Missing Parameter 'db-host'")
    if args.get(config_constants.DB_PORT) is None:
        usage("Missing Parameter 'db-port'")

    # fallback redirect (fix: read back with the same constant key it is tested with)
    if config_constants.OUTPUT_FILE in args:
        sys.stdout = open(args[config_constants.OUTPUT_FILE], 'w')

    # invoke the main method of the utility
    result = analyze_vacuum.run_analyze_vacuum(**args)

    if result is not None:
        sys.exit(result)
    else:
        sys.exit(0)
def event_handler(event, context):
    """Lambda entry point (legacy positional-args variant): load configuration,
    decrypt the database password via KMS using the region's encryption context,
    then run the column-encoding and/or analyze-vacuum utilities.

    :param event: Lambda event; may carry 'ConfigLocation' and 'ExecuteUtility'
    :param context: Lambda context (unused)
    :return: list of per-utility results, or None when nothing is configured
    """
    currentRegion = 'us-east-1'
    try:
        currentRegion = os.environ[region_key]
        if currentRegion is None or currentRegion == '':
            raise KeyError
    except KeyError:
        raise Exception("Unable to resolve environment variable %s" % region_key)

    kmsConnection = boto3.client('kms', region_name=currentRegion)

    # KMS crypto authorisation context
    authContext = utils.get_encryption_context(currentRegion)

    # load the configuration file
    config_location = LOCAL_CONFIG
    if 'ConfigLocation' in event:
        config_location = event['ConfigLocation']

    if config_location.startswith("s3://"):
        s3client = boto3.client('s3', region_name=currentRegion)
        bucket = config_location.replace('s3://', '').split("/")[0]
        key = config_location.replace('s3://' + bucket + "/", '')
        obj = s3client.get_object(Bucket=bucket, Key=key)
        config = json.loads(obj['Body'].read())
    elif config_location == LOCAL_CONFIG:
        try:
            # fix: context manager closes the file handle (was leaked)
            with open("config.json", 'r') as config_file:
                config = json.load(config_file)
            if config is None:
                raise Exception("No Configuration Found")
        except Exception:
            # log the failure type and propagate
            print(sys.exc_info()[0])
            raise
    else:
        raise Exception("Unsupported configuration location %s" % config_location)

    # resolve password
    configDetail = config["configuration"]
    encryptedPassword = configDetail["dbPassword"]
    encryptedPassword = base64.b64decode(encryptedPassword)

    # check None before comparing, so a missing value cannot slip through
    if encryptedPassword is not None and encryptedPassword != "":
        # decrypt the password using KMS
        usePassword = kmsConnection.decrypt(CiphertextBlob=encryptedPassword,
                                            EncryptionContext=authContext)['Plaintext']
    else:
        raise Exception(
            "Unable to run Utilities without a configured Password")

    run_utilities = []
    if "ExecuteUtility" in event:
        if event["ExecuteUtility"] == COLUMN_ENCODING:
            run_utilities.append(COLUMN_ENCODING)
        elif event["ExecuteUtility"] == ANALYZE_VACUUM:
            run_utilities.append(ANALYZE_VACUUM)
    elif 'utilities' in config:
        # run each utility, if requested
        if COLUMN_ENCODING in config["utilities"]:
            run_utilities.append(COLUMN_ENCODING)
        # fix: previously re-tested COLUMN_ENCODING here (copy/paste), so
        # ANALYZE_VACUUM could never be selected from the configuration
        if ANALYZE_VACUUM in config["utilities"]:
            run_utilities.append(ANALYZE_VACUUM)
    else:
        print("No Utilities configured to run. Exiting!")
        return

    results = []
    for util in run_utilities:
        if util == COLUMN_ENCODING:
            print("Running %s" % util)
            analyze_schema_compression.configure(
                configDetail["outputFile"], configDetail["db"], configDetail["dbUser"],
                usePassword, configDetail["dbHost"], configDetail["dbPort"],
                configDetail["analyzeSchema"], configDetail["targetSchema"],
                configDetail["analyzeTable"], configDetail["analyze_col_width"],
                configDetail["threads"], configDetail["do-execute"],
                configDetail["querySlotCount"], configDetail["ignoreErrors"],
                configDetail["force"], configDetail["dropOldData"],
                configDetail["comprows"], configDetail["queryGroup"],
                configDetail["debug"], configDetail["ssl-option"], None)
            encoding_result = analyze_schema_compression.run()
            results.append(encoding_result)
        elif util == ANALYZE_VACUUM:
            print("Running %s" % util)
            analyze_result = analyze_vacuum.run_analyze_vacuum(
                configDetail["dbHost"], configDetail["dbPort"], configDetail["dbUser"],
                usePassword, configDetail["db"], configDetail["queryGroup"],
                configDetail["querySlotCount"], configDetail["doVacuum"],
                configDetail["doAnalyze"], configDetail["analyzeSchema"],
                configDetail["analyzeTable"], configDetail["tableBlacklist"],
                configDetail["ignoreErrors"], configDetail["ssl-option"])
            if analyze_result == 0:
                results.append("OK")

    print("Processing Complete")
    return results
def main(argv):
    """CLI entry point (Secrets Manager variant): connection settings come from
    the 'bi/redshift_prod' secret, may be overridden by command-line flags, and
    are then passed to analyze_vacuum.run_analyze_vacuum(**args).
    """
    supported_args = """db= db-user= db-pwd= db-host= db-port= schema-name= table-name= blacklisted-tables= suppress-cloudwatch= require-ssl= debug= output-file= slot-count= ignore-errors= query_group= analyze-flag= vacuum-flag= vacuum-parameter= min-unsorted-pct= max-unsorted-pct= stats-off-pct= predicate-cols= max-table-size-mb= min-interleaved-skew= min-interleaved-cnt="""

    # load connection environment from Secrets Manager
    # NOTE(review): the secret payload appears to be a Python-literal string,
    # hence literal_eval rather than json.loads — confirm the secret format
    redshift_secrets_dict = ast.literal_eval(_get_secret('bi/redshift_prod'))

    # extract the command line arguments
    try:
        optlist, remaining = getopt.getopt(argv[1:], "", supported_args.split())
    except getopt.GetoptError as err:
        print(str(err))
        usage()

    args = {config_constants.DB_NAME: redshift_secrets_dict["redshift_db"],
            config_constants.DB_USER: redshift_secrets_dict["redshift_user"],
            config_constants.DB_PASSWORD: redshift_secrets_dict["redshift_password"],
            config_constants.DB_HOST: redshift_secrets_dict["redshift_endpoint"],
            config_constants.DB_PORT: redshift_secrets_dict["redshift_port"]}

    # parse command line arguments
    for arg, value in optlist:
        if arg == "--db":
            if value == '':
                usage()
            else:
                args[config_constants.DB_NAME] = value
        elif arg == "--db-user":
            if value == '':
                usage()
            else:
                args[config_constants.DB_USER] = value
        elif arg == "--db-pwd":
            if value == '':
                usage()
            else:
                args[config_constants.DB_PASSWORD] = value
        elif arg == "--db-host":
            if value != '':
                args[config_constants.DB_HOST] = value
        elif arg == "--db-port":
            if value != '' and value is not None:
                args[config_constants.DB_PORT] = int(value)
        elif arg == "--require-ssl":
            if value != '' and value is not None:
                if value.upper() == 'TRUE' or value == '1':
                    args[config_constants.SSL] = True
                else:
                    args[config_constants.SSL] = False
        elif arg == "--schema-name":
            if value != '' and value is not None:
                args[config_constants.SCHEMA_NAME] = value
        elif arg == "--table-name":
            if value != '' and value is not None:
                args[config_constants.TABLE_NAME] = value
        elif arg == "--blacklisted-tables":
            if value != '' and value is not None:
                args[config_constants.BLACKLISTED_TABLES] = value
        elif arg == "--debug":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.DEBUG] = True
        elif arg == "--output-file":
            # open the supplied file path and bind it to stdout
            sys.stdout = open(value, 'w')
        elif arg == "--ignore-errors":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.IGNORE_ERRORS] = True
        elif arg == "--slot-count":
            args[config_constants.QUERY_SLOT_COUNT] = int(value)
        elif arg == "--query_group":
            if value != '' and value is not None:
                args[config_constants.QUERY_GROUP] = value
        elif arg == "--vacuum-flag":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.DO_VACUUM] = True
            else:
                args[config_constants.DO_VACUUM] = False
        elif arg == "--analyze-flag":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.DO_ANALYZE] = True
            else:
                args[config_constants.DO_ANALYZE] = False
        elif arg == "--vacuum-parameter":
            if value.upper() == 'SORT ONLY' or value.upper() == 'DELETE ONLY' or value.upper() == 'REINDEX':
                args[config_constants.VACUUM_PARAMETER] = value
            else:
                # fix: default branch used the literal key 'vacuum_parameter';
                # use the same constant as the branch above so the default applies
                args[config_constants.VACUUM_PARAMETER] = 'FULL'
        elif arg == "--min-unsorted-pct":
            if value != '' and value is not None:
                args[config_constants.MIN_UNSORTED_PCT] = value
        elif arg == "--max-unsorted-pct":
            if value != '' and value is not None:
                args[config_constants.MAX_UNSORTED_PCT] = value
        elif arg == "--stats-off-pct":
            if value != '' and value is not None:
                args[config_constants.STATS_OFF_PCT] = value
        elif arg == "--predicate-cols":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.PREDICATE_COLS] = True
            else:
                args[config_constants.PREDICATE_COLS] = False
        elif arg == "--suppress-cloudwatch":
            if value.upper() == 'TRUE' or value == '1':
                args[config_constants.SUPPRESS_CLOUDWATCH] = True
            else:
                args[config_constants.SUPPRESS_CLOUDWATCH] = False
        elif arg == "--max-table-size-mb":
            if value != '' and value is not None:
                args[config_constants.MAX_TBL_SIZE_MB] = value
        elif arg == "--min-interleaved-skew":
            if value != '' and value is not None:
                args[config_constants.MIN_INTERLEAVED_SKEW] = value
        elif arg == "--min-interleaved-cnt":
            if value != '' and value is not None:
                args[config_constants.MIN_INTERLEAVED_COUNT] = value
        else:
            usage("Unsupported Argument " + arg)

    # Validate that we've got all the args needed
    # fix: the connection keys are always seeded from the secret above, so
    # `key not in args` could never fire — test for a None value instead
    if args.get(config_constants.DB_NAME) is None:
        usage("Missing Parameter 'db'")
    if args.get(config_constants.DB_USER) is None:
        usage("Missing Parameter 'db-user'")
    if args.get(config_constants.DB_PASSWORD) is None:
        usage("Missing Parameter 'db-pwd'")
    if args.get(config_constants.DB_HOST) is None:
        usage("Missing Parameter 'db-host'")
    if args.get(config_constants.DB_PORT) is None:
        usage("Missing Parameter 'db-port'")

    # fallback redirect (fix: read back with the same constant key it is tested with)
    if config_constants.OUTPUT_FILE in args:
        sys.stdout = open(args[config_constants.OUTPUT_FILE], 'w')

    # invoke the main method of the utility
    result = analyze_vacuum.run_analyze_vacuum(**args)

    if result is not None:
        sys.exit(result)
    else:
        sys.exit(0)
def main(argv):
    """CLI entry point (plain string-key variant): seed connection settings from
    PG* environment variables, override from command-line flags into `args`,
    validate, and invoke analyze_vacuum.run_analyze_vacuum(**args).
    """
    supported_args = """db= db-user= db-pwd= db-host= db-port= schema-name= table-name= blacklisted-tables= require-ssl= debug= output-file= slot-count= ignore-errors= query_group= analyze-flag= vacuum-flag= vacuum-parameter= min-unsorted-pct= max-unsorted-pct= deleted-pct= stats-off-pct= predicate-cols= max-table-size-mb= min-interleaved-skew= min-interleaved-cnt="""

    # extract the command line arguments
    try:
        optlist, remaining = getopt.getopt(argv[1:], "", supported_args.split())
    except getopt.GetoptError as err:
        print(str(err))
        usage()

    # connection defaults come from the standard PG* environment variables
    args = {}
    args['db'] = get_env_var('PGDATABASE', None)
    args['db_user'] = get_env_var('PGUSER', None)
    args['db_pwd'] = get_env_var('PGPASSWORD', None)
    args['db_host'] = get_env_var('PGHOST', None)
    args['db_port'] = get_env_var('PGPORT', 5439)

    # parse command line arguments
    for arg, value in optlist:
        if arg == "--db":
            if value == '':
                usage()
            else:
                args['db'] = value
        elif arg == "--db-user":
            if value == '':
                usage()
            else:
                args['db_user'] = value
        elif arg == "--db-pwd":
            if value == '':
                usage()
            else:
                args['db_pwd'] = value
        elif arg == "--db-host":
            if value == '':
                usage()
            else:
                args['db_host'] = value
        elif arg == "--db-port":
            if value != '' and value is not None:
                args['db_port'] = int(value)
        elif arg == "--require-ssl":
            if value != '' and value is not None:
                if value.upper() == 'TRUE' or value == '1':
                    args['require_ssl'] = True
        elif arg == "--schema-name":
            if value != '' and value is not None:
                args['schema_name'] = value
        elif arg == "--table-name":
            if value != '' and value is not None:
                args['table_name'] = value
        elif arg == "--blacklisted-tables":
            if value != '' and value is not None:
                args['blacklisted_tables'] = value
        elif arg == "--debug":
            if value.upper() == 'TRUE':
                args['debug'] = True
        elif arg == "--output-file":
            # open the supplied file path and bind it to stdout
            sys.stdout = open(value, 'w')
        elif arg == "--ignore-errors":
            if value.upper() == 'TRUE':
                args['ignore_errors'] = True
        elif arg == "--slot-count":
            args['query_slot_count'] = int(value)
        elif arg == "--query_group":
            if value != '' and value is not None:
                args['query_group'] = value
        elif arg == "--vacuum-flag":
            if value.upper() == 'FALSE':
                args['vacuum_flag'] = False
            else:
                args['vacuum_flag'] = True
        elif arg == "--analyze-flag":
            if value.upper() == 'FALSE':
                args['analyze_flag'] = False
            else:
                args['analyze_flag'] = True
        elif arg == "--vacuum-parameter":
            if value.upper() == 'SORT ONLY' or value.upper() == 'DELETE ONLY' or value.upper() == 'REINDEX':
                args['vacuum_parameter'] = value
        elif arg == "--min-unsorted-pct":
            if value != '' and value is not None:
                args['min_unsorted_pct'] = value
        elif arg == "--max-unsorted-pct":
            if value != '' and value is not None:
                args['max_unsorted_pct'] = value
        elif arg == "--deleted-pct":
            if value != '' and value is not None:
                args['deleted_pct'] = value
        elif arg == "--stats-off-pct":
            if value != '' and value is not None:
                args['stats_off_pct'] = value
        elif arg == "--predicate-cols":
            if value.upper() == 'TRUE':
                args['predicate_cols'] = True
        elif arg == "--max-table-size-mb":
            if value != '' and value is not None:
                args['max_table_size_mb'] = value
        elif arg == "--min-interleaved-skew":
            if value != '' and value is not None:
                args['min_interleaved_skew'] = value
        elif arg == "--min-interleaved-cnt":
            if value != '' and value is not None:
                args['min_interleaved_cnt'] = value
        else:
            usage("Unsupported Argument " + arg)

    # Validate that we've got all the args needed
    # fix: the five connection keys are always present (seeded above), so the
    # previous `'db' not in args` checks could never fire even when the
    # environment variable was missing — test for a None value instead
    if args.get('db') is None:
        usage("Missing Parameter 'db'")
    if args.get('db_user') is None:
        usage("Missing Parameter 'db-user'")
    if args.get('db_pwd') is None:
        usage("Missing Parameter 'db-pwd'")
    if args.get('db_host') is None:
        usage("Missing Parameter 'db-host'")
    if args.get('db_port') is None:
        usage("Missing Parameter 'db-port'")

    if 'output_file' in args:
        sys.stdout = open(args['output_file'], 'w')

    # invoke the main method of the utility
    result = analyze_vacuum.run_analyze_vacuum(**args)

    if result is not None:
        sys.exit(result)
    else:
        sys.exit(0)
def event_handler(event, context):
    """Lambda entry point (config_constants variant): resolve the region, load
    configuration via the shared `common` helpers, decrypt the password with
    KMS, then dispatch every requested utility and return their results.
    """
    # resolve the AWS region, warning when falling back to the default
    current_region = 'us-east-1'
    if region_key in os.environ:
        current_region = os.environ[region_key]
    else:
        print("Warning - using default region %s" % current_region)

    kms_connection = boto3.client('kms', region_name=current_region)

    # work out where the configuration lives (event override wins)
    config_location = config_constants.LOCAL_CONFIG
    if event is not None and 'ConfigLocation' in event:
        config_location = event['ConfigLocation']

    global debug
    config = common.get_config(config_location, current_region, debug)

    if config_constants.DEBUG in config and config[config_constants.DEBUG]:
        debug = True
    if debug:
        print("Configuration File Contents:")
        print(config)

    # extract the password and bind it back into the configuration so the
    # downstream utilities receive it
    use_password = common.get_password(kms_connection, config, debug)
    config[config_constants.DB_PASSWORD] = use_password

    # the full set of dispatchable utilities, in execution-priority order
    known_utilities = (
        config_constants.COLUMN_ENCODING,
        config_constants.ANALYZE_VACUUM,
        config_constants.ANALYZE,
        config_constants.VACUUM,
        config_constants.MONITORING,
        config_constants.TABLE_PERSISTENCE,
        config_constants.WLM_SCHEDULER,
    )

    run_utilities = []
    if event is not None and "ExecuteUtility" in event:
        # explicit single-utility request from the event
        requested = event["ExecuteUtility"]
        if requested in known_utilities:
            run_utilities.append(requested)
    elif 'utilities' in config:
        # run each configured utility, preserving the canonical order
        run_utilities = [u for u in known_utilities if u in config["utilities"]]
    else:
        print("No Utilities configured to run. Exiting!")
        return

    results = []
    for utility in run_utilities:
        # every selected entry is one of known_utilities, so announce it up front
        print("Running %s" % utility)
        if utility == config_constants.COLUMN_ENCODING:
            analyze_schema_compression.configure(**config)
            results.append(analyze_schema_compression.run())
        elif utility in (config_constants.ANALYZE_VACUUM,
                         config_constants.ANALYZE,
                         config_constants.VACUUM):
            # ANALYZE and VACUUM are the combined utility with one side forced off
            if utility == config_constants.ANALYZE:
                config[config_constants.DO_ANALYZE] = True
                config[config_constants.DO_VACUUM] = False
            elif utility == config_constants.VACUUM:
                config[config_constants.DO_ANALYZE] = False
                config[config_constants.DO_VACUUM] = True
            if analyze_vacuum.run_analyze_vacuum(**config) == 0:
                results.append("OK")
        elif utility == config_constants.MONITORING:
            redshift_monitoring.monitor_cluster([config, os.environ])
        elif utility == config_constants.TABLE_PERSISTENCE:
            snapshot_system_stats.snapshot([config, os.environ])
        elif utility == config_constants.WLM_SCHEDULER:
            wlm_scheduler.run_scheduler(config)

    print("Processing Complete")
    return results