def test_colorclass_colors():
    "Regression: ANSI colors in a unicode/str subclass (issue #49)"
    try:
        import colorclass

        s = colorclass.Color("{magenta}3.14{/magenta}")
        result = tabulate([[s]], tablefmt="plain")
        expected = "\x1b[35m3.14\x1b[39m"
        assert_equal(result, expected)
    except ImportError:

        class textclass(_text_type):
            pass

        s = textclass("\x1b[35m3.14\x1b[39m")
        result = tabulate([[s]], tablefmt="plain")
        expected = "\x1b[35m3.14\x1b[39m"
        assert_equal(result, expected)
def write_row(self, row, last=False, colors=None):
    assert len(row) == self.num_columns
    columns = []
    max_lines = 0
    for i in xrange(self.num_columns):
        cell = row[i]
        # Convert to string:
        cell = to_ustr(cell, encoding=self.encoding_in)
        # Wrap cell text according to the column width
        # TODO: use a TextWrapper object for each column instead
        # split the string if it contains newline characters, otherwise
        # textwrap replaces them with spaces:
        column = []
        for line in cell.splitlines():
            column.extend(textwrap.wrap(line, width=self.column_width[i]))
        # apply colors to each line of the cell if needed:
        if colors is not None and self.outfile.isatty():
            color = colors[i]
            if color:
                for j in xrange(len(column)):
                    # print '%r: %s' % (column[j], type(column[j]))
                    column[j] = colorclass.Color(u'{auto%s}%s{/%s}' % (color, column[j], color))
        columns.append(column)
        # determine which column has the highest number of lines
        max_lines = max(len(columns[i]), max_lines)
    # transpose: write output line by line
    for j in xrange(max_lines):
        self.write(self.style.vertical_left)
        for i in xrange(self.num_columns):
            column = columns[i]
            if j < len(column):
                # text to be written
                text_width = len(column[j])
                self.write(column[j] + u' ' * (self.column_width[i] - text_width))
            else:
                # no more lines for this column
                # TODO: precompute empty cells once
                self.write(u' ' * (self.column_width[i]))
            if i < (self.num_columns - 1):
                self.write(self.style.vertical_middle)
        self.write(self.style.vertical_right)
        self.write('\n')
    if self.style.sep and not last:
        self.write_sep()
def run_tasks_for_bootstrap_spokes_in_ou(tasks_to_run, num_workers):
    for result_type in ["start", "failure", "success", "timeout", "process_failure", "processing_time", "broken_task"]:
        os.makedirs(Path(constants.RESULTS_DIRECTORY) / result_type)

    run_result = luigi.build(
        tasks_to_run,
        local_scheduler=True,
        detailed_summary=True,
        workers=num_workers,
        log_level=os.environ.get("LUIGI_LOG_LEVEL", constants.LUIGI_DEFAULT_LOG_LEVEL),
    )

    for filename in glob("results/failure/*.json"):
        result = json.loads(open(filename, "r").read())
        click.echo(colorclass.Color("{red}" + result.get("task_type") + " failed{/red}"))
        click.echo(f"{yaml.safe_dump({'parameters': result.get('task_params')})}")
        click.echo("\n".join(result.get("exception_stack_trace")))
        click.echo("")

    exit_status_codes = {
        LuigiStatusCode.SUCCESS: 0,
        LuigiStatusCode.SUCCESS_WITH_RETRY: 0,
        LuigiStatusCode.FAILED: 1,
        LuigiStatusCode.FAILED_AND_SCHEDULING_FAILED: 2,
        LuigiStatusCode.SCHEDULING_FAILED: 3,
        LuigiStatusCode.NOT_RUN: 4,
        LuigiStatusCode.MISSING_EXT: 5,
    }
    sys.exit(exit_status_codes.get(run_result.status))
def echo(buf="", stripcolor=False, level=None, datestamp=False, end="\n", auto=True, **kw):
    from dateutil.tz import tzlocal
    from datetime import datetime
    from time import strftime

    buf = str(buf)
    if datestamp is True:
        now = datetime.now(tzlocal())
        stamp = strftime("%Y-%b-%d %I:%M:%S%P %Z (%a)", now.timetuple())
        buf = "%s %s" % (stamp, buf)
    if level is not None:
        if level == "debug":
            buf = "{autoblue}%s{/autoblue}" % (buf)
        elif level == "warn":
            buf = "{autoyellow}%s{/autoyellow}" % (buf)
        elif level == "error":
            buf = "{autored}%s{/autored}" % (buf)
        elif level == "success":
            buf = "{autogreen}%s{/autogreen}" % (buf)
    if isinstance(buf, colorclass.Color) is False:
        buf = colorclass.Color(buf)
    if stripcolor is False:
        print(buf, end=end)
    else:
        print(buf.value_no_colors, end=end)
    return
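# A minimal usage sketch for the echo() helper above (illustrative calls only,
# not part of the original module): level selects an {auto...} colorclass tag,
# datestamp prefixes a timestamp, and stripcolor prints the uncolored value.
echo("cache miss", level="warn")
echo("build complete", level="success", datestamp=True)
echo("plain output, no ANSI codes", stripcolor=True)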
def inputstring(prompt: str, oldvalue: str = None, opts: object = None, timeout=0,
                timeouthandler=timeouthandler, mask=None, **kw) -> str:
    import readline

    def preinputhook():
        readline.insert_text(str(oldvalue))
        readline.redisplay()

    if oldvalue is not None:
        readline.set_pre_input_hook(preinputhook)
    try:
        inputfunc = raw_input
    except NameError:
        inputfunc = input
    oldcompleter = readline.get_completer()
    olddelims = readline.get_completer_delims()
    multiple = kw["multiple"] if "multiple" in kw else None
    completer = kw["completer"] if "completer" in kw else None
    if completer is not None and callable(completer.completer) is True:
        if opts is not None and opts.debug is True:
            echo("setting completer function", level="debug")
        readline.parse_and_bind("tab: complete")
        readline.set_completer(completer.completer)
        if multiple is True:
            readline.set_completer_delims(", ")
    while True:
        # signal.signal(signal.SIGALRM, timeouthandler)
        # signal.alarm(timeout)
        prompt = colorclass.Color(prompt)
        try:
            buf = inputfunc(prompt)
        except KeyboardInterrupt:
            echo("INTR")
            raise
        except EOFError:
            echo("EOF")
            raise
        finally:
            print("\x1b[0m", end="")
            # signal.alarm(0)
        if oldvalue is not None:
            readline.set_pre_input_hook(None)
        if buf is None or buf == "":
            if "noneok" in kw and kw["noneok"] is True:
                return None
            else:
                return oldvalue
        if mask is not None:
            echo(re.match(mask, buf), level="debug")
            if re.match(mask, buf) is None:
                echo("invalid input")
                echo()
                continue
        if multiple is True:
            completions = buf.split(",")
        else:
            completions = [buf]
        completions = [c.strip() for c in completions]
        if "verify" in kw and callable(kw["verify"]):
            verify = kw["verify"]
        else:
            result = buf
            break
        bang = []
        for c in completions:
            bang += c.split(" ")
        completions = bang
        validcompletions = []
        if opts is not None and opts.debug is True:
            echo("inputstring.200: verify is callable", level="debug")
        invalid = 0
        for c in completions:
            if verify(opts, c) is True:
                validcompletions.append(c)
            else:
                echo("%r is not valid" % (c))
                invalid += 1
                continue
        if invalid == 0:
            echo("inputstring.220: no invalid entries, exiting loop")
            result = validcompletions
            break
    readline.set_completer(oldcompleter)
    readline.set_completer_delims(olddelims)
    return result
def run_tasks(
    puppet_account_id,
    current_account_id,
    tasks_to_run,
    num_workers,
    is_dry_run=False,
    is_list_launches=None,
    execution_mode="hub",
    on_complete_url=None,
    running_exploded=False,
    output_cache_starting_point="",
):
    codebuild_id = os.getenv("CODEBUILD_BUILD_ID", "LOCAL_BUILD")
    if is_list_launches:
        should_use_eventbridge = False
        should_forward_failures_to_opscenter = False
    else:
        should_use_eventbridge = (
            config.get_should_use_eventbridge(puppet_account_id, os.environ.get("AWS_DEFAULT_REGION"))
            and not is_dry_run
        )
        should_forward_failures_to_opscenter = (
            config.get_should_forward_failures_to_opscenter(puppet_account_id, os.environ.get("AWS_DEFAULT_REGION"))
            and not is_dry_run
        )

    ssm_client = None
    if should_forward_failures_to_opscenter:
        with betterboto_client.ClientContextManager("ssm") as ssm:
            ssm_client = ssm

    entries = []

    for result_type in ["start", "failure", "success", "timeout", "process_failure", "processing_time", "broken_task"]:
        os.makedirs(Path(constants.RESULTS_DIRECTORY) / result_type)
    os.makedirs(Path(constants.OUTPUT))

    logger.info(f"About to run workflow with {num_workers} workers")
    if not (running_exploded or is_list_launches):
        tasks.print_stats()

    if is_list_launches:
        should_use_shared_scheduler = False
    else:
        should_use_shared_scheduler = config.get_should_use_shared_scheduler(puppet_account_id)

    build_params = dict(
        detailed_summary=True,
        workers=num_workers,
        log_level=os.environ.get("LUIGI_LOG_LEVEL", constants.LUIGI_DEFAULT_LOG_LEVEL),
    )
    if should_use_shared_scheduler:
        os.system(constants.START_SHARED_SCHEDULER_COMMAND)
    else:
        build_params["local_scheduler"] = True

    if should_use_shared_scheduler:
        logger.info(f"should_use_shared_scheduler: {should_use_shared_scheduler}")

    if output_cache_starting_point != "":
        dst = "GetSSMParamTask.zip"
        urlretrieve(output_cache_starting_point, dst)
        shutil.unpack_archive("GetSSMParamTask.zip", ".", "zip")

    run_result = luigi.build(tasks_to_run, **build_params)

    exit_status_codes = {
        LuigiStatusCode.SUCCESS: 0,
        LuigiStatusCode.SUCCESS_WITH_RETRY: 0,
        LuigiStatusCode.FAILED: 1,
        LuigiStatusCode.FAILED_AND_SCHEDULING_FAILED: 2,
        LuigiStatusCode.SCHEDULING_FAILED: 3,
        LuigiStatusCode.NOT_RUN: 4,
        LuigiStatusCode.MISSING_EXT: 5,
    }

    cache_invalidator = os.environ.get("SCT_CACHE_INVALIDATOR")

    has_spoke_failures = False
    if execution_mode == constants.EXECUTION_MODE_HUB:
        logger.info("Checking spoke executions...")
        all_run_deploy_in_spoke_tasks = glob(
            f"output/RunDeployInSpokeTask/**/{cache_invalidator}.json", recursive=True)
        n_all_run_deploy_in_spoke_tasks = len(all_run_deploy_in_spoke_tasks)
        index = 0
        for filename in all_run_deploy_in_spoke_tasks:
            result = json.loads(open(filename, "r").read())
            spoke_account_id = result.get("account_id")
            build = result.get("build")
            build_id = build.get("id")
            logger.info(
                f"[{index}/{n_all_run_deploy_in_spoke_tasks}] Checking spoke execution for account: {spoke_account_id} build: {build_id}")
            index += 1
            status = "IN_PROGRESS"
            while status == "IN_PROGRESS":
                status, build, has_failed, is_running = get_build_status_for_in(build_id, spoke_account_id)
                if has_failed:
                    has_spoke_failures = True
                    params_for_results = dict(account_id=spoke_account_id, build_id=build_id)
                    for ev in build.get("environment").get("environmentVariables", []):
                        params_for_results[ev.get("name")] = ev.get("value")
                    failure = dict(
                        event_type="failure",
                        task_type="RunDeployInSpokeTask",
                        task_params=params_for_results,
                        params_for_results=params_for_results,
                        exception_type="<class 'Exception'>",
                        exception_stack_trace=[
                            f"Codebuild in spoke did not succeed: {build.get('buildStatus')}"
                        ],
                    )
                    open(f"results/failure/RunDeployInSpokeTask-{spoke_account_id}.json", "w").write(json.dumps(failure))
                elif is_running:
                    time.sleep(10)

    dry_run_tasks = (
        glob(f"output/ProvisionProductDryRunTask/**/{cache_invalidator}.json", recursive=True)
        + glob(f"output/TerminateProductDryRunTask/**/{cache_invalidator}.json", recursive=True)
        + glob(f"output/ProvisionStackDryRunTask/**/{cache_invalidator}.json", recursive=True)
        + glob(f"output/TerminateStackDryRunTask/**/{cache_invalidator}.json", recursive=True)
    )

    if is_list_launches:
        if is_list_launches == "table":
            table = [[
                "account_id", "region", "launch/stack", "portfolio", "product",
                "expected_version", "actual_version", "active", "status",
            ]]
            for filename in dry_run_tasks:
                result = json.loads(open(filename, "r").read())
                current_version = (
                    Color("{green}" + result.get("current_version") + "{/green}")
                    if result.get("current_version") == result.get("new_version")
                    else Color("{red}" + result.get("current_version") + "{/red}"))
                active = (
                    Color("{green}" + str(result.get("active")) + "{/green}")
                    if result.get("active")
                    else Color("{red}" + str(result.get("active")) + "{/red}"))
                current_status = (
                    Color("{green}" + result.get("current_status") + "{/green}")
                    if result.get("current_status") == "AVAILABLE"
                    else Color("{red}" + result.get("current_status") + "{/red}"))
                table.append([
                    result.get("params").get("account_id"),
                    result.get("params").get("region"),
                    f'Launch:{result.get("params").get("launch_name")}'
                    if result.get("params").get("launch_name")
                    else f'Stack:{result.get("params").get("stack_name")}',
                    result.get("params").get("portfolio"),
                    result.get("params").get("product"),
                    result.get("new_version"),
                    current_version,
                    active,
                    current_status,
                ])
            click.echo(terminaltables.AsciiTable(table).table)
        elif is_list_launches == "json":
            results = dict()
            for filename in glob(f"output/ProvisionProductDryRunTask/**/{cache_invalidator}.json", recursive=True):
                result = json.loads(open(filename, "r").read())
                account_id = result.get("params").get("account_id")
                region = result.get("params").get("region")
                launch_name = result.get("params").get("launch_name")
                results[f"{account_id}_{region}_{launch_name}"] = dict(
                    account_id=account_id,
                    region=region,
                    launch=launch_name,
                    portfolio=result.get("params").get("portfolio"),
                    product=result.get("params").get("product"),
                    expected_version=result.get("new_version"),
                    actual_version=result.get("current_version"),
                    active=result.get("active"),
                    status=result.get("current_status"),
                )
            click.echo(json.dumps(results, indent=4, default=str))
        else:
            raise Exception(f"Unsupported format: {is_list_launches}")
    else:
        click.echo("Results")
        if is_dry_run:
            table_data = [
                ["Result", "Launch/Stack", "Account", "Region", "Current Version", "New Version", "Notes"],
            ]
            table = terminaltables.AsciiTable(table_data)
            for filename in dry_run_tasks:
                result = json.loads(open(filename, "r").read())
                table_data.append([
                    result.get("effect"),
                    f'Launch:{result.get("params").get("launch_name")}'
                    if result.get("params").get("launch_name")
                    else f'Stack:{result.get("params").get("stack_name")}',
                    result.get("params").get("account_id"),
                    result.get("params").get("region"),
                    result.get("current_version"),
                    result.get("new_version"),
                    result.get("notes"),
                ])
            click.echo(table.table)
        else:
            table_data = [
                ["Action", "Params", "Duration"],
            ]
            table = terminaltables.AsciiTable(table_data)
            for filename in glob("results/processing_time/*.json"):
                result_contents = open(filename, "r").read()
                result = json.loads(result_contents)
                params = result.get("params_for_results")
                if should_use_eventbridge:
                    entries.append({
                        # 'Time': ,
                        "Source": constants.SERVICE_CATALOG_PUPPET_EVENT_SOURCE,
                        "Resources": [
                            # 'string',
                        ],
                        "DetailType": result.get("task_type"),
                        "Detail": result_contents,
                        "EventBusName": constants.EVENT_BUS_IN_SPOKE_NAME
                        if execution_mode == constants.EXECUTION_MODE_SPOKE
                        else constants.EVENT_BUS_NAME,
                    })
                params = yaml.safe_dump(params)
                table_data.append([
                    result.get("task_type"),
                    params,
                    result.get("duration"),
                ])
            click.echo(table.table)

            for filename in glob("results/failure/*.json"):
                result = json.loads(open(filename, "r").read())
                params = result.get("params_for_results")
                if should_forward_failures_to_opscenter:
                    title = f"{result.get('task_type')} failed: {params.get('launch_name')} - {params.get('account_id')} - {params.get('region')}"
                    logging.info(f"Sending failure to opscenter: {title}")
                    operational_data = dict(codebuild_id=dict(Value=codebuild_id, Type="SearchableString"))
                    for param_name, param in params.items():
                        operational_data[param_name] = {
                            "Value": json.dumps(param, default=str),
                            "Type": "SearchableString",
                        }
                    description = "\n".join(result.get("exception_stack_trace"))[-1024:]
                    ssm_client.create_ops_item(
                        Title=title,
                        Description=description,
                        OperationalData=operational_data,
                        Priority=1,
                        Source=constants.SERVICE_CATALOG_PUPPET_OPS_CENTER_SOURCE,
                        Tags=[{"Key": "ServiceCatalogPuppet:Actor", "Value": "ops-item"}],
                    )
                click.echo(colorclass.Color("{red}" + result.get("task_type") + " failed{/red}"))
                click.echo(f"{yaml.safe_dump({'parameters': result.get('task_params')})}")
                click.echo("\n".join(result.get("exception_stack_trace")))
                click.echo("")

        if should_use_eventbridge:
            logging.info(f"Sending {len(entries)} events to eventbridge")
            with betterboto_client.CrossAccountClientContextManager(
                "events",
                config.get_puppet_role_arn(current_account_id),
                f"{current_account_id}-{config.get_puppet_role_name()}",
            ) as events:
                for i in range(0, len(entries), constants.EVENTBRIDGE_MAX_EVENTS_PER_CALL):
                    events.put_events(Entries=entries[i:i + constants.EVENTBRIDGE_MAX_EVENTS_PER_CALL])
                    time.sleep(1)
            logging.info(f"Finished sending {len(entries)} events to eventbridge")

    exit_status_code = exit_status_codes.get(run_result.status)

    if on_complete_url:
        logger.info(f"About to post results")
        if exit_status_code == 0:
            result = dict(
                Status="SUCCESS",
                Reason=f"All tasks run with success: {codebuild_id}",
                UniqueId=codebuild_id.replace(":", "").replace("-", ""),
                Data=f"{codebuild_id}",
            )
        else:
            result = dict(
                Status="FAILURE",
                Reason=f"All tasks did not run with success: {codebuild_id}",
                UniqueId=codebuild_id.replace(":", "").replace("-", ""),
                Data=f"{codebuild_id}",
            )
        req = urllib.request.Request(url=on_complete_url, data=json.dumps(result).encode(), method="PUT")
        with urllib.request.urlopen(req) as f:
            pass
        logger.info(f.status)
        logger.info(f.reason)

    if running_exploded:
        pass
    else:
        if has_spoke_failures:
            sys.exit(1)
        else:
            sys.exit(exit_status_code)
data = json.loads(result.text)
orders = {}
buy_orders = []
for order in data['payload']['buy_orders']:
    if (order['region'] == 'en') and (order['platform'] == 'pc'):
        item_name = order['item']['url_name']
        mod_rank = str(order.get('mod_rank', 0))
        get_order(orders, order)
        get_stats(stats, order)
        buy_orders.append([
            colorclass.Color(order['item']['en']['item_name']),
            colorclass.Color(mod_rank),
            colorclass.Color(str(order['item'].get('mod_max_rank', 0))),
            colorclass.Color(str(order['quantity'])),
            colorclass.Color(str(int(round(stats[item_name][mod_rank]['buy_90_day']))) + 'p'),
            colorclass.Color(str(int(round(stats[item_name][mod_rank]['buy_48_hr']))) + 'p'),
            colorclass.Color(
def fail(string):
    print(colorclass.Color("{hired}[" + NEGATIVE + "]{/red}") + SEPARATOR + string)
def generate_via_luigi(p, branch_override=None):
    factory_version = constants.VERSION if branch_override is None else \
        "https://github.com/awslabs/aws-service-catalog-factory/archive/{}.zip".format(branch_override)
    logger.info('Generating')
    all_tasks = {}
    all_regions = get_regions()
    products_by_region = {}
    version_pipelines_to_build = []

    for portfolio_file_name in os.listdir(p):
        if '.yaml' in portfolio_file_name:
            p_name = portfolio_file_name.split(".")[0]
            output_path = os.path.sep.join([constants.OUTPUT, p_name])
            portfolios_file_path = os.path.sep.join([p, portfolio_file_name])
            portfolios = generate_portfolios(portfolios_file_path)
            for region in all_regions:
                for portfolio in portfolios.get('Portfolios', []):
                    create_portfolio_task_args = {
                        "region": region,
                        "portfolio_group_name": p_name,
                        "display_name": portfolio.get('DisplayName'),
                        "description": portfolio.get('Description'),
                        "provider_name": portfolio.get('ProviderName'),
                        "tags": portfolio.get('Tags'),
                    }
                    create_portfolio_task = luigi_tasks_and_targets.CreatePortfolioTask(
                        **create_portfolio_task_args
                    )
                    all_tasks[f"portfolio_{p_name}_{portfolio.get('DisplayName')}-{region}"] = create_portfolio_task
                    create_portfolio_association_task = luigi_tasks_and_targets.CreatePortfolioAssociationTask(
                        **create_portfolio_task_args,
                        associations=portfolio.get('Associations'),
                        factory_version=factory_version,
                    )
                    all_tasks[
                        f"portfolio_associations_{p_name}_{portfolio.get('DisplayName')}-{region}"
                    ] = create_portfolio_association_task
                    nested_products = portfolio.get('Products', []) + portfolio.get('Components', [])
                    for product in nested_products:
                        product_uid = f"{product.get('Name')}"
                        if products_by_region.get(product_uid) is None:
                            products_by_region[product_uid] = {}
                        create_product_task_args = {
                            "region": region,
                            "name": product.get('Name'),
                            "owner": product.get('Owner'),
                            "description": product.get('Description'),
                            "distributor": product.get('Distributor'),
                            "support_description": product.get('SupportDescription'),
                            "support_email": product.get('SupportEmail'),
                            "support_url": product.get('SupportUrl'),
                            "tags": product.get('Tags'),
                            "uid": "-".join([
                                create_portfolio_task_args.get('portfolio_group_name'),
                                create_portfolio_task_args.get('display_name'),
                                product.get('Name'),
                            ])
                        }
                        products_by_region[product_uid][region] = create_product_task_args
                        create_product_task = luigi_tasks_and_targets.CreateProductTask(
                            **create_product_task_args
                        )
                        all_tasks[
                            f"product_{p_name}_{portfolio.get('DisplayName')}_{product.get('Name')}-{region}"
                        ] = create_product_task
                        associate_product_with_portfolio_task = luigi_tasks_and_targets.AssociateProductWithPortfolioTask(
                            region=region,
                            portfolio_args=create_portfolio_task_args,
                            product_args=create_product_task_args,
                        )
                        all_tasks[
                            f"association_{p_name}_{portfolio.get('DisplayName')}_{product.get('Name')}-{region}"
                        ] = associate_product_with_portfolio_task
                        for version in product.get('Versions', []):
                            ensure_product_version_details_correct_task = luigi_tasks_and_targets.EnsureProductVersionDetailsCorrect(
                                region=region,
                                version=version,
                                product_args=create_product_task_args,
                            )
                            version_pipelines_to_build.append({
                                'create_product_task_args': create_product_task_args,
                                'product': product,
                                'version': version,
                            })
                            all_tasks[
                                f"version_{p_name}_{portfolio.get('Name')}_{product.get('Name')}_{version.get('Name')}-{region}"
                            ] = ensure_product_version_details_correct_task
                for product in portfolios.get('Products', []):
                    product_uid = f"{product.get('Name')}"
                    if products_by_region.get(product_uid) is None:
                        products_by_region[product_uid] = {}
                    create_product_task_args = {
                        "region": region,
                        "name": product.get('Name'),
                        "owner": product.get('Owner'),
                        "description": product.get('Description'),
                        "distributor": product.get('Distributor'),
                        "support_description": product.get('SupportDescription'),
                        "support_email": product.get('SupportEmail'),
                        "support_url": product.get('SupportUrl'),
                        "tags": product.get('Tags'),
                        "uid": product.get('Name'),
                    }
                    products_by_region[product_uid][region] = create_product_task_args
                    create_product_task = luigi_tasks_and_targets.CreateProductTask(
                        **create_product_task_args
                    )
                    for portfolio in product.get('Portfolios', []):
                        create_portfolio_task_args = all_tasks[f"portfolio_{p_name}_{portfolio}-{region}"].param_kwargs
                        associate_product_with_portfolio_task = luigi_tasks_and_targets.AssociateProductWithPortfolioTask(
                            region=region,
                            portfolio_args=create_portfolio_task_args,
                            product_args=create_product_task_args,
                        )
                        all_tasks[
                            f"association_{portfolio}_{product.get('Name')}-{region}"
                        ] = associate_product_with_portfolio_task
                    for version in product.get('Versions', []):
                        version_pipelines_to_build.append({
                            'create_product_task_args': create_product_task_args,
                            'product': product,
                            'version': version,
                        })
                        ensure_product_version_details_correct_task = luigi_tasks_and_targets.EnsureProductVersionDetailsCorrect(
                            region=region,
                            version=version,
                            product_args=create_product_task_args,
                        )
                        all_tasks[
                            f"version_{product.get('Name')}_{version.get('Name')}-{region}"
                        ] = ensure_product_version_details_correct_task
                    all_tasks[f"product_{p_name}-{region}"] = create_product_task

    logger.info("Going to create pipeline tasks")
    for version_pipeline_to_build in version_pipelines_to_build:
        product_name = version_pipeline_to_build.get('product').get('Name')
        create_args = {
            "all_regions": all_regions,
            "version": version_pipeline_to_build.get('version'),
            "product": version_pipeline_to_build.get('product'),
            "provisioner": version_pipeline_to_build.get('version').get('Provisioner', {'Type': 'CloudFormation'}),
            "products_args_by_region": products_by_region.get(product_name),
            "factory_version": factory_version,
        }
        t = luigi_tasks_and_targets.CreateVersionPipelineTemplateTask(
            **create_args
        )
        logger.info(f"created pipeline_template_{product_name}-{version_pipeline_to_build.get('version').get('Name')}")
        all_tasks[f"pipeline_template_{product_name}-{version_pipeline_to_build.get('version').get('Name')}"] = t

        t = luigi_tasks_and_targets.CreateVersionPipelineTask(
            **create_args,
        )
        logger.info(f"created pipeline_{product_name}-{version_pipeline_to_build.get('version').get('Name')}")
        all_tasks[f"pipeline_{product_name}-{version_pipeline_to_build.get('version').get('Name')}"] = t

    for type in ["failure", "success", "timeout", "process_failure", "processing_time", "broken_task"]:
        os.makedirs(Path(constants.RESULTS_DIRECTORY) / type)

    run_result = luigi.build(
        all_tasks.values(),
        local_scheduler=True,
        detailed_summary=True,
        workers=10,
        log_level='INFO',
    )

    table_data = [
        ['Result', 'Task', 'Significant Parameters', 'Duration'],
    ]
    table = terminaltables.AsciiTable(table_data)
    for filename in glob('results/processing_time/*.json'):
        result = json.loads(open(filename, 'r').read())
        table_data.append([
            colorclass.Color("{green}Success{/green}"),
            result.get('task_type'),
            yaml.safe_dump(result.get('params_for_results')),
            result.get('duration'),
        ])
    click.echo(table.table)

    for filename in glob('results/failure/*.json'):
        result = json.loads(open(filename, 'r').read())
        click.echo(colorclass.Color("{red}" + result.get('task_type') + " failed{/red}"))
        click.echo(f"{yaml.safe_dump({'parameters': result.get('task_params')})}")
        click.echo("\n".join(result.get('exception_stack_trace')))
        click.echo('')
def run_tasks(tasks_to_run, num_workers, dry_run=False):
    should_use_eventbridge = config.get_should_use_eventbridge(
        os.environ.get("AWS_DEFAULT_REGION")) and not dry_run
    should_forward_failures_to_opscenter = config.get_should_forward_failures_to_opscenter(
        os.environ.get("AWS_DEFAULT_REGION")) and not dry_run

    ssm_client = None
    if should_forward_failures_to_opscenter:
        with betterboto_client.ClientContextManager('ssm') as ssm:
            ssm_client = ssm

    entries = []

    for type in ["failure", "success", "timeout", "process_failure", "processing_time", "broken_task"]:
        os.makedirs(Path(constants.RESULTS_DIRECTORY) / type)

    logger.info(f"About to run workflow with {num_workers} workers")
    run_result = luigi.build(
        tasks_to_run,
        local_scheduler=True,
        detailed_summary=True,
        workers=num_workers,
        log_level='INFO',
    )

    exit_status_codes = {
        LuigiStatusCode.SUCCESS: 0,
        LuigiStatusCode.SUCCESS_WITH_RETRY: 0,
        LuigiStatusCode.FAILED: 1,
        LuigiStatusCode.FAILED_AND_SCHEDULING_FAILED: 2,
        LuigiStatusCode.SCHEDULING_FAILED: 3,
        LuigiStatusCode.NOT_RUN: 4,
        LuigiStatusCode.MISSING_EXT: 5,
    }

    click.echo("Results")
    if dry_run:
        table_data = [
            ['Result', 'Launch', 'Account', 'Region', 'Current Version', 'New Version', 'Notes'],
        ]
        table = terminaltables.AsciiTable(table_data)
        for filename in glob('output/TerminateProductDryRunTask/*.json'):
            result = json.loads(open(filename, 'r').read())
            table_data.append([
                result.get('effect'),
                result.get('params').get('launch_name'),
                result.get('params').get('account_id'),
                result.get('params').get('region'),
                result.get('current_version'),
                result.get('new_version'),
                result.get('notes'),
            ])
        for filename in glob('output/ProvisionProductDryRunTask/*.json'):
            result = json.loads(open(filename, 'r').read())
            table_data.append([
                result.get('effect'),
                result.get('params').get('launch_name'),
                result.get('params').get('account_id'),
                result.get('params').get('region'),
                result.get('current_version'),
                result.get('new_version'),
                result.get('notes'),
            ])
        click.echo(table.table)
    else:
        table_data = [
            ['Action', 'Params', 'Duration'],
        ]
        table = terminaltables.AsciiTable(table_data)
        for filename in glob('results/processing_time/*.json'):
            result_contents = open(filename, 'r').read()
            result = json.loads(result_contents)
            params = result.get('params_for_results')
            if should_use_eventbridge:
                entries.append({
                    # 'Time': ,
                    'Source': constants.SERVICE_CATALOG_PUPPET_EVENT_SOURCE,
                    'Resources': [
                        # 'string',
                    ],
                    'DetailType': result.get('task_type'),
                    'Detail': result_contents,
                    'EventBusName': constants.EVENT_BUS_NAME,
                })
            params = yaml.safe_dump(params)
            table_data.append([
                result.get('task_type'),
                params,
                result.get('duration'),
            ])
        click.echo(table.table)

        for filename in glob('results/failure/*.json'):
            result = json.loads(open(filename, 'r').read())
            params = result.get('params_for_results')
            if should_forward_failures_to_opscenter:
                title = f"{result.get('task_type')} failed: {params.get('launch_name')} - {params.get('account_id')} - {params.get('region')}"
                logging.info(f"Sending failure to opscenter: {title}")
                operational_data = {}
                for param_name, param in params.items():
                    operational_data[param_name] = {
                        "Value": json.dumps(param, default=str),
                        'Type': 'SearchableString',
                    }
                description = "\n".join(result.get('exception_stack_trace'))[-1024:]
                ssm_client.create_ops_item(
                    Title=title,
                    Description=description,
                    OperationalData=operational_data,
                    Priority=1,
                    Source=constants.SERVICE_CATALOG_PUPPET_OPS_CENTER_SOURCE,
                    Tags=[
                        {'Key': 'ServiceCatalogPuppet:Actor', 'Value': 'ops-item'},
                    ])
            click.echo(colorclass.Color("{red}" + result.get('task_type') + " failed{/red}"))
            click.echo(f"{yaml.safe_dump({'parameters': result.get('task_params')})}")
            click.echo("\n".join(result.get('exception_stack_trace')))
            click.echo('')

    if should_use_eventbridge:
        logging.info(f"Sending {len(entries)} events to eventbridge")
        with betterboto_client.ClientContextManager('events') as events:
            for i in range(0, len(entries), constants.EVENTBRIDGE_MAX_EVENTS_PER_CALL):
                events.put_events(Entries=entries[i:i + constants.EVENTBRIDGE_MAX_EVENTS_PER_CALL])
                time.sleep(1)
        logging.info(f"Finished sending {len(entries)} events to eventbridge")

    sys.exit(exit_status_codes.get(run_result.status))
def run_tasks_for_generate_shares(tasks_to_run):
    for type in ["failure", "success", "timeout", "process_failure", "processing_time", "broken_task"]:
        os.makedirs(Path(constants.RESULTS_DIRECTORY) / type)

    run_result = luigi.build(
        tasks_to_run,
        local_scheduler=True,
        detailed_summary=True,
        workers=10,
        log_level='INFO',
    )

    should_use_sns = config.get_should_use_sns()
    puppet_account_id = config.get_puppet_account_id()
    version = config.get_puppet_version()

    for region in config.get_regions():
        sharing_policies = {
            'accounts': [],
            'organizations': [],
        }
        with betterboto_client.ClientContextManager('cloudformation', region_name=region) as cloudformation:
            cloudformation.ensure_deleted(StackName="servicecatalog-puppet-shares")

        logger.info(f"generating policies collection for region {region}")
        if os.path.exists(os.path.sep.join(['data', 'bucket'])):
            logger.info(f"Updating policies for the region: {region}")
            path = os.path.sep.join(['data', 'bucket', region, 'accounts'])
            if os.path.exists(path):
                for account_file in os.listdir(path):
                    account = account_file.split(".")[0]
                    sharing_policies['accounts'].append(account)

            path = os.path.sep.join(['data', 'bucket', region, 'organizations'])
            if os.path.exists(path):
                for organization_file in os.listdir(path):
                    organization = organization_file.split(".")[0]
                    sharing_policies['organizations'].append(organization)

        logger.info(f"Finished generating policies collection")

        template = config.env.get_template('policies.template.yaml.j2').render(
            sharing_policies=sharing_policies,
            VERSION=version,
        )
        with betterboto_client.ClientContextManager('cloudformation', region_name=region) as cloudformation:
            cloudformation.create_or_update(
                StackName="servicecatalog-puppet-policies",
                TemplateBody=template,
                NotificationARNs=[
                    f"arn:aws:sns:{region}:{puppet_account_id}:servicecatalog-puppet-cloudformation-regional-events"
                ] if should_use_sns else [],
            )

    for filename in glob('results/failure/*.json'):
        result = json.loads(open(filename, 'r').read())
        click.echo(colorclass.Color("{red}" + result.get('task_type') + " failed{/red}"))
        click.echo(f"{yaml.safe_dump({'parameters': result.get('task_params')})}")
        click.echo("\n".join(result.get('exception_stack_trace')))
        click.echo('')

    exit_status_codes = {
        LuigiStatusCode.SUCCESS: 0,
        LuigiStatusCode.SUCCESS_WITH_RETRY: 0,
        LuigiStatusCode.FAILED: 1,
        LuigiStatusCode.FAILED_AND_SCHEDULING_FAILED: 2,
        LuigiStatusCode.SCHEDULING_FAILED: 3,
        LuigiStatusCode.NOT_RUN: 4,
        LuigiStatusCode.MISSING_EXT: 5,
    }
    sys.exit(exit_status_codes.get(run_result.status))
def run_tasks(
    puppet_account_id,
    current_account_id,
    tasks_to_run,
    num_workers,
    is_dry_run=False,
    is_list_launches=None,
    execution_mode="hub",
):
    if is_list_launches:
        should_use_eventbridge = False
        should_forward_failures_to_opscenter = False
    else:
        should_use_eventbridge = (
            config.get_should_use_eventbridge(puppet_account_id, os.environ.get("AWS_DEFAULT_REGION"))
            and not is_dry_run
        )
        should_forward_failures_to_opscenter = (
            config.get_should_forward_failures_to_opscenter(puppet_account_id, os.environ.get("AWS_DEFAULT_REGION"))
            and not is_dry_run
        )

    ssm_client = None
    if should_forward_failures_to_opscenter:
        with betterboto_client.ClientContextManager("ssm") as ssm:
            ssm_client = ssm

    entries = []

    for result_type in ["failure", "success", "timeout", "process_failure", "processing_time", "broken_task"]:
        os.makedirs(Path(constants.RESULTS_DIRECTORY) / result_type)

    logger.info(f"About to run workflow with {num_workers} workers")
    tasks.print_stats()
    run_result = luigi.build(
        tasks_to_run,
        local_scheduler=True,
        detailed_summary=True,
        workers=num_workers,
        log_level="INFO",
    )

    exit_status_codes = {
        LuigiStatusCode.SUCCESS: 0,
        LuigiStatusCode.SUCCESS_WITH_RETRY: 0,
        LuigiStatusCode.FAILED: 1,
        LuigiStatusCode.FAILED_AND_SCHEDULING_FAILED: 2,
        LuigiStatusCode.SCHEDULING_FAILED: 3,
        LuigiStatusCode.NOT_RUN: 4,
        LuigiStatusCode.MISSING_EXT: 5,
    }

    if is_list_launches:
        if is_list_launches == "table":
            table = [[
                "account_id", "region", "launch", "portfolio", "product",
                "expected_version", "actual_version", "active", "status",
            ]]
            for filename in glob("output/ProvisionProductDryRunTask/*.json"):
                result = json.loads(open(filename, "r").read())
                current_version = (
                    Color("{green}" + result.get("current_version") + "{/green}")
                    if result.get("current_version") == result.get("new_version")
                    else Color("{red}" + result.get("current_version") + "{/red}"))
                active = (
                    Color("{green}" + str(result.get("active")) + "{/green}")
                    if result.get("active")
                    else Color("{red}" + str(result.get("active")) + "{/red}"))
                current_status = (
                    Color("{green}" + result.get("current_status") + "{/green}")
                    if result.get("current_status") == "AVAILABLE"
                    else Color("{red}" + result.get("current_status") + "{/red}"))
                table.append([
                    result.get("params").get("account_id"),
                    result.get("params").get("region"),
                    result.get("params").get("launch_name"),
                    result.get("params").get("portfolio"),
                    result.get("params").get("product"),
                    result.get("new_version"),
                    current_version,
                    active,
                    current_status,
                ])
            click.echo(terminaltables.AsciiTable(table).table)
        elif is_list_launches == "json":
            results = dict()
            for filename in glob("output/ProvisionProductDryRunTask/*.json"):
                result = json.loads(open(filename, "r").read())
                account_id = result.get("params").get("account_id")
                region = result.get("params").get("region")
                launch_name = result.get("params").get("launch_name")
                results[f"{account_id}_{region}_{launch_name}"] = dict(
                    account_id=account_id,
                    region=region,
                    launch=launch_name,
                    portfolio=result.get("params").get("portfolio"),
                    product=result.get("params").get("product"),
                    expected_version=result.get("new_version"),
                    actual_version=result.get("current_version"),
                    active=result.get("active"),
                    status=result.get("current_status"),
                )
            click.echo(json.dumps(results, indent=4, default=str))
        else:
            raise Exception(f"Unsupported format: {is_list_launches}")
    else:
        click.echo("Results")
        if is_dry_run:
            table_data = [
                ["Result", "Launch", "Account", "Region", "Current Version", "New Version", "Notes"],
            ]
            table = terminaltables.AsciiTable(table_data)
            for filename in glob("output/TerminateProductDryRunTask/*.json"):
                result = json.loads(open(filename, "r").read())
                table_data.append([
                    result.get("effect"),
                    result.get("params").get("launch_name"),
                    result.get("params").get("account_id"),
                    result.get("params").get("region"),
                    result.get("current_version"),
                    result.get("new_version"),
                    result.get("notes"),
                ])
            for filename in glob("output/ProvisionProductDryRunTask/*.json"):
                result = json.loads(open(filename, "r").read())
                table_data.append([
                    result.get("effect"),
                    result.get("params").get("launch_name"),
                    result.get("params").get("account_id"),
                    result.get("params").get("region"),
                    result.get("current_version"),
                    result.get("new_version"),
                    result.get("notes"),
                ])
            click.echo(table.table)
        else:
            table_data = [
                ["Action", "Params", "Duration"],
            ]
            table = terminaltables.AsciiTable(table_data)
            for filename in glob("results/processing_time/*.json"):
                result_contents = open(filename, "r").read()
                result = json.loads(result_contents)
                params = result.get("params_for_results")
                if should_use_eventbridge:
                    entries.append({
                        # 'Time': ,
                        "Source": constants.SERVICE_CATALOG_PUPPET_EVENT_SOURCE,
                        "Resources": [
                            # 'string',
                        ],
                        "DetailType": result.get("task_type"),
                        "Detail": result_contents,
                        "EventBusName": constants.EVENT_BUS_IN_SPOKE_NAME
                        if execution_mode == constants.EXECUTION_MODE_SPOKE
                        else constants.EVENT_BUS_NAME,
                    })
                params = yaml.safe_dump(params)
                table_data.append([
                    result.get("task_type"),
                    params,
                    result.get("duration"),
                ])
            click.echo(table.table)

            for filename in glob("results/failure/*.json"):
                result = json.loads(open(filename, "r").read())
                params = result.get("params_for_results")
                if should_forward_failures_to_opscenter:
                    title = f"{result.get('task_type')} failed: {params.get('launch_name')} - {params.get('account_id')} - {params.get('region')}"
                    logging.info(f"Sending failure to opscenter: {title}")
                    operational_data = {}
                    for param_name, param in params.items():
                        operational_data[param_name] = {
                            "Value": json.dumps(param, default=str),
                            "Type": "SearchableString",
                        }
                    description = "\n".join(result.get("exception_stack_trace"))[-1024:]
                    ssm_client.create_ops_item(
                        Title=title,
                        Description=description,
                        OperationalData=operational_data,
                        Priority=1,
                        Source=constants.SERVICE_CATALOG_PUPPET_OPS_CENTER_SOURCE,
                        Tags=[
                            {"Key": "ServiceCatalogPuppet:Actor", "Value": "ops-item"},
                        ],
                    )
                click.echo(colorclass.Color("{red}" + result.get("task_type") + " failed{/red}"))
                click.echo(f"{yaml.safe_dump({'parameters': result.get('task_params')})}")
                click.echo("\n".join(result.get("exception_stack_trace")))
                click.echo("")

        if should_use_eventbridge:
            logging.info(f"Sending {len(entries)} events to eventbridge")
            with betterboto_client.CrossAccountClientContextManager(
                "events",
                f"arn:aws:iam::{current_account_id}:role/servicecatalog-puppet/PuppetRole",
                f"{current_account_id}-PuppetRole",
            ) as events:
                for i in range(0, len(entries), constants.EVENTBRIDGE_MAX_EVENTS_PER_CALL):
                    events.put_events(Entries=entries[i:i + constants.EVENTBRIDGE_MAX_EVENTS_PER_CALL])
                    time.sleep(1)
            logging.info(f"Finished sending {len(entries)} events to eventbridge")

    sys.exit(exit_status_codes.get(run_result.status))
def success(string):
    print(colorclass.Color("{higreen}[" + POSITIVE + "]{/green}") + SEPARATOR + string)
def tentative(string):
    print(colorclass.Color("{hiyellow}[" + NEUTRAL + "]{/yellow}") + SEPARATOR + string)
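# The fail()/success()/tentative() helpers above rely on module-level constants
# that are not shown in these snippets. A minimal sketch of plausible
# definitions follows; the names exist in the original module, but these
# specific values are hypothetical and chosen only so the helpers run.
NEGATIVE = "FAIL"
POSITIVE = "OK"
NEUTRAL = "WAIT"
SEPARATOR = " "

success("connected to host")    # e.g. prints a bright green "[OK]" prefix
tentative("waiting for reply")  # e.g. prints a bright yellow "[WAIT]" prefix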
def run_tasks_for_dry_run(tasks_to_run):
    for type in ["failure", "success", "timeout", "process_failure", "processing_time", "broken_task"]:
        os.makedirs(Path(constants.RESULTS_DIRECTORY) / type)

    run_result = luigi.build(
        tasks_to_run,
        local_scheduler=True,
        detailed_summary=True,
        workers=10,
        log_level='INFO',
    )

    for filename in glob('results/failure/*.json'):
        result = json.loads(open(filename, 'r').read())
        click.echo(colorclass.Color("{red}" + result.get('task_type') + " failed{/red}"))
        click.echo(f"{yaml.safe_dump({'parameters': result.get('task_params')})}")
        click.echo("\n".join(result.get('exception_stack_trace')))
        click.echo('')

    exit_status_codes = {
        LuigiStatusCode.SUCCESS: 0,
        LuigiStatusCode.SUCCESS_WITH_RETRY: 0,
        LuigiStatusCode.FAILED: 1,
        LuigiStatusCode.FAILED_AND_SCHEDULING_FAILED: 2,
        LuigiStatusCode.SCHEDULING_FAILED: 3,
        LuigiStatusCode.NOT_RUN: 4,
        LuigiStatusCode.MISSING_EXT: 5,
    }

    click.echo("Dry run results")
    table_data = [
        ['Result', 'Launch', 'Account', 'Region', 'Current Version', 'New Version', 'Notes'],
    ]
    table = terminaltables.AsciiTable(table_data)
    for filename in glob('output/TerminateProductDryRunTask/*.json'):
        result = json.loads(open(filename, 'r').read())
        table_data.append([
            result.get('effect'),
            result.get('params').get('launch_name'),
            result.get('params').get('account_id'),
            result.get('params').get('region'),
            result.get('current_version'),
            result.get('new_version'),
            result.get('notes'),
        ])
    for filename in glob('output/ProvisionProductDryRunTask/*.json'):
        result = json.loads(open(filename, 'r').read())
        table_data.append([
            result.get('effect'),
            result.get('params').get('launch_name'),
            result.get('params').get('account_id'),
            result.get('params').get('region'),
            result.get('current_version'),
            result.get('new_version'),
            result.get('notes'),
        ])
    click.echo(table.table)

    sys.exit(exit_status_codes.get(run_result.status))
from pyload.utils.system import set_console_icon, set_console_title

from . import iface
from .__about__ import __credits__, __package__

standard_library.install_aliases()

try:
    import colorclass
except ImportError:
    colorclass = None
    autoblue = autogreen = autored = autowhite = autoyellow = lambda msg: msg
else:
    # bind tag/reset as default arguments so each generated helper keeps its
    # own color instead of the last one produced by the loop
    for tag, reset, _, _ in (_f for _f in colorclass.list_tags() if _f):
        globals()[tag] = lambda msg, tag=tag, reset=reset: colorclass.Color(
            "{{{0}}}{1}{{{2}}}".format(tag, msg, reset))
    if os.name == 'nt':
        colorclass.Windows.enable(auto_colors=True, reset_atexit=True)
    elif colorclass.is_light():
        colorclass.set_light_background()
    else:
        colorclass.set_dark_background()


def _gen_logo():
    text = os.linesep.join(
        '{0}© {3} {1} <{2}>'.format(' ' * 15, *info) for info in __credits__)
    return autowhite("""
 ____________ _ / | \ ___________ _ _______________ _ ___ / | ___/ | _ __ _ _| | ___ __ _ __| | \
def run_tasks(tasks_to_run):
    should_use_eventbridge = get_should_use_eventbridge(os.environ.get("AWS_DEFAULT_REGION"))
    should_forward_failures_to_opscenter = get_should_forward_failures_to_opscenter(
        os.environ.get("AWS_DEFAULT_REGION"))

    ssm_client = None
    if should_forward_failures_to_opscenter:
        with betterboto_client.ClientContextManager('ssm') as ssm:
            ssm_client = ssm

    entries = []

    for type in ["failure", "success", "timeout", "process_failure", "processing_time", "broken_task"]:
        os.makedirs(Path(constants.RESULTS_DIRECTORY) / type)

    run_result = luigi.build(
        tasks_to_run,
        local_scheduler=True,
        detailed_summary=True,
        workers=10,
        log_level='INFO',
    )

    table_data = [
        ['Action', 'Launch', 'Account', 'Region', 'Portfolio', 'Product', 'Version', 'Duration'],
    ]
    table = terminaltables.AsciiTable(table_data)
    for filename in glob('results/processing_time/*.json'):
        result_contents = open(filename, 'r').read()
        result = json.loads(result_contents)
        params = result.get('params_for_results')
        if should_use_eventbridge:
            entries.append({
                # 'Time': ,
                'Source': constants.SERVICE_CATALOG_PUPPET_EVENT_SOURCE,
                'Resources': [
                    # 'string',
                ],
                'DetailType': result.get('task_type'),
                'Detail': result_contents,
                'EventBusName': constants.EVENT_BUS_NAME,
            })
        table_data.append([
            result.get('task_type'),
            params.get('launch_name'),
            params.get('account_id'),
            params.get('region'),
            params.get('portfolio'),
            params.get('product'),
            params.get('version'),
            result.get('duration'),
        ])
    click.echo(table.table)

    for filename in glob('results/failure/*.json'):
        result = json.loads(open(filename, 'r').read())
        params = result.get('params_for_results')
        if should_forward_failures_to_opscenter:
            title = f"{result.get('task_type')} failed: {params.get('launch_name')} - {params.get('account_id')} - {params.get('region')}"
            logging.info(f"Sending failure to opscenter: {title}")
            ssm_client.create_ops_item(
                Title=title,
                Description="\n".join(result.get('exception_stack_trace')),
                OperationalData={
                    'launch_name': {'Value': params.get('launch_name'), 'Type': 'SearchableString'},
                    'account_id': {'Value': params.get('account_id'), 'Type': 'SearchableString'},
                    'region': {'Value': params.get('region'), 'Type': 'SearchableString'},
                    'task_type': {'Value': result.get('task_type'), 'Type': 'SearchableString'},
                },
                Priority=1,
                Source=constants.SERVICE_CATALOG_PUPPET_OPS_CENTER_SOURCE,
            )
        click.echo(colorclass.Color("{red}" + result.get('task_type') + " failed{/red}"))
        click.echo(f"{yaml.safe_dump({'parameters': result.get('task_params')})}")
        click.echo("\n".join(result.get('exception_stack_trace')))
        click.echo('')

    exit_status_codes = {
        LuigiStatusCode.SUCCESS: 0,
        LuigiStatusCode.SUCCESS_WITH_RETRY: 0,
        LuigiStatusCode.FAILED: 1,
        LuigiStatusCode.FAILED_AND_SCHEDULING_FAILED: 2,
        LuigiStatusCode.SCHEDULING_FAILED: 3,
        LuigiStatusCode.NOT_RUN: 4,
        LuigiStatusCode.MISSING_EXT: 5,
    }

    if should_use_eventbridge:
        logging.info(f"Sending {len(entries)} events to eventbridge")
        with betterboto_client.ClientContextManager('events') as events:
            for i in range(0, len(entries), constants.EVENTBRIDGE_MAX_EVENTS_PER_CALL):
                events.put_events(Entries=entries[i:i + constants.EVENTBRIDGE_MAX_EVENTS_PER_CALL])
                time.sleep(1)
        logging.info(f"Finished sending {len(entries)} events to eventbridge")

    sys.exit(exit_status_codes.get(run_result.status))
def show_pipelines(p, format):
    pipeline_names = [f"{constants.BOOTSTRAP_STACK_NAME}-pipeline"]
    for portfolio_file_name in os.listdir(p):
        if '.yaml' in portfolio_file_name:
            p_name = portfolio_file_name.split(".")[0]
            portfolios_file_path = os.path.sep.join([p, portfolio_file_name])
            portfolios = generate_portfolios(portfolios_file_path)
            for portfolio in portfolios.get('Portfolios', []):
                nested_products = portfolio.get('Products', []) + portfolio.get('Components', [])
                for product in nested_products:
                    for version in product.get('Versions', []):
                        pipeline_names.append(
                            f"{p_name}-{portfolio.get('DisplayName')}-{product.get('Name')}-{version.get('Name')}-pipeline"
                        )
            for product in portfolios.get('Products', []):
                for version in product.get('Versions', []):
                    pipeline_names.append(
                        f"{product.get('Name')}-{version.get('Name')}-pipeline"
                    )

    results = {}
    for pipeline_name in pipeline_names:
        result = aws.get_details_for_pipeline(pipeline_name)
        status = result.get('status')
        if status == "Succeeded":
            status = "{green}" + status + "{/green}"
        elif status == "Failed":
            status = "{red}" + status + "{/red}"
        else:
            status = "{yellow}" + status + "{/yellow}"
        if len(result.get('sourceRevisions')) > 0:
            revision = result.get('sourceRevisions')[0]
        else:
            revision = {
                'revisionId': 'N/A',
                'revisionSummary': 'N/A',
            }
        results[pipeline_name] = {
            "name": pipeline_name,
            "status": result.get('status'),
            "revision_id": revision.get('revisionId'),
            "revision_summary": revision.get('revisionSummary').strip(),
        }

    if format == "table":
        table_data = [
            ['Pipeline', 'Status', 'Last Commit Hash', 'Last Commit Message'],
        ]
        for result in results.values():
            if result.get('status') == "Succeeded":
                status = f"{{green}}{result.get('status')}{{/green}}"
            elif result.get('status') == "Failed":
                status = f"{{red}}{result.get('status')}{{/red}}"
            else:
                status = f"{{yellow}}{result.get('status')}{{/yellow}}"
            table_data.append([
                result.get('name'),
                colorclass.Color(status),
                result.get('revision_id'),
                result.get('revision_summary'),
            ])
        table = terminaltables.AsciiTable(table_data)
        click.echo(table.table)
    elif format == "json":
        click.echo(json.dumps(results, indent=4, default=str))
def clr(s, *args, **kwrds):
    """Return a color string, optionally formatted."""
    if args:
        s = s.format(*args, **kwrds)
    color = kwrds.get('color', 'autogreen')
    return colorclass.Color('{%s}%s{/%s}' % (color, s, color))
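# Usage sketch for clr() above (illustrative calls, not part of the original
# module): positional args are fed to str.format(), and the wrapping color
# defaults to 'autogreen' unless overridden via the 'color' keyword.
print(clr("downloaded {0} of {1} files", 3, 10))
print(clr("retrying", color="autoyellow"))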