def __init__(self, start_date, end_date, user_config=None):
    """Initialize the generator.

    Renders the class's Jinja2 TEMPLATE (optionally merged with a
    user-supplied YAML file) and stores the entries matching this class's
    name in ``self.config``.

    Args:
        start_date (datetime): start of the generation window.
        end_date (datetime): end of the generation window.
        user_config (str): optional path to a user-supplied YAML template.

    Raises:
        AttributeError: if the subclass does not define TEMPLATE or
            TEMPLATE_KWARGS.
    """
    if not self.TEMPLATE:
        raise AttributeError("Class attribute 'TEMPLATE' must be defined.")
    if not self.TEMPLATE_KWARGS:
        raise AttributeError(
            "Class attribute 'TEMPLATE_KWARGS' must be defined.")

    # Build the loader list locally instead of appending to the shared
    # module-level LOADER_LIST: appending on every instantiation grows the
    # global list without bound and leaks one instance's user-config loader
    # into every generator created afterwards.
    loaders = list(LOADER_LIST)
    loaders.append(PackageLoader("nise"))
    if user_config:
        loaders.append(
            FileSystemLoader(os.path.abspath(os.path.dirname(user_config))))

    env = Environment(loader=ChoiceLoader(loaders))
    env.globals["faker"] = faker_passthrough

    default_template = env.get_template(self.TEMPLATE)
    if user_config:
        user_template = env.get_template(os.path.basename(user_config))
        user_yaml = load_yaml(user_template.render(**self.TEMPLATE_KWARGS))

        # sort lists of dicts so that generator class names align.
        generators = user_yaml.get("generators")
        user_yaml["generators"] = sorted(
            generators, key=lambda d: list(d.keys()))

        default_yaml = load_yaml(
            default_template.render(**self.TEMPLATE_KWARGS))
        # merge user-supplied static file with base template
        config = deepupdate(default_yaml, user_yaml)
    else:
        config = load_yaml(default_template.render(**self.TEMPLATE_KWARGS))

    # handle special-cases in YAML config syntax
    config = self._format_config(config)

    # remove top-level class name; keep only the entries for this class.
    self.config = []
    for generators in config.get("generators"):
        for key, val in generators.items():
            if key == type(self).__name__:
                self.config.append(val)

    self.start_date = self._set_date_config(start_date, "start")
    self.end_date = self._set_date_config(end_date, "end")
    self.hours = self._set_hours()
    self.days = self._set_days()

    LOG.debug("Current config: %s", pformat(self.config))
    super().__init__()
def __init__(self, start_date, end_date, user_config=None):
    """Initialize the generator.

    Pre-generates fake data sized to match the user-supplied static file
    (when given), then collects tag keys from the rendered config into the
    class's resource/report column sets.

    Args:
        start_date (datetime): start of the generation window.
        end_date (datetime): end of the generation window.
        user_config (str): optional path to a user-supplied YAML template.
    """
    # Generate the same number of elements as the static file, if there is
    # one. This is needed to ensure that deepupdate() works correctly.
    gen_count = randint(2, 6)
    if user_config:
        preload = load_yaml(user_config)
        # Count occurrences of each generator class name in the static file.
        seen = {}
        for generators in preload.get("generators"):
            for key in generators:
                seen[key] = seen.get(key, 0) + 1
        name = type(self).__name__
        if name in seen:
            gen_count = seen[name]
    self._gen_fake_data(gen_count)

    super().__init__(start_date, end_date, user_config=user_config)

    # Collect tag keys across all configured generators. A set removes
    # duplicates up front; the result feeds set.update() below, so the
    # final columns are identical to the original list-based collection.
    tag_cols = {
        str(key)
        for cfg in self.config
        for key in cfg.get("tags", {})
    }
    self._tags = {}
    self.num_instances = len(self.config)
    if tag_cols:
        self.RESOURCE_TAG_COLS.update(tag_cols)
        self.AWS_COLUMNS.update(tag_cols)
def __init__(self, start_date, end_date, cache=None, user_config=None):
    """Initialize the generator.

    Args:
        start_date (datetime): start of the generation window.
        end_date (datetime): end of the generation window (extended by one
            day below, because Azure end dates are exclusive).
        cache (dict): optional meter cache shared between generators.
        user_config (str): optional path to a user-supplied YAML template.
    """
    # Generate the same number of elements as the static file, if there is
    # one. This is needed to ensure that deepupdate() works correctly.
    gen_count = randint(2, 6)
    if user_config:
        preload = load_yaml(user_config)
        # Count occurrences of each generator class name in the static file.
        seen = {}
        for generators in preload.get("generators"):
            for key in generators:
                seen[key] = seen.get(key, 0) + 1
        name = type(self).__name__
        if name in seen:
            gen_count = seen[name]
    self._gen_fake_data(gen_count)

    # pass an element of the instance_id defaults into the template
    svcname, svctype = self._get_accts_str(self.SERVICE_NAME)
    self.TEMPLATE_KWARGS["_service_name"] = "{}/{}".format(
        svcname, svctype[:-1])

    super().__init__(start_date, end_date, user_config=user_config)

    # `cache={}` as a parameter default was a shared mutable default
    # argument: every instance constructed without an explicit cache would
    # mutate the same dict across the process lifetime. Use None as the
    # sentinel and create a fresh dict per instance instead.
    self._meter_cache = cache if cache is not None else {}

    # Azure end_date is always the following day
    self.end_date += relativedelta(days=1)
def load_static_report_data(options):
    """Load and set start and end dates if static file is provided.

    Mutates and returns ``options``: without a static file, defaults the
    window to the current month-to-date; with one, derives per-generator
    start/end dates from the YAML and the overall min/max window.

    Args:
        options (dict): run options; may contain "static_report_file",
            "provider".

    Returns:
        dict: the same options dict with date keys populated.
    """
    # Capture "now" once so all derived dates agree even if this runs
    # across a midnight or month boundary (the original called
    # datetime.now() four separate times).
    now = datetime.now()

    if not options.get("static_report_file"):
        # No static file: default to the current month, midnight-aligned.
        options["start_date"] = now.replace(
            day=1, hour=0, minute=0, second=0, microsecond=0)
        options["end_date"] = now.replace(
            hour=0, minute=0, second=0, microsecond=0)
        return options

    LOG.info("Loading static data...")
    start_dates = {}
    end_dates = {}
    static_report_data = load_yaml(options.get("static_report_file"))
    for generator_dict in static_report_data.get("generators"):
        for genname, attributes in generator_dict.items():
            generated_start_date = calculate_start_date(
                attributes.get("start_date"))
            start_dates[genname] = generated_start_date

            if attributes.get("end_date"):
                generated_end_date = calculate_end_date(
                    generated_start_date, attributes.get("end_date"))
            else:
                generated_end_date = today()

            if options.get("provider") == "azure":
                # Azure end dates are exclusive; push one day forward.
                generated_end_date += timedelta(hours=24)
            else:
                generated_end_date = generated_end_date.replace(
                    hour=23, minute=59)
            end_dates[genname] = generated_end_date

    options["gen_starts"] = start_dates
    options["gen_ends"] = end_dates
    options["start_date"] = min(start_dates.values())

    latest_date = max(end_dates.values())
    last_day_of_month = calendar.monthrange(year=latest_date.year,
                                            month=latest_date.month)[1]
    if latest_date.month == now.month and latest_date.year == now.year:
        last_day_of_month = now.day  # don't generate date into the future.
    options["end_date"] = latest_date.replace(day=last_day_of_month,
                                              hour=0, minute=0)
    return options
def _load_static_report_data(options): """Validate/load and set start_date if static file is provided.""" if not options.get("static_report_file"): return static_file = options.get("static_report_file") if not os.path.exists(static_file): LOG.error(f"file does not exist: '{static_file}'") sys.exit() LOG.info("Loading static data...") aws_tags = set() start_dates = [] end_dates = [] static_report_data = load_yaml(static_file) for generator_dict in static_report_data.get("generators"): for _, attributes in generator_dict.items(): start_date = get_start_date(attributes, options) generated_start_date = calculate_start_date(start_date) start_dates.append(generated_start_date) if attributes.get("end_date"): generated_end_date = calculate_end_date( generated_start_date, attributes.get("end_date")) elif options.get("end_date") and options.get( "end_date").date() != today().date(): generated_end_date = calculate_end_date( generated_start_date, options.get("end_date")) else: generated_end_date = today() if options.get("provider") == "azure": generated_end_date += datetime.timedelta(hours=24) end_dates.append(generated_end_date) attributes["start_date"] = str(generated_start_date) attributes["end_date"] = str(generated_end_date) if options.get("provider") == "aws": aws_tags.update(attributes.get("tags", {}).keys()) options["start_date"] = min(start_dates) latest_date = max(end_dates) last_day_of_month = calendar.monthrange(year=latest_date.year, month=latest_date.month)[1] options["end_date"] = latest_date.replace(day=last_day_of_month, hour=0, minute=0) options["static_report_data"] = static_report_data if options.get("provider") == "aws" and aws_tags: options["aws_tags"] = aws_tags return True
def process_template(self, args, config=None):
    """Process specific provider configs to produce yamls."""
    from nise.util import load_yaml

    template_data = load_yaml(args.config_file_name)

    ocp_on_aws = template_data.get("ocp-on-aws")
    if ocp_on_aws:
        # First OCP: generate OCP data and capture its resource ids/tags.
        replace_args(args, ocp_on_aws.get("ocp"), "ocp", "ocp-on-aws")
        config = get_validated_config(self.ocp, args)
        data = run_generator(self.ocp, args, config)
        id_labels = get_resourceid_and_tags(data)

        # AWS: feed the OCP resource ids/tags into the AWS generator.
        replace_args(args, ocp_on_aws.get("aws"), "aws", "ocp-on-aws")
        self.aws = self.aws(id_labels)
        config = get_validated_config(self.aws, args)
        run_generator(self.aws, args, config)

    ocp_on_azure = template_data.get("ocp-on-azure")
    if ocp_on_azure:
        # Second OCP: generate OCP data and capture its resource ids/tags.
        replace_args(args, ocp_on_azure.get("ocp"), "ocp", "ocp-on-azure")
        config = get_validated_config(self.ocp, args)
        data = run_generator(self.ocp, args, config)
        id_labels = get_resourceid_and_tags(data)

        # Azure: feed the OCP resource ids/tags into the Azure generator.
        replace_args(args, ocp_on_azure.get("azure"), "azure",
                     "ocp-on-azure")
        self.azure = self.azure(id_labels)
        config = get_validated_config(self.azure, args)
        run_generator(self.azure, args, config)
def test_load_yaml(self):
    """Test that a static report YAML file loads successfully."""
    loaded = load_yaml("tests/aws_static_report.yml")
    self.assertIsNotNone(loaded)
def gcp_create_report(options):  # noqa: C901
    """Create a GCP cost usage report file.

    Builds projects (from the static file or a fake account), runs every
    GCP generator for each project, writes one CSV per day, optionally
    uploads to a bucket, and removes local files unless "write_monthly"
    is set.

    Args:
        options (dict): run options; may contain "gcp_report_prefix",
            "gcp_bucket_name", "start_date", "end_date",
            "static_report_file", "write_monthly".
    """
    fake = Faker()
    report_prefix = options.get("gcp_report_prefix") or fake.word()
    gcp_bucket_name = options.get("gcp_bucket_name")
    start_date = options.get("start_date")
    end_date = options.get("end_date")

    projects = []
    if options.get("static_report_file"):
        config = load_yaml(options.get("static_report_file"))
        # Only the generator entries that define a ProjectGenerator.
        project_gens = [gen for gen in config.get("generators")
                        if "ProjectGenerator" in gen]
        for gen in project_gens:
            project_generator = ProjectGenerator(
                gen.get("ProjectGenerator", {}).get("Account ID"))
            # extend() instead of repeated list concatenation (which
            # rebuilt the whole list on every generator).
            projects.extend(project_generator.generate_projects())
    else:
        account = "{}-{}".format(fake.word(), fake.word())
        project_generator = ProjectGenerator(account)
        projects.extend(project_generator.generate_projects())

    # Progress-report thresholds are invariant across projects; hoist them
    # out of the loop.
    num_gens = len(GCP_GENERATORS)
    ten_percent = int(num_gens * 0.1) if num_gens > 50 else 5

    data = {}
    for project in projects:
        LOG.info(
            f"Producing data for {num_gens} generators for GCP Project '{project}'."
        )
        # enumerate(..., start=1) replaces the original pattern of
        # mutating the enumerate counter inside the loop body.
        for count, generator in enumerate(GCP_GENERATORS, start=1):
            gen = generator(start_date,
                            end_date,
                            project,
                            user_config=options.get("static_report_file"))
            for key, item in gen.generate_data().items():
                if key in data:
                    data[key] += item
                else:
                    data[key] = item
            if count % ten_percent == 0:
                LOG.info(f"Done with {count} of {num_gens} generators.")

    monthly_files = []
    for day, daily_data in data.items():
        output_file_name = "{}-{}.csv".format(report_prefix,
                                              day.strftime("%Y-%m-%d"))
        output_file_path = os.path.join(os.getcwd(), output_file_name)
        monthly_files.append(output_file_path)
        _write_csv(output_file_path, daily_data, GCP_REPORT_COLUMNS)
        if gcp_bucket_name:
            gcp_route_file(gcp_bucket_name, output_file_path,
                           output_file_name)

    if not options.get("write_monthly", False):
        _remove_files(monthly_files)