def execute(num_data_sets: int, max_records: int, cfg_file: str) -> List[str]:
    """
    Given the parameters, generate the requested data files in the specified
    directory. The number of records per data set is a random value between
    [1, max_records]. The files will be generated in the log directory defined
    in the App cfg file provided.

    :param num_data_sets: Number of files to generate
    :param max_records: Max number of records per data set.
    :param cfg_file: PDL cfg file to use for generating data

    :return: List of files generated by routine.

    """
    # Get the JSON data/log directory
    config = AppConfig(cfg_file=cfg_file, test=False)
    json_dir = build_json_log_location(cfg=config)

    # Generate the data
    data = generate_data(num_data_sets=int(num_data_sets),
                         max_num_recs_per_file=int(max_records))
    LOG.debug(pprint.pformat(data))

    # Determine the number of records generated
    actual_count = sum(len(records) for records in data.values())
    max_count = int(num_data_sets) * int(max_records)
    LOG.info(f"Count: {actual_count} (MAX: {max_count})")

    # Write the dictionary to JSON files in the predetermined directory
    gen_files = []
    for filename, file_records in data.items():

        # Convert the ImageData objects to dictionaries (json-serializable)
        for data_id, data_obj in file_records.items():
            file_records[data_id] = data_obj.to_dict()

        # Build the full filespec for the current data set
        filepath = os.path.sep.join([json_dir, filename])
        gen_files.append(filepath)

        # Write the data to file.
        with open(filepath, "w") as data_file:
            data_file.write(json.dumps(file_records))
        LOG.info(f"Wrote data to '{filepath}'.")

    LOG.info("Done")
    return gen_files
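# A minimal usage sketch. The cfg filename below is hypothetical; point it at
# a real PDL config file that defines the JSON log directory:
if __name__ == "__main__":
    generated = execute(num_data_sets=3, max_records=25, cfg_file="pdl_app.cfg")
    print(f"Generated {len(generated)} data files:")
    for path in generated:
        print(f"  {path}")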
# Imports used by this test module (nose-style framework, matching the
# assertions/decorators below). AppConfig and ConfigSectionDoesNotExist come
# from the project's configuration module (import path not shown in this
# snippet).
import inspect
import os
import configparser as ConfigParser  # Python 3 name for the stdlib module

from nose.tools import assert_equals, assert_true, raises


class TestPropertiesConfig(object):

    VALID_SECTIONS = ['setup', 'test', 'teardown']
    VALID_OPTIONS = (
        'teardown',
        ["report", "resources", "clear_resources", "abort_on_fail"])

    INVALID_SECTION = 'section_does_not_exist'
    INVALID_OPTION = 'option_does_not_exist'

    VALID_SECTION_AND_ITEM_STR = ('teardown', 'report', '"results.xml"')
    VALID_SECTION_AND_ITEM_INT = ('setup', 'testcases', 10)
    VALID_SECTION_AND_ITEM_BOOL = ('teardown', 'clear_resources', True)
    VALID_SECTION_AND_ITEM_FLOAT = ('test', 'averages', 5.5)
    VALID_SECTION_AND_ITEM_LIST = (
        'teardown', 'resources', ['Nova', 'Neutron', 'Cinder'])

    INVALID_SECTION_AND_VALID_OPTION_LIST = (
        INVALID_SECTION, 'resources', ['Nova', 'Neutron', 'Cinder'])
    VALID_SECTION_AND_INVALID_OPTION_LIST = (
        'teardown', INVALID_OPTION, ['Nova', 'Neutron', 'Cinder'])

    INVALID_CFG_FILE = 'DNE.txt'

    def setup(self):
        # Nose invokes this before each test. The cfg file is a '.data' file
        # sitting next to this test module.
        filename = inspect.getframeinfo(inspect.currentframe()).filename
        self.cfg_file = f'{os.path.splitext(filename)[0]}.data'
        self.config = AppConfig(cfg_file=self.cfg_file, test=True)

    def _print_config_file(self):
        with open(self.cfg_file) as CFG:
            lines = CFG.readlines()
        lines = '\t'.join(lines)
        print(f"CFG FILE ({self.cfg_file}):\n\t{lines}")

    def test_app_config_init(self):
        filename = inspect.getframeinfo(inspect.currentframe()).filename
        self.cfg_file = f'{os.path.splitext(filename)[0]}.data'
        self.config = AppConfig(cfg_file=self.cfg_file, test=False)
        assert self.config is not None
        assert sorted(self.config.sections()) == sorted(self.VALID_SECTIONS)

    def test_get_sections(self):
        expected_sections = ['setup', 'test', 'teardown']
        self._print_config_file()
        assert_equals(self.config.sections(), expected_sections)

    def test_get_options_populated(self):
        actual_options = self.config.get_options(section=self.VALID_OPTIONS[0])
        assert_equals(sorted(actual_options), sorted(self.VALID_OPTIONS[1]))

    @raises(ConfigSectionDoesNotExist)
    def test_get_options_non_existent_section(self):
        self.config.get_options(section=self.INVALID_SECTION)

    @raises(ConfigParser.NoSectionError)
    def test_get_option_non_existent_section(self):
        self.config.get(
            section=self.INVALID_SECTION, option=self.INVALID_OPTION)

    @raises(ConfigParser.NoOptionError)
    def test_get_option_non_existent_option(self):
        self.config.get(
            section=self.VALID_SECTIONS[-1], option=self.INVALID_OPTION)

    def test_get_option_valid_section_valid_option(self):
        data = self.VALID_SECTION_AND_ITEM_STR
        expected_value = data[2]
        actual_value = self.config.get(section=data[0], option=data[1])
        assert_true(isinstance(actual_value, str))
        assert_equals(actual_value, expected_value)

    def test_get_option_valid_section_valid_int(self):
        data = self.VALID_SECTION_AND_ITEM_INT
        expected_value = data[2]
        actual_value = self.config.getint(section=data[0], option=data[1])
        assert_true(isinstance(actual_value, int))
        assert_equals(actual_value, expected_value)

    def test_get_option_valid_section_valid_boolean(self):
        data = self.VALID_SECTION_AND_ITEM_BOOL
        expected_value = data[2]
        actual_value = self.config.getboolean(section=data[0], option=data[1])
        assert_true(isinstance(actual_value, bool))
        assert_equals(actual_value, expected_value)

    def test_get_option_valid_section_valid_float(self):
        data = self.VALID_SECTION_AND_ITEM_FLOAT
        expected_value = data[2]
        actual_value = self.config.getfloat(section=data[0], option=data[1])
        assert_true(isinstance(actual_value, float))
        assert_equals(actual_value, expected_value)

    @raises(ValueError)
    def test_get_option_valid_section_valid_str_illegal_cast_float(self):
        data = self.VALID_SECTION_AND_ITEM_STR
        self.config.getfloat(section=data[0], option=data[1])

    def test_get_option_valid_section_valid_list(self):
        data = self.VALID_SECTION_AND_ITEM_LIST
        expected_value = data[2]
        actual_value = self.config.get_list(section=data[0], option=data[1])
        assert_true(isinstance(actual_value, list))
        assert_equals(actual_value, expected_value)

    @raises(ConfigParser.NoSectionError)
    def test_get_option_invalid_section_valid_option_valid_list(self):
        data = self.INVALID_SECTION_AND_VALID_OPTION_LIST
        self.config.get_list(section=data[0], option=data[1])

    @raises(ConfigParser.NoOptionError)
    def test_get_option_valid_section_invalid_option_valid_list(self):
        data = self.VALID_SECTION_AND_INVALID_OPTION_LIST
        self.config.get_list(section=data[0], option=data[1])
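# The tests above read a sibling "<module_name>.data" file in ConfigParser
# (INI) format. A sketch of its likely contents, inferred from the values
# asserted above (the exact list syntax depends on AppConfig.get_list, and
# abort_on_fail is present but its value is never asserted, so the value
# shown here is a placeholder):
#
#     [setup]
#     testcases = 10
#
#     [test]
#     averages = 5.5
#
#     [teardown]
#     report = "results.xml"
#     clear_resources = True
#     abort_on_fail = False
#     resources = Nova, Neutron, Cinder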
# Negative-path construction tests: AppConfig is expected to raise when the
# cfg file is None, empty, or does not exist. Under nose, each of these would
# carry a @raises(...) decorator naming the project-specific exception (the
# exception classes are not shown in this snippet).
def test_appcfg_raises_exc_with_cfg_file_specified_as_none(self):
    AppConfig(cfg_file=None)

def test_appcfg_raises_exc_with_no_cfg_file_specified(self):
    AppConfig(cfg_file='')

def test_if_non_existent_cfg_file_for_init_raises_error(self):
    AppConfig(cfg_file=TestPropertiesConfig.INVALID_CFG_FILE, test=False)
class PdlConfig:
    """
    This is a general data object, but it requires some internal processing to
    generate the various attributes based on the input from the configuration
    file (e.g., file paths).
    """
    def __init__(self) -> None:
        self.image_data = None
        self.inventory = None
        self.urls = None

        self.cli_args = args.CLIArgs().args
        self.app_cfg = AppConfig(self.cli_args.cfg or DEFAULT_APP_CONFIG)
        self.engine_cfg = AppConfig(self.cli_args.engine or DEFAULT_ENGINE_CONFIG)

        # Define the various directory paths and create the directories if needed
        self.dl_dir = self._build_image_download_dir()
        self.logfile_name = self._build_logfile_name()
        self.json_log_location = self._build_json_log_location()
        self.json_logfile = self._build_json_logfile_name()
        self.inv_pickle_file = self._build_pickle_filename()
        self.temp_storage_path = self._build_temp_storage()

        self._display_file_locations()

    def _build_image_download_dir(self) -> str:
        """
        Builds the image download directory, and creates the directory if
        necessary.

        :return: (str) Absolute path to the DL directory

        """
        dl_dir = self.app_cfg.get(AppCfgFileSections.STORAGE,
                                  AppCfgFileSectionKeys.LOCAL_DIR)
        dl_drive = self.app_cfg.get(AppCfgFileSections.STORAGE,
                                    AppCfgFileSectionKeys.LOCAL_DRIVE_LETTER)

        if dl_drive not in [None, '']:
            dl_dir = f"{dl_drive.strip(':')}:{dl_dir}"

        dl_dir = os.path.abspath(dl_dir)
        utils.check_if_location_exists(location=dl_dir, create_dir=True)
        return dl_dir

    def _build_logfile_name(self) -> str:
        """
        Builds the logging directory, and creates the directory if necessary.

        :return: (str) Absolute path to the log file

        """
        logfile_name = AppLogging.build_logfile_name(cfg_info=self.app_cfg)
        log_dir = os.path.abspath(
            os.path.sep.join(logfile_name.split(os.path.sep)[0:-1]))
        utils.check_if_location_exists(location=log_dir, create_dir=True)
        return logfile_name

    def _build_json_log_location(self) -> str:
        """
        Builds the JSON inventory logging directory, and creates the directory
        if necessary.

        :return: (str) Absolute path to the JSON inventory logging directory

        """
        json_log_location = self.app_cfg.get(
            AppCfgFileSections.LOGGING,
            AppCfgFileSectionKeys.JSON_FILE_DIR)
        json_drive = self.app_cfg.get(AppCfgFileSections.LOGGING,
                                      AppCfgFileSectionKeys.LOG_DRIVE_LETTER)

        if json_drive not in [None, '']:
            json_log_location = f"{json_drive.strip(':')}:{json_log_location}"

        json_log_location = os.path.abspath(json_log_location)
        utils.check_if_location_exists(location=json_log_location,
                                       create_dir=True)
        return json_log_location

    def _build_json_logfile_name(self) -> str:
        """
        Builds the JSON inventory log file name.

        :return: (str) Absolute path to the JSON inventory file name.

        """
        # Get logging prefix and suffix
        add_ons = [
            self.app_cfg.get(AppCfgFileSections.LOGGING,
                             AppCfgFileSectionKeys.PREFIX),
            self.app_cfg.get(AppCfgFileSections.LOGGING,
                             AppCfgFileSectionKeys.SUFFIX)
        ]

        # Isolate the timestamp portion of the logfile name by stripping the
        # configured prefix/suffix from the file's base name.
        log_name = self.logfile_name.split(os.path.sep)[-1]
        timestamp = '-'.join(log_name.split('_')[0:-1])
        for update in add_ons:
            if update is not None:
                timestamp = timestamp.replace(update, '')

        # Build the file name
        filename = f"{timestamp}.{JsonLog.EXTENSION}"

        # Build the full file spec
        filename = os.path.abspath(
            os.path.sep.join([self.json_log_location, filename]))
        return filename

    def _build_pickle_filename(self) -> str:
        """
        Builds the general inventory pickled (binary) data file name.

        :return: (str) Absolute path to the pickled (binary) data file name.

        """
        pickle_location = self.json_log_location
        pickle_filename = "{0}{1}".format(
            self.engine_cfg.get(ProjectCfgFileSections.PYTHON_PROJECT,
                                ProjectCfgFileSectionKeys.NAME).upper(),
            PICKLE_EXT)
        pickle_filename = os.path.abspath(
            os.path.sep.join([pickle_location, pickle_filename]))
        utils.check_if_location_exists(location=pickle_location,
                                       create_dir=True)
        return pickle_filename

    def _build_temp_storage(self) -> str:
        """
        Builds the temp (local) file storage directory.

        :return: (str) Absolute path to the temp (local) file storage directory.

        """
        storage_location = self.app_cfg.get(
            AppCfgFileSections.STORAGE,
            AppCfgFileSectionKeys.TEMP_STORAGE_PATH)
        storage_drive = self.app_cfg.get(
            AppCfgFileSections.STORAGE,
            AppCfgFileSectionKeys.TEMP_STORAGE_DRIVE)

        if storage_drive not in [None, '']:
            storage_location = f"{storage_drive.strip(':')}:{storage_location}"

        storage_location = os.path.abspath(storage_location)
        utils.check_if_location_exists(location=storage_location,
                                       create_dir=True)
        return storage_location

    def _display_file_locations(self) -> None:
        """
        Lists/logs the locations of the various configured/generated
        directories. Used for reference and debugging.

        :return: None

        """
        # Build and configure the table
        table = prettytable.PrettyTable()
        table.field_names = ['File Type', 'Location']
        for col in table.field_names:
            table.align[col] = 'l'

        # Configure the table data (use an OrderedDict to maintain row order,
        # based on the importance of the data)
        setup = OrderedDict([('DL Directory', self.dl_dir),
                             ('DL Log File', self.logfile_name),
                             ('JSON Data File', self.json_logfile),
                             ('Binary Inv File', self.inv_pickle_file),
                             ('Temp Storage', self.temp_storage_path)])

        # Populate the table
        for name, data in setup.items():
            table.add_row([name, data])

        # Display the results
        ReportingSummary.log_table(table.get_string(title="FILE INFORMATION"))
def main_routine():
    """
    Primary routine:
    * Get the list of non-consolidated JSON files, based on info from the config file.
    * Read all JSON files into a common structure, and combine any identical items.
    * Write the data to file.
    * Verify all data in the consolidated file matches the source files.
    * Delete the source JSON files that were consolidated (and verify all
      files were deleted).

    :return: None

    """
    length = 80
    border = "=" * length

    # Parse the CLI for the specified config file
    cfg_name = parse_cli().cfg
    LOG.info(border)
    LOG.info(f" USING CFG: {cfg_name}")
    LOG.info(border)

    # Determine the JSON log directory from the config file
    log_location = build_json_log_location(cfg=AppConfig(cfg_file=cfg_name))

    # Get a list of the JSON files in the 'log_location' directory
    LOG.info(border)
    LOG.info(" GET LISTING OF JSON FILES TO CONSOLIDATE.")
    LOG.info(border)
    inv = JsonInventory(dir_location=log_location)
    json_files = inv.get_json_files()

    # Find all files that DO NOT HAVE the consolidated base filename.
    # Put data from all matching files into a single dictionary.
    data_files = [x for x in json_files
                  if not x.split(os.path.sep)[-1].startswith(BASE_FILE_NAME)]
    records = read_files(files=data_files)

    # Determine the name and location of the CONSOLIDATED JSON file
    consolidated_log = determine_consolidate_file_name(files=json_files,
                                                       target_dir=log_location)
    LOG.info(f"Consolidating to: {consolidated_log}")

    # Create the CONSOLIDATED JSON file
    write_json_file(data=records, filename=consolidated_log)

    # Verify the CONSOLIDATED JSON file matches the original data
    LOG.info(border)
    LOG.info(" VERIFY CONTENTS MATCH ORIGINAL FILES")
    LOG.info(border)
    success = verify_records_match(
        original=read_files(files=data_files),
        consolidated=read_json_file(consolidated_log))

    # If verification passed, delete the source <data>.json files and verify
    # that all of them were removed.
    if success:
        # Delete the files
        LOG.info(border)
        LOG.info(" DELETING NON-CONSOLIDATED JSON FILES")
        LOG.info(border)
        for data_file in data_files:
            os.remove(data_file)
            LOG.info(f"Deleted {data_file}")

        # Get the updated list of JSON files in the directory
        json_files = inv.get_json_files()
        data_files = [x for x in json_files if BASE_FILE_NAME not in x]

        # data_files should be empty (no <data>.json files remaining)
        if data_files:
            LOG.error("JSON data files NOT deleted:")
            for data_file in data_files:
                LOG.error(f"+ {data_file}")
        else:
            LOG.info(border)
            LOG.info(f" CONSOLIDATION SUCCESSFUL. {consolidated_log}")
            LOG.info(border)
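# A typical entry point for the consolidation script (a sketch; logging
# configuration is assumed to be handled elsewhere in the module):
if __name__ == "__main__":
    main_routine()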