def generate_ontology(
    self,
    base_json_file_path: str,
    destination_dir: Optional[str] = None,
    is_dry_run: bool = False,
) -> Optional[str]:
    r"""Function to generate and save the Python ontology code after
    reading the ontology from the input json file.

    Args:
        base_json_file_path: The base json config file.
        destination_dir: The folder in which config packages are to be
            generated. If not provided, the current working directory is
            used. Ignored if `is_dry_run` is `True`.
        is_dry_run: if `True`, creates the ontology in a temporary
            directory, else creates the ontology in `destination_dir`.

    Returns:
        Directory path in which the modules are created: either the
        temporary directory or `destination_dir`.
    """
    self.tempdir = tempfile.mkdtemp()

    # Update the list of directories to be examined for imported configs.
    self.json_paths.extend(
        [
            os.path.dirname(os.path.realpath(base_json_file_path)),
            os.path.dirname(os.path.realpath("ft/onto")),
            os.getcwd(),
        ]
    )

    # Adding the imported objects to the allowed types.
    for import_module in self.required_imports:
        for obj_str in utils.get_user_objects_from_module(import_module):
            full_obj_str = f"{import_module}.{obj_str}"
            self.allowed_types_tree[full_obj_str] = set()
            self.ref_to_full_name[obj_str] = full_obj_str

    # Generate ontology classes for the input json config and the configs
    # it is dependent upon.
    destination_dir = (
        os.getcwd() if destination_dir is None else destination_dir
    )
    self.parse_ontology(base_json_file_path, destination_dir)

    # When everything is successfully completed, copy the contents of
    # `self.tempdir` to the provided folder.
    if not is_dry_run:
        generated_top_dirs = set(utils.get_top_level_dirs(self.tempdir))
        for existing_top_dir in utils.get_top_level_dirs(destination_dir):
            if existing_top_dir in generated_top_dirs:
                warnings.warn(
                    f"DirectoryAlreadyPresent: "
                    f"The directory with the name "
                    f"{existing_top_dir} is already present in "
                    f"{destination_dir}. Merging into the "
                    f"existing directory."
                )
        dir_util.copy_tree(self.tempdir, destination_dir)
        return destination_dir
    return self.tempdir
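# A minimal usage sketch for `generate_ontology`. The generator class name
# `OntologyCodeGenerator` and the spec file name below are assumptions for
# illustration, not taken from this file. A dry run writes to a temporary
# directory, leaving `destination_dir` untouched, so the generated modules
# can be inspected before a real run:
#
#     generator = OntologyCodeGenerator()
#     out_dir = generator.generate_ontology(
#         base_json_file_path="example_ontology.json",  # hypothetical spec
#         is_dry_run=True,  # output goes to a temp dir instead of cwd
#     )
#     print("Generated modules under:", out_dir)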
def generate(
    self,
    spec_path: str,
    destination_dir: str = os.getcwd(),
    is_dry_run: bool = False,
    include_init: bool = True,
    merged_path: Optional[str] = None,
    lenient_prefix: bool = False,
) -> Optional[str]:
    r"""Function to generate and save the Python ontology code after
    reading the ontology from the input json file. This is the main
    entry point to the class.

    Args:
        spec_path: The input ontology specification file, which should
            be a json file.
        destination_dir: The folder in which config packages are to be
            generated. If not provided, the current working directory is
            used. Ignored if `is_dry_run` is `True`.
        is_dry_run: if `True`, creates the ontology in a temporary
            directory, else creates the ontology in `destination_dir`.
        include_init: if `True`, generates `__init__.py` in the already
            existing directories, otherwise only generates `__init__.py`
            in the generated directories.
        merged_path: if a path is provided, a merged ontology file will
            be written at this path.
        lenient_prefix: if `True`, will not enforce the entry name to
            match a known prefix.

    Returns:
        Directory path in which the modules are created: either the
        temporary directory or `destination_dir`.
    """
    # Update the list of directories to be examined for imported configs.
    self.import_dirs.append(os.path.dirname(os.path.realpath(spec_path)))

    merged_schemas: List[Dict] = []
    merged_prefixes: List[str] = []

    # Generate ontology classes for the input json config and the configs
    # it is dependent upon.
    try:
        self.parse_ontology_spec(
            spec_path,
            merged_schema=merged_schemas,
            merged_prefixes=merged_prefixes,
            lenient_prefix=lenient_prefix,
        )
    except OntologySpecError:
        logging.error("Error at parsing [%s]", spec_path)
        raise

    # Now generate all data.
    # A temporary directory to save the generated file structure until
    # the generation is completed and verified.
    tempdir = tempfile.mkdtemp()

    # Starting from here, we won't add any more modules to import.
    self.import_managers.fix_all_modules()

    logging.info("Working on %s", spec_path)
    for writer in self.module_writers.writers():
        logging.info("Writing module: %s", writer.module_name)
        writer.write(tempdir, destination_dir, include_init)
        logging.info("Done writing.")

    if merged_path is not None:
        logging.info("Writing merged schema at %s", merged_path)
        merged_config = {
            "name": "all_ontology",
            "definitions": merged_schemas,
            "additional_prefixes": list(set(merged_prefixes)),
        }
        with open(merged_path, "w") as out:
            json.dump(merged_config, out, indent=2)
        logging.info("Done writing.")

    # When everything is successfully completed, copy the contents of
    # `tempdir` to the provided folder.
    if not is_dry_run:
        generated_top_dirs = set(utils.get_top_level_dirs(tempdir))
        for existing_top_dir in utils.get_top_level_dirs(destination_dir):
            if existing_top_dir in generated_top_dirs:
                logging.warning(
                    "The directory with the name "
                    "%s is already present in "
                    "%s. New files will be merged into the "
                    "existing directory.",
                    existing_top_dir,
                    destination_dir,
                )
        utils.copytree(
            tempdir,
            destination_dir,
            ignore_pattern_if_file_exists="*/__init__.py",
        )
        return destination_dir
    return tempdir
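# A minimal usage sketch for `generate`. The class name `OntologyCodeGenerator`
# and all file/directory names below are assumptions for illustration. This
# shows the main entry point: parse a spec, optionally write a merged schema
# of all parsed definitions, and merge the generated packages into an output
# directory (existing `__init__.py` files are preserved via `copytree`'s
# ignore pattern):
#
#     generator = OntologyCodeGenerator()
#     out_dir = generator.generate(
#         spec_path="example_ontology_spec.json",  # hypothetical spec file
#         destination_dir="generated_ontology",    # hypothetical output dir
#         merged_path="merged_ontology.json",      # optional merged schema
#         lenient_prefix=True,                     # skip prefix validation
#     )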