def _get_template_engine(ctx):
    """
    Initialize the template engine environment
    """
    global engine_cache
    if engine_cache is not None:
        return engine_cache

    loader_map = {}
    loader_map[""] = FileSystemLoader(os.path.join(Project.get().project_path, "templates"))
    for name, module in Project.get().modules.items():
        template_dir = os.path.join(module._path, "templates")
        if os.path.isdir(template_dir):
            loader_map[name] = FileSystemLoader(template_dir)

    # init the environment
    env = Environment(loader=PrefixLoader(loader_map), undefined=jinja2.StrictUndefined)
    env.context_class = ResolverContext

    # register all plugins as filters
    for name, cls in ctx.get_compiler().get_plugins().items():

        def curywrapper(func):
            def safewrapper(*args):
                return JinjaDynamicProxy.return_value(func(*args))

            return safewrapper

        env.filters[name.replace("::", ".")] = curywrapper(cls)

    engine_cache = env
    return env

def setUp(self):
    project = Project(self.project_dir, autostd=False)
    if self.mainfile is not None:
        project.main_file = self.mainfile
    Project.set(project)
    self.state_dir = tempfile.mkdtemp()
    config.Config.set("config", "state-dir", self.state_dir)

def load_project(venv_path: Optional[str]) -> None:
    if venv_path is None:
        project: Project = Project(project_dir)
    else:
        project: Project = Project(project_dir, venv_path=venv_path)
    Project.set(project)
    project.load()

def test_code_manager():
    """Verify the code manager"""
    project_dir: str = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "plugins_project")
    project: Project = Project(project_dir)
    Project.set(project)
    project.load()

    ModuleTool().install("single_plugin_file")
    ModuleTool().install("multiple_plugin_files")

    import inmanta_plugins.multiple_plugin_files.handlers as multi
    import inmanta_plugins.single_plugin_file as single

    mgr = loader.CodeManager()
    mgr.register_code("std::File", single.MyHandler)
    mgr.register_code("std::Directory", multi.MyHandler)

    def assert_content(source_info: SourceInfo, handler) -> str:
        filename = inspect.getsourcefile(handler)
        content: str
        with open(filename, "r", encoding="utf-8") as fd:
            content = fd.read()
            assert source_info.content == content
            assert len(source_info.hash) > 0
            return content

    # get types
    types = dict(mgr.get_types())
    assert "std::File" in types
    assert "std::Directory" in types

    single_type_list: List[SourceInfo] = types["std::File"]
    multi_type_list: List[SourceInfo] = types["std::Directory"]

    assert len(single_type_list) == 1
    single_content: str = assert_content(single_type_list[0], single.MyHandler)

    assert len(multi_type_list) == 3
    multi_content: str = assert_content(
        next(s for s in multi_type_list if s.module_name == "inmanta_plugins.multiple_plugin_files.handlers"),
        multi.MyHandler,
    )

    # get_file_hashes
    mgr_contents: Set[str] = {mgr.get_file_content(hash) for hash in mgr.get_file_hashes()}
    assert single_content in mgr_contents
    assert multi_content in mgr_contents

    with pytest.raises(KeyError):
        mgr.get_file_content("test")

    # register type without source
    with pytest.raises(loader.SourceNotFoundException):
        mgr.register_code("test2", str)

def list(self, requires=False):
    """
    List all modules in a table
    """
    table = []
    name_length = 10
    version_length = 10

    project = Project.get()
    project.get_complete_ast()

    names = sorted(project.modules.keys())
    specs = project.collect_imported_requirements()
    for name in names:
        name_length = max(len(name), name_length)
        mod = Project.get().modules[name]
        version = str(mod.version)
        if name not in specs:
            specs[name] = []

        try:
            if project._install_mode == InstallMode.master:
                reqv = "master"
            else:
                release_only = project._install_mode == InstallMode.release
                versions = Module.get_suitable_version_for(name, specs[name], mod._path, release_only=release_only)
                if versions is None:
                    reqv = "None"
                else:
                    reqv = str(versions)
        except Exception:
            LOGGER.exception("Problem getting version for module %s" % name)
            reqv = "ERROR"

        version_length = max(len(version), len(reqv), version_length)
        table.append((name, version, reqv, version == reqv))

    if requires:
        print("requires:")
        for name, version, reqv, _ in table:
            print(" - %s==%s" % (name, version))
    else:
        t = texttable.Texttable()
        t.set_deco(texttable.Texttable.HEADER | texttable.Texttable.BORDER | texttable.Texttable.VLINES)
        t.header(("Name", "Installed version", "Expected in project", "Matches"))
        for row in table:
            t.add_row(row)
        print(t.draw())

def sources(cls):
    """
    Get all source files that define resources
    """
    resource_to_sources = {}
    for resource, providers in cls.__command_functions.items():
        sources = {}
        resource_to_sources[resource] = sources
        for provider in providers.values():
            file_name = inspect.getsourcefile(provider)

            source_code = ""
            with open(file_name, "r") as fd:
                source_code = fd.read()

            sha1sum = hashlib.new("sha1")
            sha1sum.update(source_code.encode("utf-8"))
            hv = sha1sum.hexdigest()

            if hv not in sources:
                module_name = provider.__module__.split(".")[1]
                req = Project.get().modules[module_name].get_python_requirements_as_list()
                sources[hv] = (file_name, provider.__module__, source_code, req)

    return resource_to_sources

def sources(cls) -> dict:
    """
    Get all source files that define resources
    """
    resource_to_sources = {}
    for name, (resource, _options) in cls._resources.items():
        sources = {}
        resource_to_sources[name] = sources
        file_name = inspect.getsourcefile(resource)

        source_code = ""
        with open(file_name, "r") as fd:
            source_code = fd.read()

        sha1sum = hashlib.new("sha1")
        sha1sum.update(source_code.encode("utf-8"))
        hv = sha1sum.hexdigest()

        if hv not in sources:
            module_name = resource.__module__.split(".")[1]
            req = Project.get().modules[module_name].get_python_requirements_as_list()
            sources[hv] = (file_name, resource.__module__, source_code, req)

    return resource_to_sources

def compile(self):
    """
    This method will compile and prepare everything to start evaluating the
    configuration specification.

    This method will:
    - load all namespaces
    - compile the __config__ namespace
    - start resolving it and importing unknown namespaces
    """
    project = Project.get()
    self.__root_ns = project.get_root_namespace()

    project.load()
    statements, blocks = project.get_complete_ast()

    # load plugins
    for name, cls in PluginMeta.get_functions().items():
        mod_ns = cls.__module__.split(".")
        if mod_ns[0] != "inmanta_plugins":
            raise Exception("All plugin modules should be loaded in the inmanta_plugins package not in %s" % cls.__module__)

        mod_ns = mod_ns[1:]

        ns = self.__root_ns
        for part in mod_ns:
            if ns is None:
                break
            ns = ns.get_child(part)

        if ns is None:
            raise Exception("Unable to find namespace for plugin module %s" % (cls.__module__))

        cls.namespace = ns
        name = name.split("::")[-1]
        statement = PluginStatement(ns, name, cls)
        statements.append(statement)

    # add the entity type (hack?)
    entity = DefineEntity(
        self.__root_ns.get_child_or_create("std"), "Entity", "The entity all other entities inherit from.", [], []
    )

    requires_rel = DefineRelation(
        ("std::Entity", "requires", [0, None], False), ("std::Entity", "provides", [0, None], False)
    )
    requires_rel.namespace = self.__root_ns.get_ns_from_string("std")

    statements.append(entity)
    statements.append(requires_rel)

    return (statements, blocks)

def requires(self) -> List[str]:
    """List of python requirements associated with this source file"""
    from inmanta.module import Project

    if self._requires is None:
        self._requires = Project.get().modules[self._get_module_name()].get_strict_python_requirements_as_list()
    return self._requires

def determine_path(ctx, module_dir, path):
    """
    Determine the real path based on the given path
    """
    parts = path.split(os.path.sep)

    modules = Project.get().modules

    if parts[0] == "":
        module_path = Project.get().project_path
    elif parts[0] not in modules:
        raise Exception("Module %s does not exist for path %s" % (parts[0], path))
    else:
        module_path = modules[parts[0]]._path

    return os.path.join(module_path, module_dir, os.path.sep.join(parts[1:]))

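A brief, hypothetical illustration of the path resolution above; the module name "mymod" and the file names are made up, and `ctx` is passed as None because the function body does not use it.

# Hypothetical example (assumes a loaded project with a module named "mymod"):
# a path whose first component is a module name resolves inside that module's directory.
resolved = determine_path(None, "templates", os.path.join("mymod", "motd.tmpl"))
# -> <path of mymod>/templates/motd.tmpl

# a path starting with a separator (empty first component) resolves inside the project itself.
resolved_project = determine_path(None, "files", os.path.sep + "motd.txt")
# -> <project path>/files/motd.txt
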
def run_snippet(self, snippet):
    project_dir = tempfile.mkdtemp()
    os.symlink(self.env, os.path.join(project_dir, ".env"))

    with open(os.path.join(project_dir, "project.yml"), "w") as cfg:
        cfg.write(
            """
name: snippet test
modulepath: %s
downloadpath: %s
version: 1.0
repo: ['[email protected]:modules/', '[email protected]:config/']"""
            % (self.libs, self.libs)
        )

    with open(os.path.join(project_dir, "main.cf"), "w") as x:
        x.write(snippet)

    Project.set(Project(project_dir))
    compiler.do_compile()

def get_siblings(self) -> Iterator["SourceInfo"]: """ Returns an iterator over SourceInfo objects for all plugin source files in this Inmanta module (including this one). """ from inmanta.module import Project return starmap( SourceInfo, Project.get().modules[self._get_module_name()].get_plugin_files())
def setup_for_snippet(self, snippet, autostd=True):
    # init project
    self.project_dir = tempfile.mkdtemp()
    os.symlink(self.__class__.env, os.path.join(self.project_dir, ".env"))

    with open(os.path.join(self.project_dir, "project.yml"), "w") as cfg:
        cfg.write(
            """
name: snippet test
modulepath: [%s, %s]
downloadpath: %s
version: 1.0
repo: ['https://github.com/inmanta/']"""
            % (
                self.__class__.libs,
                os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "modules"),
                self.__class__.libs,
            )
        )

    with open(os.path.join(self.project_dir, "main.cf"), "w") as x:
        x.write(snippet)

    Project.set(Project(self.project_dir, autostd=autostd))

def test_requirements_from_source_info(tmpdir):
    """Test the code path used by the exporter"""
    common.makeproject(tmpdir, "test-project", deps=[("mod1", "")], imports=["mod1"])
    project_dir = os.path.join(tmpdir, "test-project")
    libs_dir = os.path.join(project_dir, "libs")

    common.makemodule(libs_dir, "mod1", project=False)
    mod1 = os.path.join(libs_dir, "mod1")
    mod1_req_txt = """# I'm a comment
pytest\
>=\
1.5
iplib>=0.0.1
"""
    common.add_file(mod1, "requirements.txt", mod1_req_txt, msg="initial commit")

    project = Project(project_dir)
    Project.set(project)
    project.load_module("mod1")

    requirements = SourceInfo(mod1, "inmanta_plugins.mod1").requires
    assert sorted(requirements) == sorted(["pytest>=1.5", "iplib>=0.0.1"])

    project.virtualenv.use_virtual_env()
    # This would fail if the comments weren't filtered out
    project.virtualenv.install_from_list(requirements)

def _extend_path(ctx: Context, path: str):
    current_module_prefix = "." + os.path.sep
    if path.startswith(current_module_prefix):
        module_and_submodule_name_parts = ctx.owner.namespace.get_full_name().split("::")
        module_name = module_and_submodule_name_parts[0]
        if module_name in Project.get().modules.keys():
            return os.path.join(module_name, path[len(current_module_prefix):])
        else:
            raise Exception(
                f"Unable to determine current module for path {path}, called from {ctx.owner.namespace.get_full_name()}"
            )
    return path

def run_project(self, root):
    project_dir = root
    env = os.path.join(project_dir, ".env")
    if os.path.exists(env):
        os.remove(env)
    os.symlink(self.env, env)

    project = os.path.join(project_dir, "project.yml")
    if os.path.exists(project):
        os.remove(project)

    with open(project, "w") as cfg:
        cfg.write(
            """
name: snippet test
modulepath: [libs,%s]
downloadpath: %s
version: 1.0
repo: ['[email protected]:modules/', '[email protected]:config/']"""
            % (self.libs, self.libs)
        )

    Project.set(Project(project_dir))
    compiler.do_compile()
    os.remove(project)

def test_agent_config(project: Project):
    project.compile(
        """
import ip

host = ip::Host(
    name="test",
    ip="127.0.0.1",
    os=std::linux,
)
"""
    )
    agent_config = project.get_resource("std::AgentConfig")
    assert not agent_config

    project.compile(
        """
import ip

host = ip::Host(
    name="test",
    ip="127.0.0.1",
    os=std::linux,
    remote_agent=true,
)
"""
    )
    agent_config = project.get_resource("std::AgentConfig")
    assert agent_config
    assert agent_config.uri == "ssh://[email protected]:22?python=python"

    project.compile(
        """
import ip

host = ip::Host(
    name="test",
    ip="127.0.0.1",
    os=std::OS(name="testos", family=std::unix, python_cmd="test"),
    remote_agent=true,
)
"""
    )
    agent_config = project.get_resource("std::AgentConfig")
    assert agent_config
    assert agent_config.uri == "ssh://[email protected]:22?python=test"

def test_collect_python_requirements(tmpdir):
    # Create project
    common.makeproject(tmpdir, "test-project", deps=[("mod1", ""), ("mod2", "")], imports=["mod1", "mod2"])
    project_dir = os.path.join(tmpdir, "test-project")
    libs_dir = os.path.join(project_dir, "libs")

    # Create mod1
    common.makemodule(libs_dir, "mod1", project=False)
    mod1 = os.path.join(libs_dir, "mod1")
    mod1_req_txt = """iplib@git+https://github.com/bartv/python3-iplib
pytest\
>=\
1.5
iplib>=0.0.1
"""
    common.add_file(mod1, "requirements.txt", mod1_req_txt, msg="initial commit")

    # Create mod2
    common.makemodule(libs_dir, "mod2", project=False)
    mod2 = os.path.join(libs_dir, "mod2")
    mod2_req_txt = """# A comment
dummy-yummy # A comment
# Another comment
"""
    common.add_file(mod2, "requirements.txt", mod2_req_txt, msg="initial commit")

    project = Project(project_dir)
    Project.set(project)
    project.load_module("mod1")
    project.load_module("mod2")

    reqs = project.collect_python_requirements()
    expected_reqs = ["iplib@git+https://github.com/bartv/python3-iplib", "pytest>=1.5", "iplib>=0.0.1", "dummy-yummy"]
    assert sorted(reqs) == sorted(expected_reqs)

def test_plugin_loading_on_project_load(tmpdir, capsys):
    """
    Load all plugins via the Project.load() method call and verify that no module
    is loaded twice when an import statement is used.
    """
    main_cf = tmpdir.join("main.cf")
    main_cf.write("import submodule")

    project_yml = tmpdir.join("project.yml")
    project_yml.write(
        """
name: test
modulepath: libs
downloadpath: libs
repo: https://github.com/inmanta/inmanta.git
install_mode: master
"""
    )

    tmpdir.mkdir("libs")
    origin_mod_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "modules", "submodule")
    mod_dir = tmpdir.join("libs", os.path.basename(origin_mod_dir))
    shutil.copytree(origin_mod_dir, mod_dir)

    project = Project(tmpdir, autostd=False, venv_path=os.path.join(tmpdir, ".env"))
    Project.set(project)
    project.load()

    (stdout, stderr) = capsys.readouterr()
    assert stdout.count("#loading inmanta_plugins.submodule#") == 1
    assert stdout.count("#loading inmanta_plugins.submodule.submod#") == 1
    assert stdout.count("#loading inmanta_plugins.submodule.pkg#") == 1
    assert stdout.count("#loading inmanta_plugins.submodule.pkg.submod2#") == 1

    from inmanta_plugins.submodule import test

    assert test() == "test"
    (stdout, stderr) = capsys.readouterr()
    assert "#loading" not in stdout

    from inmanta_plugins.submodule.submod import test_submod

    assert test_submod() == "test_submod"
    (stdout, stderr) = capsys.readouterr()
    assert "#loading" not in stdout

    from inmanta_plugins.submodule.pkg import test_pkg

    assert test_pkg() == "test_pkg -- test_submod2"
    (stdout, stderr) = capsys.readouterr()
    assert "#loading" not in stdout

def do_compile(refs={}):
    """
    Run a full compile of the currently active project.
    """
    project = Project.get()
    compiler = Compiler(os.path.join(project.project_path, project.main_file), refs=refs)

    LOGGER.debug("Starting compile")

    (statements, blocks) = compiler.compile()
    sched = scheduler.Scheduler()
    success = sched.run(compiler, statements, blocks)

    LOGGER.debug("Compile done")

    if not success:
        sys.stderr.write("Unable to execute all statements.\n")
    return (sched.get_types(), compiler.get_ns())

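A minimal usage sketch of the compile entry point above, assuming it is exposed as inmanta.compiler.do_compile (the run_snippet/run_project helpers earlier in this section call it that way); the project directory path is a placeholder.

# Sketch only: drive a compile the same way run_snippet/run_project do above.
from inmanta import compiler  # assumption: do_compile() is reachable via inmanta.compiler
from inmanta.module import Project

project_dir = "/path/to/project"  # placeholder: directory containing project.yml and main.cf
Project.set(Project(project_dir))  # register this project as the active one
types, root_ns = compiler.do_compile()  # returns (types, root namespace) as defined above
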
def get_project(self, load=False) -> Project:
    project = Project.get()
    if load:
        project.load()
    return project

def verify(self):
    """
    Verify dependencies and frozen module versions
    """
    Project.get().verify()

def test_v1_and_v2_module_installed_simultaneously(
    tmpdir: py.path.local, snippetcompiler_clean, capsys, caplog, modules_dir: str
) -> None:
    """
    When a module is installed both in V1 and V2 format, ensure that:
    * A warning is logged
    * The V2 module is loaded and not the V1 module.
    """
    # Work around caching problem in venv
    feature_compiler_cache.set("False")

    module_name = "v1_print_plugin"

    def compile_and_verify(
        expected_message: str, expect_warning: bool, install_v2_modules: List[LocalPackagePath] = []
    ) -> None:
        caplog.clear()
        snippetcompiler_clean.setup_for_snippet(f"import {module_name}", install_v2_modules=install_v2_modules, autostd=False)
        snippetcompiler_clean.do_export()
        assert expected_message in capsys.readouterr().out
        got_warning = f"Module {module_name} is installed as a V1 module and a V2 module" in caplog.text
        assert got_warning == expect_warning

    # Run compile. Only a V1 module is installed in the module path
    expected_message_v1 = "Hello world"
    compile_and_verify(expected_message=expected_message_v1, expect_warning=False)
    assert isinstance(Project.get().modules[module_name], ModuleV1)

    # Convert V1 module to V2 module and install it as well
    module_dir = os.path.join(modules_dir, module_name)
    v1_module_dir = os.path.join(tmpdir, "v1_module")
    shutil.copytree(module_dir, v1_module_dir)
    assert os.path.isdir(v1_module_dir)
    v2_module_dir = os.path.join(tmpdir, "v2_module")
    module = ModuleV1(project=DummyProject(autostd=False), path=v1_module_dir)
    ModuleConverter(module).convert(output_directory=v2_module_dir)

    # Print a different message in the V2 module, to detect which of both gets loaded
    expected_message_v2 = "Other message"
    with open(os.path.join(v2_module_dir, "model", "_init.cf"), "r+") as fd:
        content = fd.read()
        assert expected_message_v1 in content
        content = content.replace(expected_message_v1, expected_message_v2)
        assert expected_message_v2 in content
        fd.seek(0)
        fd.write(content)

    # Run compile again. V1 version and V2 version are installed simultaneously
    compile_and_verify(
        expected_message=expected_message_v2,
        expect_warning=True,
        install_v2_modules=[LocalPackagePath(path=v2_module_dir, editable=False)],
    )
    assert isinstance(Project.get().modules[module_name], ModuleV2)

def reset(self):
    Project.set(Project(self.project_dir, autostd=Project.get().autostd))
    loader.unload_inmanta_plugins()

def setup_for_snippet(self, snippet, autostd=True):
    self.setup_for_snippet_external(snippet)
    Project.set(Project(self.project_dir, autostd=autostd))
    loader.unload_inmanta_plugins()

def load_project(venv_path: str) -> None:
    project: Project = Project(project_dir, venv_path=venv_path)
    Project.set(project)
    # don't load full project, only AST so we don't have to deal with module finder cleanup
    project.load_module_recursive(install=True)

def list(self, requires: bool = False) -> None:
    """
    List all modules in a table
    """

    def show_bool(b: bool) -> str:
        return "yes" if b else "no"

    table = []

    project = Project.get()
    project.get_complete_ast()

    names: Sequence[str] = sorted(project.modules.keys())
    specs: Dict[str, List[InmantaModuleRequirement]] = project.collect_imported_requirements()
    for name in names:
        mod: Module = Project.get().modules[name]
        version = str(mod.version)
        if name not in specs:
            specs[name] = []

        generation: str = str(mod.GENERATION.name).lower()

        reqv: str
        matches: bool
        editable: bool
        if isinstance(mod, ModuleV1):
            try:
                if project.install_mode == InstallMode.master:
                    reqv = "master"
                else:
                    release_only = project.install_mode == InstallMode.release
                    versions = ModuleV1.get_suitable_version_for(name, specs[name], mod._path, release_only=release_only)
                    if versions is None:
                        reqv = "None"
                    else:
                        reqv = str(versions)
            except Exception:
                LOGGER.exception("Problem getting version for module %s" % name)
                reqv = "ERROR"
            matches = version == reqv
            editable = True
        else:
            reqv = ",".join(req.version_spec_str() for req in specs[name] if req.specs) or "*"
            matches = all(version in req for req in specs[name])
            editable = mod.is_editable()

        table.append((name, generation, editable, version, reqv, matches))

    if requires:
        LOGGER.warning("The `inmanta module list -r` command has been deprecated.")
        for name, _, _, version, _, _ in table:
            print(" - %s==%s" % (name, version))
    else:
        t = texttable.Texttable()
        t.set_deco(texttable.Texttable.HEADER | texttable.Texttable.BORDER | texttable.Texttable.VLINES)
        t.header(("Name", "Type", "Editable", "Installed version", "Expected in project", "Matches"))
        t.set_cols_dtype(("t", "t", show_bool, "t", "t", show_bool))
        for row in table:
            t.add_row(row)
        print(t.draw())
