def test_get_tree(self):
    """
    'get_tree()' must hand back the complete internal data tree.
    """
    expected = {"foo": True}
    conf = ConfTree()
    conf.tree = {"foo": True}
    assert conf.get_tree() == expected
def __init__(self, cli_args):
    """
    :param cli_args: Arguments structure from docopt.
    """
    self.args = cli_args
    log.debug("Cli args: {}".format(self.args))

    # System-wide, per-user and current-working-directory config
    # locations, in the order they are loaded.
    defaults = ["/etc/dj.yaml", "/etc/dj.json"]
    defaults.append(os.path.expanduser("~/.dj.yaml"))
    defaults.append(os.path.expanduser("~/.dj.json"))
    defaults.append(os.path.join(os.getcwd(), ".dj.yaml"))
    defaults.append(os.path.join(os.getcwd(), ".dj.json"))
    self.default_config_files = defaults
    log.debug("DEFAULT_CONFIG_FILES: {}".format(self.default_config_files))

    # Path of the rendered output file; filled in later.
    self.outfile = ""

    # Merge every default config file into one unified config tree.
    log.debug("Building config...")
    self.config = ConfTree(self.default_config_files)
    self.config.load_config_files()
    log.debug("Config building is done")
def test_merge_data_tree_not_dict(self):
    """
    Merging anything that is not a dict must raise an Exception.
    """
    conf = ConfTree()
    with pytest.raises(Exception):
        conf.merge_data_tree([])
def test_merge_data_tree(self):
    """
    Merging two data trees should yield a tree with the keys of both.
    """
    conf = ConfTree()
    conf.tree = {"foo": True}
    conf.merge_data_tree({"bar": False})
    expected = {"foo": True, "bar": False}
    assert conf.tree == expected
def test_load_file_not_exists(self):
    """
    Loading a config file that is missing from the filesystem must raise.
    """
    missing = ["/tmp/foobar/opalopa"]
    conf = ConfTree(config_files=missing)
    with pytest.raises(Exception):
        conf.load_config_file()
def test_load_file_not_exists():
    """
    Loading a path that does not exist on the system must raise.
    """
    conf = ConfTree()
    with pytest.raises(Exception):
        conf.load_config_file("/tmp/foobar/opalopa")
def test_load_xml_file(self, tmpdir):
    """
    Files whose content is not a supported datatype — XML, for
    example — must raise an exception when loaded.
    """
    xml_file = tmpdir.join("xml.json")
    xml_file.write("<b>foo</b>")
    conf = ConfTree()
    with pytest.raises(Exception):
        conf.load_config_file(str(xml_file))
def test_load_empty_file(self, tmpdir):
    """
    Loading a file without any content must raise an exception.
    """
    empty_file = tmpdir.join("empty.json")
    empty_file.write("")
    # Sanity check: the fixture file really is empty.
    assert empty_file.read() == ""
    conf = ConfTree()
    with pytest.raises(Exception):
        conf.load_config_file(str(empty_file))
def test_create_obj(self):
    """
    A ConfTree built without arguments gets empty defaults; anything
    other than a list for config_files is rejected.
    """
    conf = ConfTree()
    assert conf.config_files == []
    assert conf.tree == {}

    # Only lists are allowed to be passed in
    with pytest.raises(Exception) as ex:
        ConfTree(config_files={"foo": "bar"})
    assert str(ex.value).startswith(
        "config files must be a list of items that can be read from FS")
def __init__(self, cli_args):
    """
    :param cli_args: Arguments structure from docopt.
    """
    self.args = cli_args

    # Context hash to store context for template environment
    self.environment_vars = {"globals": {}, "filters": {}}

    Log.debug("Cli args: %s", self.args)

    home = os.path.expanduser
    cwd = os.getcwd()
    # Per-user and current-working-directory config locations.
    self.default_config_files = [
        home("~/.dj.yaml"),
        home("~/.dj.json"),
        os.path.join(cwd, ".dj.yaml"),
        os.path.join(cwd, ".dj.json"),
    ]
    Log.debug("DEFAULT_CONFIG_FILES: %s", self.default_config_files)

    # Build the unified config tree. Missing default files are not an
    # error, so loading is told not to fail here.
    Log.debug("Building config...")
    self.config = ConfTree()
    self.config.load_config_files(self.default_config_files, onload_fail=False)
    Log.debug("Config building is done")
def test_store_config_files(self):
    """
    Config files given to the constructor should be stored internally
    in the ConfTree object.
    """
    dummy_files = ["/foo/bar"]
    c = ConfTree(config_files=dummy_files)
    # BUG FIX: the original `assert c.config_files, dummy_files` used a
    # comma, which makes dummy_files the assert *message* — the test
    # passed whenever the attribute was truthy and never compared the
    # values. Assert equality instead.
    assert c.config_files == dummy_files
def test_create_obj(self):
    """
    An empty Core object should get its defaults set correctly, and a
    ConfTree built from the same default config files should match the
    config tree Core produced.
    """
    core = Core({})
    assert core.args == {}

    # Test that loading all default config files works
    tree = ConfTree(core.default_config_files)
    assert tree.tree == core.config.get_tree()
def test_get(self):
    """
    'get()' should return the stored value for present keys and the
    supplied fallback for missing ones.
    """
    conf = ConfTree()
    conf.tree = {"foo": True, "bar": 1, "qwe": "rty"}

    assert conf.get("foo", False) is True
    assert conf.get("bar", -1) == 1
    assert conf.get("qwe", "ytr") == "rty"
    assert conf.get("foobar", "barfoo") == "barfoo"
def test_get(self):
    """
    Present keys come back as stored; absent keys fall back to the
    given default value.
    """
    tree_data = dict(foo=True, bar=1, qwe="rty")
    conf = ConfTree()
    conf.tree = tree_data
    assert conf.get("foo", False) is True
    assert conf.get("bar", -1) == 1
    assert conf.get("qwe", "ytr") == "rty"
    assert conf.get("foobar", "barfoo") == "barfoo"
class Core(object):
    """
    Main application object.

    Builds a unified config tree from the default config files, merges in
    cli-supplied config/env vars, imports datasource extension modules and
    renders the source Dockerfile through Jinja to an output file.
    """

    def __init__(self, cli_args):
        """
        :param cli_args: Arguments structure from docopt.
        """
        self.args = cli_args
        log.debug("Cli args: {}".format(self.args))
        # System-wide, per-user and current-working-directory config
        # locations, in the order they are loaded.
        self.default_config_files = [
            "/etc/dj.yaml",
            "/etc/dj.json",
            os.path.expanduser("~/.dj.yaml"),
            os.path.expanduser("~/.dj.json"),
            os.path.join(os.getcwd(), ".dj.yaml"),
            os.path.join(os.getcwd(), ".dj.json"),
        ]
        log.debug("DEFAULT_CONFIG_FILES: {}".format(self.default_config_files))
        # Path of the rendered output file; set in process_dockerfile().
        self.outfile = ""
        # Load all config files into unified config tree
        log.debug("Building config...")
        self.config = ConfTree(self.default_config_files)
        self.config.load_config_files()
        log.debug("Config building is done")

    def parse_env_vars(self):
        """
        Parse all variables inputed from cli and add them to global config.

        Each '--env' argument must have the form 'key=value' with a
        non-empty key and value; anything else raises Exception. Parsed
        pairs are merged under the 'env' key of the config tree.
        """
        vars = {}
        for var in self.args.get("--env", []):
            s = var.split("=")
            # Require exactly one '=' and non-empty key/value parts.
            if len(s) != 2 or (len(s[0]) == 0 or len(s[1]) == 0):
                raise Exception("var '{0}' is not of format 'key=value'".format(var))
            vars[s[0]] = s[1]
        self.config.merge_data_tree({"env": vars})

    def load_user_specefied_config_file(self):
        """
        Loads any config file specefied by user from commandline.
        It should only be possible to load one user specefied config file.
        """
        # pop() removes the argument so the file cannot be loaded twice.
        user_specefied_config_file = self.args.pop("--config", None)
        if user_specefied_config_file:
            log.debug("Loading user specefied config file : {}".format(user_specefied_config_file))
            self.config.load_config_file(user_specefied_config_file)

    def handle_data_sources(self):
        """
        Take all specefied datasources from cli and merge with any in config then
        try to import all datasources and raise exception if it fails.
        """
        # TODO: Push datasources into conftree but because they are lists they wont merge easy
        #       via dict.update()
        ds = self.args.get("--datasource", [])
        ds.extend(self.config.tree.get("datasources", []))
        # Find all contrib files and add them to datasources to load
        ds.extend([getattr(contrib, c).__file__ for c in dir(contrib) if not c.startswith("_")])
        # Load all specefied datasource files
        for datasource_file in ds:
            if not os.path.exists(datasource_file):
                raise Exception("Unable to load datasource file : {}".format(datasource_file))
            p = os.path.dirname(datasource_file)
            try:
                # Append to sys path so we can import the python file
                sys.path.insert(0, p)
                datasource_path = os.path.splitext(os.path.basename(datasource_file))[0]
                log.debug("{0}".format(datasource_path))
                # Import python file but do nothing with it because all datasources should
                # handle and register themself to jinja.
                i = __import__(datasource_path)
                # Auto load all filters and global functions if they follow name pattern
                for method in dir(i):
                    if method.lower().startswith("_filter_"):
                        method_name = method.replace("_filter_", "")
                        self._attach_function("filters", getattr(i, method), method_name)
                    elif method.lower().startswith("_global_"):
                        method_name = method.replace("_global_", "")
                        self._attach_function("globals", getattr(i, method), method_name)
            except ImportError as ie:
                log.critical("cannot load datasource. \n{}".format(ie))
                raise ie
            finally:
                # Clean out path to avoid issue
                sys.path.remove(p)

    def process_dockerfile(self):
        """
        Read source dockerfile --> Render with jinja --> Write to outfile
        """
        source_dockerfile = self.args["--dockerfile"]
        with open(source_dockerfile, "r") as stream:
            log.info("Reading source file...")
            template = Template(stream.read())
        # Update the jinja environment with all custom functions & filters
        self._update_env(template.environment)
        # The rendering context comes from the 'env' section of the config tree.
        context = self.config.get("env", {})
        log.info("Rendering context")
        for k, v in context.items():
            log.info(" * %s: %s" % (k, v))
        log.info("Rendering Dockerfile...")
        out_data = template.render(**context)
        log.debug("\n******\nWriting to file\n*******")
        log.debug(out_data)
        if "--outfile" not in self.args:
            log.debug("No --outfile <FILE> was specified. Defaulting to Dockerfile")
            self.outfile = "Dockerfile"
        else:
            self.outfile = self.args['--outfile']
        with open(self.outfile, "w") as stream:
            log.info("Writing to outfile...")
            stream.write(out_data)

    def _attach_function(self, attr, func, name):
        """
        Register a function so it can be used within Jinja
        """
        log.debug("Attaching function to jinja : {} : {} : {}".format(attr, func.__name__, name))
        # Collected in the module-level _local_env hash; applied to a
        # concrete jinja environment later by _update_env().
        global _local_env
        _local_env[attr][name] = func
        return func

    def _update_env(self, env):
        """
        Given a jinja environment, update it with third party collected
        environment extensions.
        """
        env.globals.update(_local_env["globals"])
        env.filters.update(_local_env["filters"])

    def main(self):
        """
        Runs all logic in application
        """
        self.load_user_specefied_config_file()
        self.parse_env_vars()
        self.handle_data_sources()
        self.process_dockerfile()
        log.info("Done... Bye :]")
class Core(object):
    """
    Main application object.

    Builds the config tree from default and user-specified config files,
    imports datasource extension modules into a per-instance environment
    hash, and renders the source Dockerfile through Jinja to the outfile.
    """

    def __init__(self, cli_args):
        """
        :param cli_args: Arguments structure from docopt.
        """
        self.args = cli_args
        # Context hash to store context for template environment
        self.environment_vars = {
            "globals": {},
            "filters": {},
        }
        Log.debug("Cli args: %s", self.args)
        # Per-user and current-working-directory config locations.
        self.default_config_files = [
            os.path.expanduser("~/.dj.yaml"),
            os.path.expanduser("~/.dj.json"),
            os.path.join(os.getcwd(), ".dj.yaml"),
            os.path.join(os.getcwd(), ".dj.json"),
        ]
        Log.debug("DEFAULT_CONFIG_FILES: %s", self.default_config_files)
        # Load all config files into unified config tree, don't fail on load since
        # default config files might not exist.
        Log.debug("Building config...")
        self.config = ConfTree()
        self.config.load_config_files(self.default_config_files, onload_fail=False)
        Log.debug("Config building is done")

    def parse_env_vars(self):
        """
        Parse all variables inputed from cli and add them to global config.

        Each '--env' argument must have the form 'key=value' with a
        non-empty key and value; anything else raises Exception.
        """
        _vars = {}
        for var in self.args.get("--env", []):
            s = var.split("=")
            # Require exactly one '=' and non-empty key/value parts.
            if len(s) != 2 or (len(s[0]) == 0 or len(s[1]) == 0):
                raise Exception("var '{0}' is not of format 'key=value'".format(var))
            _vars[s[0]] = s[1]
        self.config.merge_data_tree(_vars)

    def load_user_specefied_config_files(self):
        """
        Loads any config file specefied by user from commandline.
        It should only be possible to load one user specefied config file.
        """
        user_specefied_config_files = self.args.get("--config", [])
        self.config.load_config_files(user_specefied_config_files)

    def handle_data_sources(self):
        """
        Take all specefied datasources from cli and merge with any in config then
        try to import all datasources and raise exception if it fails.
        """
        ds = self.args.get("--datasource", [])
        ds.extend(self.config.tree.get("datasources", []))
        # Find all contrib files and add them to datasources to load
        ds.extend([getattr(contrib, c).__file__ for c in dir(contrib) if not c.startswith("_")])
        # Load all specefied datasource files
        for datasource_file in ds:
            p = os.path.dirname(datasource_file)
            try:
                # Append to sys path so we can import the python file
                sys.path.insert(0, p)
                datasource_path = os.path.splitext(os.path.basename(datasource_file))[0]
                Log.debug("%s", datasource_path)
                # Import python file but do nothing with it because all datasources should
                # handle and register themself to jinja.
                i = __import__(datasource_path)
                # Auto load all filters and global functions if they follow name pattern
                for method in dir(i):
                    if method.lower().startswith("_filter_"):
                        method_name = method.replace("_filter_", "")
                        self.attach_function("filters", getattr(i, method), method_name)
                    elif method.lower().startswith("_global_"):
                        method_name = method.replace("_global_", "")
                        self.attach_function("globals", getattr(i, method), method_name)
            except ImportError as ie:
                Log.error("Unable to import - %s", datasource_file)
                Log.error("%s", ie)
                raise ExitError("import failed")
            finally:
                # Clean out path to avoid issue
                sys.path.remove(p)

    def handle_dockerfile(self):
        """
        Handle errors and pass the invocation to process_dockerfile.

        Converts a FileProcessingError into a logged ExitError so main()
        can exit cleanly.
        """
        try:
            self.process_dockerfile()
        except FileProcessingError as e:
            # e.args is (original OS/IO error, offending file path).
            Log.error("Couldn't process - %s", e.args[1])
            Log.error("%s", e.args[0])
            raise ExitError("dockerfile not loaded")

    def process_dockerfile(self):
        """
        Read source dockerfile --> Render with jinja --> Write to outfile

        :raises FileProcessingError: when either file cannot be opened.
        """
        source_dockerfile = self.args["--dockerfile"]
        outputfile = self.args["--outfile"]
        try:
            with open(source_dockerfile, "r") as stream:
                Log.info("Reading source file...")
                environment = self.get_template_environment()
                template = environment.from_string(stream.read())
        except (OSError, IOError) as e:
            raise FileProcessingError(e, source_dockerfile)
        # The whole config tree is used as the template rendering context.
        context = self.config.get_tree()
        Log.debug("context: %s", context)
        Log.info("rendering Dockerfile...")
        out_data = template.render(**context)
        Log.debug("Data to be written to the output file\n*****\n%s*****", out_data)
        try:
            with open(outputfile, "w") as stream:
                Log.info("Writing to outfile...")
                stream.write(out_data)
        except (OSError, IOError) as e:
            raise FileProcessingError(e, outputfile)

    def attach_function(self, attr, func, name):
        """
        Add function to environment context hash so it can be used within Jinja
        """
        Log.debug("Attaching function to jinja : %s : %s : %s", attr, func.__name__, name)
        self.environment_vars[attr][name] = func
        return func

    def get_template_environment(self):
        """
        Given a jinja templated environment, updated with our globals and filters.
        """
        # we'll render a file, so we should preserve newlines as they are
        environment = Environment(keep_trailing_newline=True)
        for n in ('globals', 'filters'):
            env_vars = getattr(environment, n)
            env_vars.update(self.environment_vars[n])
        return environment

    def main(self):
        """
        Runs all logic in application
        """
        try:
            self.load_user_specefied_config_files()
            self.parse_env_vars()
            self.handle_data_sources()
            self.handle_dockerfile()
        except ExitError:
            sys.exit(1)
        Log.info("Done... Bye :]")