def as_config(
    self,
    session,
    plugin: PluginRef,
    sources: List[PluginSettingValueSource] = None,
    redacted=False,
) -> Dict:
    """Build the plugin's configuration as a nested dict.

    Starts from a deep copy of the installed plugin's `meltano.yml`
    config (so extraneous settings survive), then overlays a value for
    each defined setting.

    Args:
        session: DB session passed through to `get_value`.
        plugin: reference to the plugin whose config is built.
        sources: when given, only settings whose value came from one of
            these sources are included in the overlay.
        redacted: when True, truthy values of `password`-kind settings
            are replaced with REDACTED_VALUE so secrets don't leak.

    Returns:
        Dict of nested configuration values.
    """
    plugin_install = self.get_install(plugin)

    # NOTE(review): the result was previously bound to an unused local;
    # the call is kept in case it validates that a definition exists for
    # this plugin — confirm whether it can be dropped entirely.
    self.get_definition(plugin)

    # defaults to the meltano.yml for extraneous settings
    config = deepcopy(plugin_install.config)

    # definition settings
    for setting in self.definitions(plugin):
        value, source = self.get_value(session, plugin, setting["name"])
        if sources and source not in sources:
            continue

        # we don't want to leak secure information,
        # so we redact all `password`-kind settings
        if redacted and value and setting.get("kind") == "password":
            value = REDACTED_VALUE

        nest(config, setting["name"], value)

    return config
def before_configure(self, invoker, session):
    """Generate Airflow's `airflow.cfg`, overlay the plugin's settings
    into it, and run `airflow initdb` ahead of the real invocation.

    Args:
        invoker: plugin invoker used to run `airflow` commands and to
            locate the generated config file.
        session: DB session, re-used when re-preparing the invoker.

    Raises:
        SubprocessError: if `airflow initdb` exits non-zero.
    """
    project = invoker.project
    stub_path = project.plugin_dir(self).joinpath("airflow.cfg")

    # generate the default `airflow.cfg`
    # (running any command makes Airflow write it out as a side effect)
    handle = invoker.invoke(
        "--help",
        require_preparation=False,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    handle.wait()

    airflow_cfg_path = invoker.files["config"]
    logging.debug(f"Generated default '{str(airflow_cfg_path)}'")

    # move it to the config dir
    shutil.move(airflow_cfg_path, stub_path)
    airflow_cfg_path = stub_path
    logging.debug(f"Moved to '{str(stub_path)}'")

    # open the configuration and update it
    # so the config uses our stubbed values
    airflow_cfg = configparser.ConfigParser()
    with airflow_cfg_path.open() as cfg:
        airflow_cfg.read_file(cfg)
        logging.debug(f"Loaded '{str(airflow_cfg_path)}'")

    # plugin config keys come back flat ("section.option"); nest them so
    # each top-level key maps onto a configparser section
    config = {}
    for key, value in invoker.plugin_config.items():
        nest(config, key, str(value))

    for section, cfg in config.items():
        airflow_cfg[section].update(cfg)
        logging.debug(f"\tUpdated section [{section}] with {cfg}")

    with airflow_cfg_path.open("w") as cfg:
        airflow_cfg.write(cfg)
        logging.debug(f"Saved '{str(airflow_cfg_path)}'")

    # we've changed the configuration here, so we need to call
    # prepare again on the invoker so it re-reads the configuration
    # for the Airflow plugin
    invoker.prepare(session)

    handle = invoker.invoke(
        "initdb",
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )
    initdb = handle.wait()

    if initdb:
        raise SubprocessError("airflow initdb failed", handle)

    logging.debug(f"Completed `airflow initdb`")
def install_config_stub(self, invoker):
    """Write the plugin's configuration to its JSON config stub file.

    Args:
        invoker: plugin invoker providing the flat `plugin_config`
            mapping to serialize.
    """
    project = invoker.project
    plugin_dir = project.plugin_dir(self)

    config_path = plugin_dir.joinpath(self.config_files["config"])
    with open(config_path, "w") as config_stub:
        # unflatten dotted config keys ("a.b") into nested dicts
        config = {}
        for key, value in invoker.plugin_config.items():
            nest(config, key, value)

        json.dump(config, config_stub)

    # log the path, not the file object's repr
    logging.debug(f"Created configuration stub at {config_path}")
def plugin_config(self, project):
    """Load this plugin's settings, grouped one level deep by key prefix.

    Args:
        project: the active Meltano project.

    Returns:
        Dict mapping each top-level key segment to its sub-config.
    """
    _, Session = project_engine(project)
    session = Session()
    try:
        settings_service = PluginSettingsService(project)
        raw_config = settings_service.as_config(session, self)
    finally:
        session.close()

    grouped = {}
    for name, value in raw_config.items():
        # split only on the first dot so "a.b.c" groups under "a"
        nest(grouped, name, value, maxsplit=1)

    return grouped
def before_configure(self, invoker, session):
    """Create the tap's JSON config stub file before invocation.

    Args:
        invoker: plugin invoker providing the flat `plugin_config`
            mapping to serialize.
        session: DB session (unused here; part of the hook interface).
    """
    project = invoker.project
    plugin_dir = project.plugin_dir(self)

    config_path = plugin_dir.joinpath(self.config_files["config"])
    with open(config_path, "w") as config_stub:
        # unflatten dotted config keys ("a.b") into nested dicts
        config = {}
        for key, value in invoker.plugin_config.items():
            nest(config, key, value)

        # Metadata configuration is handled by SingerTap.apply_metadata_rules
        config.pop("metadata", None)

        json.dump(config, config_stub)

    # log the path, not the file object's repr
    logging.debug(f"Created configuration stub at {config_path}")
def test_nest():
    """`nest` creates/returns nested containers under dotted keys."""
    subject = {}

    b = nest(subject, "a.b")
    b["val"] = 1
    assert b == {"val": 1}

    # nesting deeper under an existing key extends it in place
    c = nest(subject, "a.b.c")
    c["val"] = 2
    assert b == {"val": 1, "c": {"val": 2}}

    arr = nest(subject, "a.list", value=[])

    VALUE = {"value": 1}
    val = nest(subject, "a.value", value=VALUE)

    # the containers placed in `subject` are the very objects returned
    assert subject["a"]["b"] is b
    assert subject["a"]["b"]["c"] is c
    assert isinstance(arr, list)

    # make sure it is a copy, not the same object
    assert val == VALUE
    assert val is not VALUE
def add_to_file(self, plugin: PluginInstall):
    """Persist `plugin` into `meltano.yml`, unless it is already present.

    Args:
        plugin: the installed plugin to record.

    Returns:
        A plugin instance built from the canonical installed definition.
    """
    installed_def = plugin.canonical()

    with self.project.meltano_update() as meltano_yml:
        if plugin not in self.plugins():
            # ensure the `plugins.<type>` list exists, then append
            plugins = nest(meltano_yml, f"plugins.{plugin.type}", value=[])
            plugins.append(installed_def)
        else:
            logging.warning(
                f"{plugin.name} is already present, use `meltano install` to install it."
            )

    return plugin_factory(plugin.type, installed_def)
def Popen_options(self):
    """Extend the base Popen options with an Airflow-specific environment.

    Returns:
        Dict of Popen keyword options with `env` updated in place.
    """
    venv_dir = self.project.venvs_dir(self.plugin.type, self.plugin.name)

    env = os.environ.copy()
    # add the Airflow virtualenv because it contains `gunicorn`
    env["PATH"] = os.pathsep.join([str(venv_dir.joinpath("bin")), env["PATH"]])
    env["VIRTUAL_ENV"] = str(venv_dir)
    env["AIRFLOW_HOME"] = str(self.config_service.run_dir)

    options = super().Popen_options()
    nest(options, "env").update(env)
    return options
def after_install(self, project, args=None):
    """Bootstrap Airflow after installation.

    Generates the default `airflow.cfg`, moves it to the config dir,
    overlays the plugin's Meltano settings into it, then runs
    `airflow initdb`.

    Args:
        project: the active Meltano project.
        args: unused; kept for hook-interface compatibility. Default
            changed from a mutable `[]` to `None` (never read, so this
            is backward-compatible).

    Raises:
        SubprocessError: if `airflow initdb` exits non-zero.
    """
    _, Session = project_engine(project)
    session = Session()

    plugin_config_service = PluginConfigService(
        self,
        config_dir=project.plugin_dir(self),
        run_dir=project.run_dir(self.name),
    )
    plugin_settings_service = PluginSettingsService(project)

    airflow_cfg_path = plugin_config_service.run_dir.joinpath("airflow.cfg")
    stub_path = plugin_config_service.config_dir.joinpath("airflow.cfg")

    invoker = invoker_factory(
        project,
        self,
        prepare_with_session=session,
        plugin_config_service=plugin_config_service,
    )

    try:
        # generate the default `airflow.cfg`
        # (running any command makes Airflow write it out as a side effect)
        handle = invoker.invoke(
            "--help", stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
        )
        handle.wait()
        logging.debug(f"Generated default '{str(airflow_cfg_path)}'")

        # move it to the config dir
        shutil.move(airflow_cfg_path, stub_path)
        airflow_cfg_path = stub_path
        logging.debug(f"Moved to '{str(stub_path)}'")

        # open the configuration and update it
        # so the config uses our stubbed values
        airflow_cfg = configparser.ConfigParser()
        with airflow_cfg_path.open() as cfg:
            airflow_cfg.read_file(cfg)
            logging.debug(f"Loaded '{str(airflow_cfg_path)}'")

        # settings come back flat ("section.option"); nest them so each
        # top-level key maps onto a configparser section
        config = {}
        for key, value in plugin_settings_service.as_config(session, self).items():
            nest(config, key, str(value))

        for section, cfg in config.items():
            airflow_cfg[section].update(cfg)
            logging.debug(f"\tUpdated section [{section}] with {cfg}")

        with airflow_cfg_path.open("w") as cfg:
            airflow_cfg.write(cfg)
            logging.debug(f"Saved '{str(airflow_cfg_path)}'")

        # we've changed the configuration here, so we need to call
        # prepare again on the invoker so it re-reads the configuration
        # for the Airflow plugin
        invoker.prepare(session)
        handle = invoker.invoke(
            "initdb",
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )
        initdb = handle.wait()

        if initdb:
            raise SubprocessError("airflow initdb failed", handle)

        logging.debug("Completed `airflow initdb`")
    finally:
        session.close()
def process_config(self, flat_config):
    """Expand a flat dotted-key config mapping into a nested dict.

    Args:
        flat_config: mapping of dotted keys ("a.b") to values.

    Returns:
        Nested dict where every leaf value has been stringified.
    """
    nested = {}
    for name, raw_value in flat_config.items():
        nest(nested, name, str(raw_value))
    return nested