def invoke(project, plugin_type, dump, plugin_name, plugin_args):
    """Invoke a named plugin, or dump one of its generated files instead of running it.

    Always exits the process: with 0 after a successful dump, otherwise with the
    plugin subprocess's exit code. On SubprocessError the stderr is logged and the
    error re-raised. The DB session is closed in all cases.
    """
    if plugin_type:
        plugin_type = PluginType.from_cli_argument(plugin_type)
    else:
        plugin_type = None

    _, make_session = project_engine(project)
    session = make_session()

    project_plugins = ProjectPluginsService(project)
    plugin = project_plugins.find_plugin(
        plugin_name, plugin_type=plugin_type, invokable=True
    )

    try:
        plugin_invoker = invoker_factory(
            project, plugin, plugins_service=project_plugins
        )
        with plugin_invoker.prepared(session):
            if dump:
                dump_file(plugin_invoker, dump)
                rc = 0
            else:
                proc = plugin_invoker.invoke(*plugin_args)
                rc = proc.wait()
    except SubprocessError as err:
        logger.error(err.stderr)
        raise
    finally:
        session.close()

    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_invoke(
        plugin_name=plugin_name, plugin_args=" ".join(plugin_args)
    )

    sys.exit(rc)
def invoke(project, plugin_name, plugin_args):
    """Invoke a named plugin with the given arguments and exit with its exit code.

    Fix: the session was previously created *inside* the ``try`` block, so a
    failure in ``Session()`` made the ``finally`` clause reference an unbound
    ``session`` and raise ``NameError``, masking the original error. The session
    is now created before the ``try`` so ``finally`` is always safe.

    Raises click.Abort (chained from the underlying error) on any failure.
    """
    _, Session = project_engine(project)
    session = Session()
    try:
        config_service = ConfigService(project)
        plugin = config_service.find_plugin(plugin_name)
        service = invoker_factory(project, plugin, prepare_with_session=session)
        handle = service.invoke(*plugin_args)
        exit_code = handle.wait()

        tracker = GoogleAnalyticsTracker(project)
        tracker.track_meltano_invoke(
            plugin_name=plugin_name, plugin_args=" ".join(plugin_args)
        )

        # SystemExit derives from BaseException, so the `except Exception`
        # below does not swallow it.
        sys.exit(exit_code)
    except Exception as err:
        logging.exception(err)
        click.secho(f"An error occurred: {err}.", fg="red")
        raise click.Abort() from err
    finally:
        session.close()
def start_all(self):
    """Start the Airflow webserver and scheduler, logging each to its own file.

    Fix: the session was previously created *inside* the ``try`` block, so a
    failure in ``Session()`` made the ``finally`` clause reference an unbound
    ``session`` and raise ``NameError``. It is now created before the ``try``.
    """
    _, Session = project_engine(self.project)
    logs_dir = self.project.run_dir("airflow", "logs")

    session = Session()
    try:
        invoker = invoker_factory(
            self.project, self._plugin, prepare_with_session=session
        )
        # fmt: off
        with logs_dir.joinpath("webserver.log").open("w") as webserver, \
             logs_dir.joinpath("scheduler.log").open("w") as scheduler:
            self._webserver = invoker.invoke("webserver", "-w", "1", stdout=webserver)
            self._scheduler = invoker.invoke("scheduler", stdout=scheduler)

            self.pid_file("webserver").write_pid(self._webserver.pid)
            self.pid_file("scheduler").write_pid(self._scheduler.pid)
        # fmt: on

        # Time padding for server initialization so UI iframe displays as expected
        # (iteration potential on approach but following UIAvailableWorker sleep approach)
        time.sleep(2)
    finally:
        session.close()
def transformer_invoker(self):
    """Build an invoker for the transformer plugin's installed definition."""
    transformer = self.transformer
    return invoker_factory(
        self.project,
        transformer.install,
        plugin_settings_service=transformer.settings_service,
        plugin_discovery_service=self.plugin_discovery_service,
    )
def loader_invoker(self):
    """Build an invoker for the loader plugin, rooted in the ELT run directory."""
    loader = self.loader
    return invoker_factory(
        self.project,
        loader.install,
        run_dir=self.elt_run_dir,
        plugin_settings_service=loader.settings_service,
        plugin_discovery_service=self.plugin_discovery_service,
    )
def _factory(plugin, **kwargs):
    # Closure over `project`, `plugin_settings_service` and
    # `plugin_discovery_service` from the enclosing scope; extra keyword
    # arguments are forwarded verbatim to `invoker_factory`.
    return invoker_factory(
        project,
        plugin,
        plugin_settings_service=plugin_settings_service,
        plugin_discovery_service=plugin_discovery_service,
        **kwargs,
    )
def _factory(plugin, **kwargs):
    # Closure over `project`, `project_plugins_service` and
    # `plugin_settings_service_factory` from the enclosing scope; a fresh
    # settings service is built per plugin, and extra keyword arguments are
    # forwarded verbatim to `invoker_factory`.
    return invoker_factory(
        project,
        plugin,
        plugins_service=project_plugins_service,
        plugin_settings_service=plugin_settings_service_factory(plugin),
        **kwargs,
    )
def extractor_invoker(self):
    """Build an invoker for the extractor plugin, rooted in the ELT run directory."""
    extractor = self.extractor
    return invoker_factory(
        self.project,
        extractor.install,
        run_dir=self.elt_run_dir,
        plugin_config=extractor.config,
        plugin_settings_service=self.plugin_settings_service,
        plugin_discovery_service=self.plugin_discovery_service,
    )
def load_catalog(self, session):
    """Dump the extractor's catalog file and return it parsed as a dict."""
    extractor_invoker = invoker_factory(
        self.project, self.extractor, plugins_service=self.plugins_service
    )
    with extractor_invoker.prepared(session):
        raw_catalog = extractor_invoker.dump("catalog")
    return json.loads(raw_catalog)
def load_schema(self, session):
    """Return the extractor's catalog as a dict, running discovery first if needed."""
    invoker = invoker_factory(
        self.project, self.extractor, prepare_with_session=session
    )

    # Discovery must have run at least once before a catalog file exists.
    if not invoker.files["catalog"].exists():
        logging.info("Catalog not found, trying to run the tap with --discover.")
        self.extractor.run_discovery(invoker)

    # Apply the configured metadata rules to the catalog on disk.
    self.extractor.apply_metadata_rules(invoker)

    # Read back the (possibly updated) catalog.
    with invoker.files["catalog"].open() as catalog:
        return json.load(catalog)
def invoker_for(self, plugin_type):
    """Return an invoker for this context's plugin of the given type.

    Raises KeyError for plugin types with no context (anything other than
    extractors, loaders, or transformers).
    """
    context_by_type = {
        PluginType.EXTRACTORS: self.extractor,
        PluginType.LOADERS: self.loader,
        PluginType.TRANSFORMERS: self.transformer,
    }
    ctx = context_by_type[plugin_type]

    return invoker_factory(
        self.project,
        ctx.plugin,
        context=self,
        run_dir=self.elt_run_dir,
        plugins_service=self.plugins_service,
        plugin_settings_service=ctx.settings_service,
    )
def start_all(self):
    """Start the Airflow scheduler, logging to the scheduler log file.

    Fix: the session was previously created *inside* the ``try`` block, so a
    failure in ``Session()`` made the ``finally`` clause reference an unbound
    ``session`` and raise ``NameError``. It is now created before the ``try``.
    """
    _, Session = project_engine(self.project)
    logs_path = self.project.run_dir("airflow", "logs", "scheduler.log")

    session = Session()
    try:
        invoker = invoker_factory(
            self.project, self._plugin, prepare_with_session=session
        )
        with logs_path.open("w") as logs_file:
            scheduler = invoker.invoke(
                "scheduler", "--pid", str(self.pid_file), stdout=logs_file
            )
            self.pid_file.write_pid(scheduler.pid)
    finally:
        session.close()
async def test_extractor(config=None):
    """Run the extractor with an optional config override and test its output stream.

    Returns False on any failure; the spawned process (if any) is always
    terminated.

    Fixes:
    - ``process`` was not initialized before the ``try``, so any failure prior
      to ``invoke_async`` made the ``finally`` clause raise ``NameError``.
    - The mutable default argument ``config={}`` is replaced with the ``None``
      sentinel (behaviorally identical for callers: the default still acts as
      an empty override).
    """
    process = None
    try:
        settings_service = settings.with_config_override(
            PluginSettingsService.unredact(config or {})
        )
        invoker = invoker_factory(
            project,
            plugin,
            prepare_with_session=db.session,
            plugin_settings_service=settings_service,
        )
        process = await invoker.invoke_async(stdout=asyncio.subprocess.PIPE)
        return await test_stream(process.stdout)
    except Exception:
        # if anything happens, this is not successful
        return False
    finally:
        try:
            if process:
                psutil.Process(process.pid).terminate()
        except Exception as err:
            logging.debug(err)
async def test_extractor():
    """Run the extractor and test its output stream; False on any failure.

    The spawned process, if one was started, is always terminated.
    """
    proc = None
    try:
        extractor_invoker = invoker_factory(
            project,
            plugin,
            plugins_service=plugins_service,
            plugin_settings_service=settings,
        )
        with extractor_invoker.prepared(db.session):
            proc = await extractor_invoker.invoke_async(
                stdout=asyncio.subprocess.PIPE
            )
            return await test_stream(proc.stdout)
    except Exception as exc:
        logging.debug(exc)
        # if anything happens, this is not successful
        return False
    finally:
        try:
            if proc:
                psutil.Process(proc.pid).terminate()
        except Exception as exc:
            logging.debug(exc)
def after_install(self, project, args=()):
    """Post-install hook: generate Airflow's config stub and initialize its DB.

    Steps:
    1. Invoke ``airflow --help`` so Airflow writes its default ``airflow.cfg``.
    2. Move that file from the run dir into the plugin's config dir (the stub).
    3. Merge the plugin's Meltano settings into the stub via configparser.
    4. Re-prepare the invoker (config changed) and run ``airflow initdb``.

    Raises SubprocessError if ``airflow initdb`` exits non-zero.

    Fix: the mutable default argument ``args=[]`` is replaced with an empty
    tuple (``args`` is not used in the body, so this is backward compatible).
    """
    _, Session = project_engine(project)
    session = Session()

    plugin_config_service = PluginConfigService(
        self,
        config_dir=project.plugin_dir(self),
        run_dir=project.run_dir(self.name),
    )
    plugin_settings_service = PluginSettingsService(project)

    airflow_cfg_path = plugin_config_service.run_dir.joinpath("airflow.cfg")
    stub_path = plugin_config_service.config_dir.joinpath("airflow.cfg")

    invoker = invoker_factory(
        project,
        self,
        prepare_with_session=session,
        plugin_config_service=plugin_config_service,
    )
    try:
        # generate the default `airflow.cfg`
        handle = invoker.invoke(
            "--help", stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
        )
        handle.wait()
        logging.debug(f"Generated default '{str(airflow_cfg_path)}'")

        # move it to the config dir
        shutil.move(airflow_cfg_path, stub_path)
        airflow_cfg_path = stub_path
        logging.debug(f"Moved to '{str(stub_path)}'")

        # open the generated configuration and update it with our settings
        airflow_cfg = configparser.ConfigParser()
        with airflow_cfg_path.open() as cfg:
            airflow_cfg.read_file(cfg)
        logging.debug(f"Loaded '{str(airflow_cfg_path)}'")

        # nest flat `section.key` settings into a {section: {key: value}} dict
        config = {}
        for key, value in plugin_settings_service.as_config(session, self).items():
            nest(config, key, str(value))

        for section, cfg in config.items():
            airflow_cfg[section].update(cfg)
            logging.debug(f"\tUpdated section [{section}] with {cfg}")

        with airflow_cfg_path.open("w") as cfg:
            airflow_cfg.write(cfg)
        logging.debug(f"Saved '{str(airflow_cfg_path)}'")

        # we've changed the configuration here, so we need to call
        # prepare again on the invoker so it re-reads the configuration
        # for the Airflow plugin
        invoker.prepare(session)
        handle = invoker.invoke(
            "initdb",
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )
        initdb = handle.wait()
        if initdb:
            raise SubprocessError("airflow initdb failed", handle)
        logging.debug("Completed `airflow initdb`")
    finally:
        session.close()