def _elt_context_builder(
    project,
    job,
    session,
    extractor,
    loader,
    transform,
    dry_run=False,
    full_refresh=False,
    select_filter=None,
    catalog=None,
    state=None,
    plugins_service=None,
):
    """Assemble an ``ELTContextBuilder`` configured for a single ELT run.

    Args:
        project: the active Meltano project.
        job: the ``Job`` record tracking this run.
        session: DB session passed through to the builder.
        extractor: extractor plugin name.
        loader: loader plugin name.
        transform: transform mode; ``"skip"`` disables transform lookup,
            ``"only"`` marks a transform-only run, any other value is
            forwarded (or replaced by the transform found for the extractor).
        dry_run: when True, the run is a dry run.
        full_refresh: when True, perform a full refresh.
        select_filter: optional list of stream selection filters.
        catalog: optional catalog file/state to use.
        state: optional state to use.
        plugins_service: plugins service forwarded to the builder and lookup.

    Returns:
        A fully configured ``ELTContextBuilder``.
    """
    # BUG FIX: the default was a mutable `[]`, which is shared across calls
    # and would leak filters between invocations. Use a None sentinel instead.
    if select_filter is None:
        select_filter = []

    transform_name = None
    if transform != "skip":
        # Resolve the transform plugin associated with this extractor,
        # unless transforms are skipped entirely.
        transform_name = _find_transform_for_extractor(
            extractor, plugins_service=plugins_service
        )

    return (
        ELTContextBuilder(project, plugins_service=plugins_service)
        .with_session(session)
        .with_job(job)
        .with_extractor(extractor)
        .with_loader(loader)
        # Fall back to the raw `transform` value when no plugin was found.
        .with_transform(transform_name or transform)
        .with_dry_run(dry_run)
        .with_only_transform(transform == "only")
        .with_full_refresh(full_refresh)
        .with_select_filter(select_filter)
        .with_catalog(catalog)
        .with_state(state)
    )
def elt_context_builder(project, plugin_settings_service, plugin_discovery_service):
    """Build and return an ``ELTContextBuilder`` wired with the given services.

    Args:
        project: the active Meltano project.
        plugin_settings_service: settings service passed to the builder.
        plugin_discovery_service: discovery service passed to the builder.
    """
    builder = ELTContextBuilder(
        project,
        plugin_settings_service=plugin_settings_service,
        plugin_discovery_service=plugin_discovery_service,
    )
    return builder
def elt(project, extractor, loader, dry, transform, job_id):
    """
    meltano elt EXTRACTOR_NAME LOADER_NAME

    extractor_name: Which extractor should be used in this extraction
    loader_name: Which loader should be used in this extraction
    """
    job_logging_service = JobLoggingService(project)
    # Fall back to a timestamp-derived job_id when none was provided.
    job = Job(
        job_id=job_id or f'job_{datetime.datetime.now().strftime("%Y%m%d-%H:%M:%S.%f")}')
    _, Session = project_engine(project)
    session = Session()
    try:
        # Run inside the job's lifecycle context, with all output captured
        # into a per-job log file via OutputLogger.
        with job.run(session), job_logging_service.create_log(
                job.job_id, job.run_id) as log_file, OutputLogger(log_file):
            try:
                # Install any plugins required for this run before building
                # the context; abort the CLI command on failure.
                success = install_missing_plugins(project, extractor, loader, transform)

                if not success:
                    raise click.Abort()

                elt_context = (ELTContextBuilder(project).with_job(
                    job).with_extractor(extractor).with_loader(
                        loader).with_transform(transform).context(session))

                # transform == "only" means skip extract & load entirely.
                if transform != "only":
                    run_extract_load(elt_context, session, dry_run=dry)
                else:
                    click.secho("Extract & load skipped.", fg="yellow")

                if elt_context.transformer:
                    # Use a new session for the Transform Part to address the last
                    # update for Job state not being saved in the DB
                    transform_session = Session()
                    try:
                        run_transform(elt_context, transform_session, dry_run=dry)
                    finally:
                        transform_session.close()
                else:
                    click.secho("Transformation skipped.", fg="yellow")
            except Exception as err:
                # Top-level CLI boundary: log the failure, then abort so the
                # command exits non-zero. The job context manager above sees
                # the exception and can record the job as failed.
                logging.error(
                    f"ELT could not complete, an error happened during the process: {err}"
                )
                raise click.Abort()
    finally:
        # Always release the primary DB session, even on abort.
        session.close()

    # fmt: on

    # Fire-and-forget usage analytics for this ELT invocation.
    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_elt(extractor=extractor, loader=loader, transform=transform)
def get_db_engine(self, extractor, loader, transform):
    """Create a SQLAlchemy engine for the analyze connection of an ELT context.

    Builds an ELT context for the given extractor/loader/transform, derives
    the analyze connection URI from it, and returns a ready engine. For
    postgres, a ``first_connect`` hook sets the connection's search_path to
    the configured schema.

    Args:
        extractor: extractor plugin name.
        loader: loader plugin name.
        transform: transform mode forwarded to the context builder.

    Returns:
        A configured ``sqlalchemy`` engine.

    Raises:
        UnsupportedConnectionDialect: when the dialect is not in
            ENABLED_DIALECTS or the connection service reports it as
            unsupported.
    """
    project = Project.find()
    context = (
        ELTContextBuilder(project)
        .with_session(db.session)
        .with_extractor(extractor)
        .with_loader(loader)
        .with_transform(transform)
        .context()
    )
    connection_service = ConnectionService(context)

    engine_hooks = []
    dialect = connection_service.dialect
    engine_uri = None
    params = None

    try:
        params = connection_service.analyze_params()
        engine_uri = connection_service.analyze_uri()
        if dialect not in ENABLED_DIALECTS:
            raise UnsupportedConnectionDialect(dialect)
    except DialectNotSupportedError:
        # Normalize the service's error into this module's exception type.
        raise UnsupportedConnectionDialect(dialect)

    if dialect == "postgres":

        def set_connection_schema(raw, conn):
            # `params` is closed over from the enclosing scope.
            schema = params["schema"]
            with raw.cursor() as cursor:
                # FIX: dropped the unused `res =` binding — SET returns no rows.
                # NOTE(review): schema is interpolated into SQL; it comes from
                # connection params, presumably trusted config — confirm.
                cursor.execute(f"SET search_path TO {schema};")
                logging.debug(f"Connection schema set to {schema}")

        # Defer listener registration until the engine exists.
        engine_hooks.append(
            lambda engine: listen(engine, "first_connect", set_connection_schema)
        )

    engine = sqlalchemy.create_engine(engine_uri)

    for hook in engine_hooks:
        hook(engine)

    return engine
def elt_context_builder(project, project_plugins_service):
    """Return an ``ELTContextBuilder`` for the project using the given plugins service."""
    builder = ELTContextBuilder(
        project,
        plugins_service=project_plugins_service,
    )
    return builder