def start(ctx, reload, bind, bind_port):
    """Start the Meltano UI: apply host/port overrides, then launch workers.

    Args:
        ctx: Click context; `ctx.obj["project"]` holds the active project.
        reload: Enable API auto-reload (also implied by FLASK_ENV=development).
        bind: Optional host override for `ui.bind_host`.
        bind_port: Optional port override for `ui.bind_port`.
    """
    # CLI flags take precedence over the project's stored UI settings.
    if bind:
        ProjectSettingsService.config_override["ui.bind_host"] = bind
    if bind_port:
        ProjectSettingsService.config_override["ui.bind_port"] = bind_port

    project = ctx.obj["project"]
    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_ui()

    # Validate the UI security configuration before starting anything.
    ensure_secure_setup(project)

    workers = []

    try:
        compiler_worker = MeltanoCompilerWorker(project)
        # Compile once up front so the UI starts with fresh models; a failure
        # here is deliberately non-fatal — the UI still starts, minus models.
        compiler_worker.compiler.compile()
        workers.append(compiler_worker)
    except Exception as e:
        logger.error(f"Initial compilation failed: {e}")

    workers.append(UIAvailableWorker(project))
    workers.append(
        APIWorker(project, reload=reload or os.getenv("FLASK_ENV") == "development")
    )

    cleanup = start_workers(workers)

    def handle_terminate(signal, frame):
        # Stop all workers when the process receives SIGTERM.
        cleanup()

    signal.signal(signal.SIGTERM, handle_terminate)

    logger.info("All workers started.")
def add(ctx, name, extractor, loader, transform, interval, start_date):
    """
    Add a new schedule

    \b
    NAME:\tThe schedule name, must be unique
    EXTRACTOR:\tWhich extractor should be used
    LOADER:\tWhich loader should be used
    INTERVAL:\tCron-like syntax to specify the schedule interval (@daily, @hourly, etc…)
    """
    schedule_service = ctx.obj["schedule_service"]
    session = ctx.obj["session"]

    try:
        analytics = GoogleAnalyticsTracker(schedule_service.project)

        new_schedule = schedule_service.add(
            session, name, extractor, loader, transform, interval, start_date
        )
        analytics.track_meltano_schedule(new_schedule)

        click.echo(f"Scheduled '{new_schedule.name}' at {new_schedule.interval}")
    except ScheduleAlreadyExistsError as serr:
        # Duplicate names are reported as a warning, not a failure.
        click.secho(f"Schedule '{serr.schedule.name}' already exists.", fg="yellow")
    except Exception as err:
        click.secho(f"Scheduling failed: {err}", fg="red", err=True)
        raise click.Abort()
def invoke(project, plugin_type, dump, plugin_name, plugin_args):
    """Invoke a plugin's executable, or dump one of its generated files.

    Exits the process with the plugin's exit code (0 when dumping).

    FIX: plugin lookup and invoker construction now run inside the
    try/finally, so the database session is closed even when
    `find_plugin` or `invoker_factory` raises.
    """
    plugin_type = PluginType.from_cli_argument(plugin_type) if plugin_type else None

    _, Session = project_engine(project)
    session = Session()
    try:
        plugins_service = ProjectPluginsService(project)
        plugin = plugins_service.find_plugin(
            plugin_name, plugin_type=plugin_type, invokable=True
        )

        invoker = invoker_factory(project, plugin, plugins_service=plugins_service)
        with invoker.prepared(session):
            if dump:
                # Dump the requested file instead of running the plugin.
                dump_file(invoker, dump)
                exit_code = 0
            else:
                handle = invoker.invoke(*plugin_args)
                exit_code = handle.wait()
    except SubprocessError as err:
        logger.error(err.stderr)
        raise
    finally:
        session.close()

    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_invoke(
        plugin_name=plugin_name, plugin_args=" ".join(plugin_args)
    )

    sys.exit(exit_code)
def install(project, plugin_type, plugin_name, include_related):
    """
    Installs all the dependencies of your project based on the meltano.yml file.

    Read more at https://www.meltano.com/docs/command-line-interface.html.
    """
    plugins_service = ProjectPluginsService(project)

    if plugin_type:
        plugin_type = PluginType.from_cli_argument(plugin_type)
        plugins = plugins_service.get_plugins_of_type(plugin_type)
        if plugin_name:
            # Narrow to the explicitly requested plugin names.
            plugins = [plugin for plugin in plugins if plugin.name in plugin_name]
    else:
        plugins = list(plugins_service.plugins())

    if include_related:
        add_service = ProjectAddService(project, plugins_service=plugins_service)
        plugins.extend(
            add_related_plugins(project, plugins, add_service=add_service)
        )

    # We will install the plugins in reverse order, since dependencies
    # are listed after their dependents in `related_plugins`, but should
    # be installed first.
    plugins.reverse()

    click.echo(f"Installing {len(plugins)} plugins...")

    success = install_plugins(project, plugins)

    GoogleAnalyticsTracker(project).track_meltano_install()

    if not success:
        raise CliError("Failed to install plugin(s)")
def start(ctx, reload, bind_port, bind):
    """Start the Meltano UI API worker and its availability/compiler workers."""
    project = ctx.obj["project"]
    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_ui()

    workers = []

    try:
        compiler_worker = MeltanoCompilerWorker(project)
        # Compile once up front; failure is logged but not fatal.
        compiler_worker.compiler.compile()
        workers.append(compiler_worker)
    except Exception as e:
        logger.error(f"Initial compilation failed: {e}")

    # BUG FIX: the URL was a plain string, so the literal text "{bind_port}"
    # was used instead of the actual port; it must be an f-string.
    workers.append(UIAvailableWorker(f"http://localhost:{bind_port}"))
    workers.append(
        APIWorker(
            project,
            f"{bind}:{bind_port}",
            reload=reload or os.getenv("FLASK_ENV") == "development",
        )
    )

    cleanup = start_workers(workers)

    def handle_terminate(signal, frame):
        # Stop all workers on SIGTERM.
        cleanup()

    signal.signal(signal.SIGTERM, handle_terminate)

    logger.info("All workers started.")
def install(project, plugin_type, plugin_name, include_related):
    """
    Installs all the dependencies of your project based on the meltano.yml file.

    Read more at https://www.meltano.com/docs/command-line-interface.html.
    """
    config_service = ConfigService(project)

    if plugin_type:
        plugin_type = PluginType.from_cli_argument(plugin_type)
        plugins = config_service.get_plugins_of_type(plugin_type)
        if plugin_name:
            # Narrow to the explicitly requested plugin names.
            plugins = [plugin for plugin in plugins if plugin.name in plugin_name]
    else:
        plugins = list(config_service.plugins())

    if include_related:
        related = add_related_plugins(
            project,
            plugins,
            add_service=ProjectAddService(project, config_service=config_service),
        )
        plugins.extend(related)

    click.echo(f"Installing {len(plugins)} plugins...")

    success = install_plugins(project, plugins)

    GoogleAnalyticsTracker(project).track_meltano_install()

    if not success:
        raise click.Abort()
def add(ctx, name, extractor, loader, transform, interval, start_date):
    """
    Add a new schedule

    \b
    NAME:\tThe schedule name, must be unique
    EXTRACTOR:\tWhich extractor should be used
    LOADER:\tWhich loader should be used
    INTERVAL:\tCron-like syntax to specify the schedule interval (@daily, @hourly, etc…)
    """
    project = ctx.obj["project"]
    schedule_service = ctx.obj["schedule_service"]

    _, Session = project_engine(project)
    session = Session()

    try:
        analytics = GoogleAnalyticsTracker(schedule_service.project)

        new_schedule = schedule_service.add(
            session, name, extractor, loader, transform, interval, start_date
        )
        analytics.track_meltano_schedule(new_schedule)

        click.echo(f"Scheduled '{new_schedule.name}' at {new_schedule.interval}")
    except ScheduleAlreadyExistsError as serr:
        # Duplicate names are reported as a warning, not a failure.
        click.secho(f"Schedule '{serr.schedule.name}' already exists.", fg="yellow")
    finally:
        session.close()
def invoke(project, plugin_name, plugin_args):
    """Invoke a plugin's executable and exit with its exit code.

    FIX: the session is now created *before* the try block; previously
    `session = Session()` sat inside the try, so a failure while
    constructing the session made `finally: session.close()` raise a
    NameError that masked the real error.
    """
    _, Session = project_engine(project)
    session = Session()
    try:
        config_service = ConfigService(project)
        plugin = config_service.find_plugin(plugin_name)
        service = invoker_factory(project, plugin, prepare_with_session=session)
        handle = service.invoke(*plugin_args)
        exit_code = handle.wait()

        tracker = GoogleAnalyticsTracker(project)
        tracker.track_meltano_invoke(
            plugin_name=plugin_name, plugin_args=" ".join(plugin_args)
        )

        # SystemExit is not an Exception, so the handler below won't catch it.
        sys.exit(exit_code)
    except Exception as err:
        logging.exception(err)
        click.secho(f"An error occured: {err}.", fg="red")
        raise click.Abort() from err
    finally:
        session.close()
def elt(project, extractor, loader, dry, transform, job_id):
    """
    meltano elt EXTRACTOR_NAME LOADER_NAME

    extractor_name: Which extractor should be used in this extraction
    loader_name: Which loader should be used in this extraction
    """
    job_logging_service = JobLoggingService(project)
    # Default job ID is a timestamped name unique to this run.
    job = Job(
        job_id=job_id
        or f'job_{datetime.datetime.now().strftime("%Y%m%d-%H:%M:%S.%f")}'
    )

    _, Session = project_engine(project)
    session = Session()

    try:
        # job.run tracks job state; create_log + OutputLogger capture all
        # output of the run into the job's log file.
        with job.run(session), job_logging_service.create_log(
            job.job_id, job.run_id
        ) as log_file, OutputLogger(log_file):
            try:
                # Make sure the involved plugins are installed before running.
                success = install_missing_plugins(project, extractor, loader, transform)

                if not success:
                    raise click.Abort()

                elt_context = (
                    ELTContextBuilder(project)
                    .with_job(job)
                    .with_extractor(extractor)
                    .with_loader(loader)
                    .with_transform(transform)
                    .context(session)
                )

                # transform == "only" skips extract & load entirely.
                if transform != "only":
                    run_extract_load(elt_context, session, dry_run=dry)
                else:
                    click.secho("Extract & load skipped.", fg="yellow")

                if elt_context.transformer:
                    # Use a new session for the Transform Part to address the last
                    # update for Job state not being saved in the DB
                    transform_session = Session()
                    try:
                        run_transform(elt_context, transform_session, dry_run=dry)
                    finally:
                        transform_session.close()
                else:
                    click.secho("Transformation skipped.", fg="yellow")
            except Exception as err:
                logging.error(
                    f"ELT could not complete, an error happened during the process: {err}"
                )
                raise click.Abort()
    finally:
        session.close()

    # fmt: on
    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_elt(extractor=extractor, loader=loader, transform=transform)
def discover(project, plugin_type):
    # List the available plugins for this project, grouped by type.
    discover_service = PluginDiscoveryService(project)
    try:
        for plugin_type, plugins in discover_service.discover(plugin_type).items():
            click.secho(plugin_type, fg="green")
            for plugin in plugins:
                click.echo(plugin)

        GoogleAnalyticsTracker(project).track_meltano_discover(plugin_type=plugin_type)
    except Exception as e:
        click.secho("Cannot list available plugins.", fg="red")
        raise click.ClickException(str(e))
def ui(project, reload, bind_port, bind):
    """Start the Meltano UI together with its background workers."""
    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_ui()

    loop = asyncio.get_event_loop()

    # we need to prime the ChildWatcher here so we can
    # call subprocesses asynchronously from threads
    #
    # see https://docs.python.org/3/library/asyncio-subprocess.html#subprocess-and-threads
    # TODO: remove when running on Python 3.8
    asyncio.get_child_watcher()

    workers = []

    if not truthy(os.getenv("MELTANO_DISABLE_AIRFLOW", False)):
        workers.append(AirflowWorker(project))

    workers.append(MeltanoCompilerWorker(project))

    # we need to whitelist the loaders here because not
    # all the loaders support dbt in the first place
    dbt_docs_loader = os.getenv("MELTANO_DBT_DOCS_LOADER", "target-postgres")
    if dbt_docs_loader:
        workers.append(DbtWorker(project, dbt_docs_loader, loop=loop))
    else:
        logging.info(
            "No loader enabled for dbt docs generation, set the MELTANO_DBT_DOCS_LOADER variable to enable one."
        )

    # BUG FIX: the URL was a plain string, so the literal text "{bind_port}"
    # was used instead of the actual port; it must be an f-string.
    workers.append(UIAvailableWorker(f"http://localhost:{bind_port}"))
    workers.append(
        APIWorker(
            project,
            f"{bind}:{bind_port}",
            reload=reload or os.getenv("FLASK_ENV") == "development",
        )
    )

    cleanup = start_workers(workers)

    def handle_terminate(signal, frame):
        # Stop all workers on SIGTERM.
        cleanup()

    signal.signal(signal.SIGTERM, handle_terminate)

    logger.info("All workers started.")
def grant(project, db, spec, dry, diff, refresh):
    """Grant the permissions provided in the provided specification file."""
    try:
        if not dry:
            # Real runs are not implemented; only --dry is accepted.
            click.secho("Error: Only dry runs are supported at the moment", fg="red")
            sys.exit(1)

        sql_commands = grant_permissions(db, spec, dry_run=dry, refresh=refresh)

        GoogleAnalyticsTracker(project).track_meltano_permissions_grant(db=db, dry=dry)

        click.secho()
        if diff:
            click.secho(
                "SQL Commands generated for given spec file (Full diff with both new and already granted commands):"
            )
        else:
            click.secho("SQL Commands generated for given spec file:")
        click.secho()

        prefix = ""
        for command in sql_commands:
            granted_already = command["already_granted"] and not refresh
            if granted_already and not diff:
                # Without --diff, already-granted commands are omitted.
                continue
            if granted_already:
                color = "cyan"
                prefix = " "
            else:
                color = "green"
                if diff:
                    prefix = "+ "
            click.secho(f"{prefix}{command['sql']};", fg=color)
    except SpecLoadingError as exc:
        for line in str(exc).splitlines():
            click.secho(line, fg="red")
        sys.exit(1)
def add(ctx, project, plugin_type, plugin_name, **flags):
    # Pick the add service: custom plugins get their own service, but
    # --custom is rejected for plugin types that don't support it.
    if flags["custom"]:
        if plugin_type in ("transformer", "transform", "orchestrator"):
            click.secho(f"--custom is not supported for {ctx.invoked_subcommand}")
            raise click.Abort()
        service = ProjectAddCustomService(project)
    else:
        service = ProjectAddService(project)

    add_plugin(
        service,
        project,
        PluginType(f"{plugin_type}s"),
        plugin_name,
        include_related=flags["include_related"],
    )

    GoogleAnalyticsTracker(project).track_meltano_add(
        plugin_type=plugin_type, plugin_name=plugin_name
    )
def init(ctx, project_name, no_usage_stats):
    """
    Creates a new Meltano project
    """
    existing = ctx.obj["project"]
    if existing:
        logging.warning(f"Found meltano project at: {existing.root}")
        raise click.ClickException(
            "`meltano init` cannot run inside a Meltano project.")

    init_service = ProjectInitService(project_name)
    try:
        new_project = init_service.init()
        init_service.echo_instructions()

        tracker = GoogleAnalyticsTracker(new_project)
        if no_usage_stats:
            tracker.update_permission_to_track(False)
        else:
            tracker.track_meltano_init(project_name=project_name)
    except ProjectInitServiceError:
        click.secho(f"Directory {project_name} already exists!", fg="red")
        raise click.Abort()
    except SubprocessError as proc_err:
        click.secho(str(proc_err), fg="red")
        # stderr may be a stream rather than text; normalize to a string.
        stderr = proc_err.process.stderr
        if not isinstance(stderr, str):
            stderr = stderr.read()
        click.secho(stderr, err=True)
        raise click.Abort()
def select(project, extractor, entities_filter, attributes_filter, **flags):
    # Either show the current selection (--list) or add new selection rules.
    try:
        if flags["list"]:
            show(project, extractor, show_all=flags["all"])
        else:
            add(
                project,
                extractor,
                entities_filter,
                attributes_filter,
                exclude=flags["exclude"],
            )

        GoogleAnalyticsTracker(project).track_meltano_select(
            extractor=extractor,
            entities_filter=entities_filter,
            attributes_filter=attributes_filter,
            flags=flags,
        )
    except PluginExecutionError as err:
        raise CliError(f"Cannot list the selected attributes: {err}") from err
def init(ctx, project_name, no_usage_stats):
    """
    Creates a new Meltano project
    """
    if ctx.obj["project"]:
        logging.warning(f"Found meltano project at: {ctx.obj['project'].root}")
        raise CliError("`meltano init` cannot run inside a Meltano project.")

    if no_usage_stats:
        # Opt out of anonymous usage stats before the project is created.
        ProjectSettingsService.config_override[
            "send_anonymous_usage_stats"
        ] = False

    init_service = ProjectInitService(project_name)
    try:
        project = init_service.init()
        init_service.echo_instructions()

        GoogleAnalyticsTracker(project).track_meltano_init(project_name=project_name)
    except SubprocessError as err:
        logger.error(err.stderr)
        raise
def install(project):
    """
    Installs all the dependencies of your project based on the meltano.yml file.
    Read more at https://www.meltano.com/docs/command-line-interface.html#command-line-interface.
    """
    install_service = PluginInstallService(project)
    install_status = install_service.install_all_plugins(install_status_update)

    num_installed = len(install_status["installed"])
    num_failed = len(install_status["errors"])

    # Summary color: red when nothing was installed, yellow on a partial
    # failure, green otherwise.
    # FIX: the previous `num_failed >= 0 and num_installed == 0` test had a
    # dead, always-true comparison (`num_failed >= 0`); this is the
    # behavior-identical simplification.
    fg = "green"
    if num_installed == 0:
        fg = "red"
    elif num_failed > 0 and num_installed > 0:
        fg = "yellow"

    click.secho(
        f"{num_installed}/{num_installed+num_failed} plugins installed.", fg=fg)

    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_install()
def start(ctx, reload, bind_port, bind):
    """Start the Meltano UI API plus Airflow (if installed) and compiler workers."""
    project = ctx.obj["project"]
    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_ui()

    workers = []

    if not truthy(os.getenv("MELTANO_DISABLE_AIRFLOW", False)):
        try:
            # Only start the Airflow worker when the plugin is present.
            config_service = ConfigService(project)
            config_service.find_plugin("airflow")
            workers.append(AirflowWorker(project))
        except PluginMissingError:
            pass

    try:
        compiler_worker = MeltanoCompilerWorker(project)
        # Compile once up front; failure is logged but not fatal.
        compiler_worker.compiler.compile()
        workers.append(compiler_worker)
    except Exception as e:
        logger.error(f"Initial compilation failed: {e}")

    # BUG FIX: the URL was a plain string, so the literal text "{bind_port}"
    # was used instead of the actual port; it must be an f-string.
    workers.append(UIAvailableWorker(f"http://localhost:{bind_port}"))
    workers.append(
        APIWorker(
            project,
            f"{bind}:{bind_port}",
            reload=reload or os.getenv("FLASK_ENV") == "development",
        ))

    cleanup = start_workers(workers)

    def handle_terminate(signal, frame):
        # Stop all workers on SIGTERM.
        cleanup()

    signal.signal(signal.SIGTERM, handle_terminate)

    logger.info("All workers started.")
def discover(project, plugin_type):
    # Print the plugin definitions known to Meltano, grouped by type.
    discover_service = PluginDiscoveryService(project)

    plugin_types = (
        list(PluginType)
        if plugin_type == "all"
        else [PluginType.from_cli_argument(plugin_type)]
    )

    for index, plugin_type in enumerate(plugin_types):
        if index:
            # Blank line between consecutive type sections.
            click.echo()

        click.secho(f"{str(plugin_type).capitalize()}", fg="green")

        for plugin_def in discover_service.get_plugins_of_type(plugin_type):
            click.echo(plugin_def.name, nl=False)
            if len(plugin_def.variants) > 1:
                click.echo(f", variants: {plugin_def.variant_labels}")
            else:
                click.echo()

    GoogleAnalyticsTracker(project).track_meltano_discover(plugin_type=plugin_type)
def add_plugin(
    project: Project,
    plugin_type: PluginType,
    plugin_name: str,
    add_service: ProjectAddService,
):
    """Add a plugin to the project, report the outcome, and return the plugin.

    Aborts if the plugin is unknown or unsupported; an already-added plugin
    is only a warning and the existing plugin is returned.
    """
    try:
        plugin = add_service.add(plugin_type, plugin_name)
        if plugin.should_add_to_file(project):
            click.secho(
                f"Added {plugin_type.descriptor} '{plugin_name}' to your Meltano project",
                fg="green",
            )
        else:
            click.secho(
                f"Adding {plugin_type.descriptor} '{plugin_name}' to your Meltano project...",
                fg="green",
            )
    except PluginAlreadyAddedException as err:
        click.secho(
            f"{plugin_type.descriptor.capitalize()} '{plugin_name}' is already in your Meltano project",
            fg="yellow",
            err=True,
        )
        plugin = err.plugin
    except (PluginNotSupportedException, PluginNotFoundError) as err:
        # FIX: route the error to stderr like the warning above, and chain
        # the Abort to its cause so the original error isn't lost.
        click.secho(
            f"Error: {plugin_type.descriptor} '{plugin_name}' is not known to Meltano",
            fg="red",
            err=True,
        )
        raise click.Abort() from err

    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_add(plugin_type=plugin_type, plugin_name=plugin_name)

    return plugin
def select(project, extractor, entities_filter, attributes_filter, **flags):
    """List or update the selected entities/attributes for an extractor."""
    try:
        if flags["list"]:
            show(project, extractor, show_all=flags["all"])
        else:
            add(
                project,
                extractor,
                entities_filter,
                attributes_filter,
                exclude=flags["exclude"],
            )

        tracker = GoogleAnalyticsTracker(project)
        tracker.track_meltano_select(
            extractor=extractor,
            entities_filter=entities_filter,
            attributes_filter=attributes_filter,
            flags=flags,
        )
    except PluginLacksCapabilityError as e:
        logging.exception(e)
        raise click.ClickException(
            "Cannot list the selected properties: "
            "the tap does not support schema discovery or selection."
        ) from e
    except PluginExecutionError as e:
        logging.exception(e)
        # BUG FIX: the `meltano invoke` hint was not an f-string, so the
        # literal text "{extractor}" was shown instead of the plugin name.
        raise click.ClickException(
            "Cannot list the selected properties: "
            "there was a problem running the tap with `--discover`. "
            "Make sure the tap supports `--discover` and run "
            f"`meltano invoke {extractor} --discover` to make "
            "sure it runs correctly."
        ) from e
    except Exception as e:
        raise click.ClickException(str(e)) from e
def init(ctx, project_name, no_usage_stats):
    # Refuse to create a project nested inside an existing Meltano project.
    if ctx.obj["project"]:
        logging.warning(f"Found meltano project at: {ctx.obj['project'].root}")
        raise click.ClickException(
            "`meltano init` cannot run inside a Meltano project."
        )

    init_service = ProjectInitService(project_name)
    try:
        new_project = init_service.init()
        init_service.echo_instructions()

        tracker = GoogleAnalyticsTracker(new_project)
        if no_usage_stats:
            # Record the opt-out instead of emitting a tracking event.
            tracker.update_permission_to_track(False)
        else:
            tracker.track_meltano_init(project_name=project_name)
    except ProjectInitServiceError:
        click.secho(f"Directory {project_name} already exists!", fg="red")
        raise click.Abort()
def orchestrator(ctx, project, plugin_name):
    # Register the orchestrator plugin, then record the addition.
    add_plugin(ctx.obj["add_service"], project, PluginType.ORCHESTRATORS, plugin_name)

    GoogleAnalyticsTracker(project).track_meltano_add(
        plugin_type="orchestrator", plugin_name=plugin_name
    )
def create_app(config={}):
    """Build and configure the Meltano UI Flask application.

    Layers configuration (package defaults, `ui.cfg`, environment overrides,
    then the caller-supplied `config`), wires up extensions, URL converters,
    blueprints and analytics, and mounts the OAuth service under `/-/oauth`.

    NOTE(review): the mutable default `config={}` is shared across calls;
    it appears to be read-only here, but confirm callers never mutate it.
    """
    project = Project.find()

    app = Flask(
        __name__, instance_path=str(project.root), instance_relative_config=True
    )

    # make sure we have the latest environment loaded
    importlib.reload(meltano.api.config)

    # Configuration layering: defaults < ui.cfg < env vars < caller config.
    app.config.from_object("meltano.api.config")
    app.config.from_pyfile("ui.cfg", silent=True)
    app.config.from_object("meltano.api.config.EnvVarOverrides")
    app.config.update(**config)

    if app.env == "production":
        from meltano.api.config import ensure_secure_setup

        # Production hardening and secure-setup validation.
        app.config.from_object("meltano.api.config.Production")
        ensure_secure_setup(app)

    # register
    project_engine(
        project, engine_uri=app.config["SQLALCHEMY_DATABASE_URI"], default=True
    )

    # File logging
    formatter = logging.Formatter(fmt=FORMAT)
    file_handler = logging.handlers.RotatingFileHandler(
        str(project.run_dir("meltano-ui.log")), backupCount=3
    )
    file_handler.setFormatter(formatter)
    logger.setLevel(current_log_level())
    logger.addHandler(file_handler)

    # 1) Extensions
    security_options = {}

    from .models import db
    from .mail import mail
    from .executor import setup_executor
    from .security import security, users, setup_security
    from .security.oauth import setup_oauth
    from .json import setup_json

    db.init_app(app)
    mail.init_app(app)
    setup_executor(app, project)
    setup_security(app, project)
    setup_oauth(app)
    setup_json(app)

    # we need to setup CORS for development
    if app.env == "development":
        CORS(app, origins="http://localhost:8080", supports_credentials=True)

    # 2) Register the URL Converters
    from .url_converters import PluginRefConverter

    app.url_map.converters["plugin_ref"] = PluginRefConverter

    # 3) Register the controllers
    from .controllers.dashboards import dashboardsBP
    from .controllers.embeds import embedsBP
    from .controllers.reports import reportsBP
    from .controllers.repos import reposBP
    from .controllers.settings import settingsBP
    from .controllers.sql import sqlBP
    from .controllers.orchestrations import orchestrationsBP
    from .controllers.plugins import pluginsBP
    from .controllers.root import root, api_root

    app.register_blueprint(dashboardsBP)
    app.register_blueprint(embedsBP)
    app.register_blueprint(reportsBP)
    app.register_blueprint(reposBP)
    app.register_blueprint(settingsBP)
    app.register_blueprint(sqlBP)
    app.register_blueprint(orchestrationsBP)
    app.register_blueprint(pluginsBP)
    app.register_blueprint(root)
    app.register_blueprint(api_root)

    if app.config["PROFILE"]:
        from .profiler import init

        init(app)

    # Notifications
    if app.config["MELTANO_NOTIFICATION"]:
        from .events import notifications

        notifications.init_app(app)
        logger.info("Notifications are enabled.")
    else:
        logger.info("Notifications are disabled.")

    # Google Analytics setup
    tracker = GoogleAnalyticsTracker(project)

    @app.before_request
    def setup_js_context():
        # setup the appUrl
        appUrl = urlsplit(request.host_url)
        g.jsContext = {"appUrl": appUrl.geturl()[:-1]}

        # Tracking info is only exposed when anonymous stats are enabled.
        if tracker.send_anonymous_usage_stats:
            g.jsContext["isSendAnonymousUsageStats"] = True
            g.jsContext["trackingID"] = app.config["MELTANO_UI_TRACKING_ID"]
            g.jsContext["embedTrackingID"] = app.config[
                "MELTANO_EMBED_TRACKING_ID"]
            g.jsContext["projectId"] = tracker.project_id

        g.jsContext["isNotificationEnabled"] = app.config[
            "MELTANO_NOTIFICATION"]
        g.jsContext["version"] = meltano.__version__

        # setup the oauthServiceUrl
        g.jsContext["oauthServiceUrl"] = app.config[
            "MELTANO_OAUTH_SERVICE_URL"]
        g.jsContext["oauthServiceProviders"] = app.config[
            "MELTANO_OAUTH_SERVICE_PROVIDERS"]

    @app.after_request
    def after_request(res):
        # Advertise the running Meltano version on every response.
        res.headers[VERSION_HEADER] = meltano.__version__
        return res

    # create the dispatcher to host the `OAuthService`
    app.wsgi_app = DispatcherMiddleware(app.wsgi_app, {"/-/oauth": oauth_service})

    return app
def loader(ctx, project, plugin_name):
    # Register the loader plugin, then record the addition.
    add_plugin(ctx.obj["add_service"], project, PluginType.LOADERS, plugin_name)

    GoogleAnalyticsTracker(project).track_meltano_add(
        plugin_type="loader", plugin_name=plugin_name
    )
def elt(
    project,
    extractor,
    loader,
    transform,
    dry,
    full_refresh,
    select,
    exclude,
    catalog,
    state,
    dump,
    job_id,
    force,
):
    """
    meltano elt EXTRACTOR_NAME LOADER_NAME

    extractor_name: Which extractor should be used in this extraction
    loader_name: Which loader should be used in this extraction
    """
    # Entity selection filter: explicit selects plus negated excludes.
    select_filter = [*select, *(f"!{entity}" for entity in exclude)]

    # Default job ID: timestamp--extractor--loader, unique per run.
    job = Job(
        job_id=job_id
        or f'{datetime.datetime.utcnow().strftime("%Y-%m-%dT%H%M%S")}--{extractor}--{loader}'
    )

    _, Session = project_engine(project)
    session = Session()

    try:
        plugins_service = ProjectPluginsService(project)
        context_builder = _elt_context_builder(
            project,
            job,
            session,
            extractor,
            loader,
            transform,
            dry_run=dry,
            full_refresh=full_refresh,
            select_filter=select_filter,
            catalog=catalog,
            state=state,
            plugins_service=plugins_service,
        )

        if dump:
            # Dump the requested generated file instead of running the job.
            dump_file(context_builder, dump)
        else:
            run_async(
                _run_job(project, job, session, context_builder, force=force))
    finally:
        session.close()

    tracker = GoogleAnalyticsTracker(project)
    tracker.track_meltano_elt(extractor=extractor, loader=loader, transform=transform)
def transformer(ctx, project, plugin_name):
    # Register the transformer plugin, then record the addition.
    add_plugin(ctx.obj["add_service"], project, PluginType.TRANSFORMERS, plugin_name)

    GoogleAnalyticsTracker(project).track_meltano_add(
        plugin_type="transformer", plugin_name=plugin_name
    )
def create_app(config={}):
    """Build and configure the Meltano UI Flask application (settings-service based).

    Configuration comes from the package defaults, the project's settings
    service, then the caller-supplied `config`. Registers extensions, URL
    converters, blueprints, analytics context, error handlers, and mounts
    the OAuth service under `/-/oauth`.

    NOTE(review): the mutable default `config={}` is shared across calls;
    it appears to be read-only here, but confirm callers never mutate it.
    """
    project = Project.find()
    setup_logging(project)

    settings_service = ProjectSettingsService(project)

    project_engine(project, default=True)

    app = Flask(
        __name__, instance_path=str(project.root), instance_relative_config=True
    )

    # make sure we have the latest environment loaded
    importlib.reload(meltano.api.config)

    # Configuration layering: defaults < project settings < caller config.
    app.config.from_object("meltano.api.config")
    app.config.from_mapping(**meltano.api.config.ProjectSettings(project).as_dict())
    app.config.from_mapping(**config)

    # File logging
    file_handler = logging.handlers.RotatingFileHandler(
        str(project.run_dir("meltano-ui.log")), backupCount=3
    )
    formatter = logging.Formatter(fmt=FORMAT)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    # 1) Extensions
    security_options = {}

    from .models import db
    from .mail import mail
    from .executor import setup_executor
    from .security import security, users, setup_security
    from .security.oauth import setup_oauth
    from .json import setup_json

    db.init_app(app)
    mail.init_app(app)
    setup_executor(app, project)
    setup_security(app, project)
    setup_oauth(app)
    setup_json(app)

    # we need to setup CORS for development
    if app.env == "development":
        CORS(app, origins="http://localhost:8080", supports_credentials=True)

    # 2) Register the URL Converters
    from .url_converters import PluginRefConverter

    app.url_map.converters["plugin_ref"] = PluginRefConverter

    # 3) Register the controllers
    from .controllers.dashboards import dashboardsBP
    from .controllers.embeds import embedsBP
    from .controllers.reports import reportsBP
    from .controllers.repos import reposBP
    from .controllers.settings import settingsBP
    from .controllers.sql import sqlBP
    from .controllers.orchestrations import orchestrationsBP
    from .controllers.plugins import pluginsBP
    from .controllers.root import root, api_root

    app.register_blueprint(dashboardsBP)
    app.register_blueprint(embedsBP)
    app.register_blueprint(reportsBP)
    app.register_blueprint(reposBP)
    app.register_blueprint(settingsBP)
    app.register_blueprint(sqlBP)
    app.register_blueprint(orchestrationsBP)
    app.register_blueprint(pluginsBP)
    app.register_blueprint(root)
    app.register_blueprint(api_root)

    if app.config["PROFILE"]:
        from .profiler import init

        init(app)

    # Notifications
    if settings_service.get("ui.notification"):
        from .events import notifications

        notifications.init_app(app)
        logger.debug("Notifications are enabled.")
    else:
        logger.debug("Notifications are disabled.")

    # Google Analytics setup
    tracker = GoogleAnalyticsTracker(project)

    @app.before_request
    def setup_js_context():
        # setup the appUrl
        appUrl = urlsplit(request.host_url)
        g.jsContext = {"appUrl": appUrl.geturl()[:-1], "version": meltano.__version__}

        # Map of frontend context keys to project setting names.
        setting_map = {
            "isSendAnonymousUsageStats": "send_anonymous_usage_stats",
            "projectId": "project_id",
            "trackingID": "tracking_ids.ui",
            "embedTrackingID": "tracking_ids.ui_embed",
            "isProjectReadonlyEnabled": "project_readonly",
            "isReadonlyEnabled": "ui.readonly",
            "isAnonymousReadonlyEnabled": "ui.anonymous_readonly",
            "isNotificationEnabled": "ui.notification",
            "isAnalysisEnabled": "ui.analysis",
            "logoUrl": "ui.logo_url",
            "oauthServiceUrl": "oauth_service.url",
        }
        for context_key, setting_name in setting_map.items():
            g.jsContext[context_key] = settings_service.get(setting_name)

        # Providers are stored as a comma-separated string; drop empties.
        providers = settings_service.get("oauth_service.providers")
        g.jsContext["oauthServiceProviders"] = [
            provider for provider in providers.split(",") if provider
        ]

    @app.after_request
    def after_request(res):
        # Advertise the running Meltano version on every response.
        res.headers[VERSION_HEADER] = meltano.__version__
        return res

    @app.errorhandler(500)
    def internal_error(exception):
        logger.info(f"Error: {exception}")
        return jsonify({"error": True, "code": str(exception)}), 500

    @app.errorhandler(ProjectReadonly)
    def _handle(ex):
        # Read-only projects answer with a dedicated status code.
        return (jsonify({"error": True, "code": str(ex)}), HTTP_READONLY_CODE)

    # create the dispatcher to host the `OAuthService`
    app.wsgi_app = DispatcherMiddleware(
        app.wsgi_app, {"/-/oauth": create_oauth_service()}
    )

    return app
def extractor(ctx, project, plugin_name):
    # Register the extractor plugin, then record the addition.
    add_plugin(ctx.obj["add_service"], project, PluginType.EXTRACTORS, plugin_name)

    GoogleAnalyticsTracker(project).track_meltano_add(
        plugin_type="extractor", plugin_name=plugin_name
    )
def model(ctx, project, plugin_name):
    # Register the model plugin, then record the addition.
    add_plugin(ctx.obj["add_service"], project, PluginType.MODELS, plugin_name)

    GoogleAnalyticsTracker(project).track_meltano_add(
        plugin_type="model", plugin_name=plugin_name
    )