def create_app_with_reconstructable_repo(recon_repo, instance, reloader=None):
    check.inst_param(recon_repo, 'recon_repo', ReconstructableRepository)
    check.inst_param(instance, 'instance', DagsterInstance)
    check.opt_inst_param(reloader, 'reloader', Reloader)

    warn_if_compute_logs_disabled()

    print('Loading repository...')

    context = DagsterGraphQLContext(
        instance=instance,
        locations=[InProcessRepositoryLocation(recon_repo, reloader=reloader)],
        version=__version__,
    )

    # Automatically initialize scheduler every time Dagit loads
    scheduler = instance.scheduler
    repository = context.legacy_get_repository_definition()

    if repository.schedule_defs:
        if scheduler:
            python_path = sys.executable
            repository_path = context.legacy_location.get_reconstructable_repository().yaml_path
            reconcile_scheduler_state(
                python_path, repository_path, repository=repository, instance=instance
            )
        else:
            warnings.warn(MISSING_SCHEDULER_WARNING)

    return instantiate_app_with_views(context)
def create_app_with_execution_handle(handle, instance, reloader=None):
    check.inst_param(handle, 'handle', ExecutionTargetHandle)
    check.inst_param(instance, 'instance', DagsterInstance)
    check.opt_inst_param(reloader, 'reloader', Reloader)

    execution_manager = get_execution_manager(instance)

    warn_if_compute_logs_disabled()

    print('Loading repository...')

    context = DagsterGraphQLContext(
        handle=handle,
        instance=instance,
        execution_manager=execution_manager,
        reloader=reloader,
        version=__version__,
    )

    # Automatically initialize scheduler every time Dagit loads
    scheduler_handle = context.scheduler_handle
    scheduler = instance.scheduler

    if scheduler_handle:
        if scheduler:
            handle = context.get_handle()
            python_path = sys.executable
            repository_path = handle.data.repository_yaml
            repository = context.get_repository()
            scheduler_handle.up(
                python_path, repository_path, repository=repository, instance=instance
            )
        else:
            warnings.warn(MISSING_SCHEDULER_WARNING)

    return instantiate_app_with_views(context)
def test_execute_hammer_through_dagit():
    recon_repo = ReconstructableRepository.for_file(
        file_relative_path(__file__, '../../../../examples/dagster_examples/toys/hammer.py'),
        'hammer_pipeline',
    )
    instance = DagsterInstance.local_temp()

    context = DagsterGraphQLContext(
        locations=[InProcessRepositoryLocation(recon_repo)],
        instance=instance,
    )

    selector = get_legacy_pipeline_selector(context, 'hammer_pipeline')

    executor = SyncExecutor()

    variables = {
        'executionParams': {
            'runConfigData': {
                'storage': {'filesystem': {}},
                'execution': {'dask': {'config': {'cluster': {'local': {}}}}},
            },
            'selector': selector,
            'mode': 'default',
        }
    }

    start_pipeline_result = graphql(
        request_string=START_PIPELINE_EXECUTION_MUTATION,
        schema=create_schema(),
        context=context,
        variables=variables,
        executor=executor,
    )

    if start_pipeline_result.errors:
        raise Exception('{}'.format(start_pipeline_result.errors))

    run_id = start_pipeline_result.data['startPipelineExecution']['run']['runId']

    context.drain_outstanding_executions()

    subscription = execute_dagster_graphql(context, SUBSCRIPTION_QUERY, variables={'runId': run_id})

    subscribe_results = []
    subscription.subscribe(subscribe_results.append)

    messages = [x['__typename'] for x in subscribe_results[0].data['pipelineRunLogs']['messages']]

    assert 'PipelineStartEvent' in messages
    assert 'PipelineSuccessEvent' in messages
def define_context(raise_on_error=True, instance=None):
    return DagsterGraphQLContext(
        handle=ExecutionTargetHandle.for_repo_fn(define_repository),
        instance=instance or DagsterInstance.ephemeral(),
        execution_manager=SynchronousExecutionManager(),
        raise_on_error=raise_on_error,
    )
def define_examples_context(raise_on_error=True):
    return DagsterGraphQLContext(
        handle=ExecutionTargetHandle.for_repo_module('dagster_examples', 'define_demo_repo'),
        pipeline_runs=PipelineRunStorage(),
        execution_manager=SynchronousExecutionManager(),
        raise_on_error=raise_on_error,
    )
def define_context(raise_on_error=True):
    return DagsterGraphQLContext(
        RepositoryContainer(repository=define_repository()),
        PipelineRunStorage(),
        execution_manager=SynchronousExecutionManager(),
        raise_on_error=raise_on_error,
    )
def define_subprocess_context_for_file(python_file, fn_name, instance=None):
    return DagsterGraphQLContext(
        handle=ExecutionTargetHandle.for_repo_python_file(python_file, fn_name),
        instance=instance or DagsterInstance.ephemeral(),
        execution_manager=SubprocessExecutionManager(instance),
    )
def define_context(raise_on_error=True, log_dir=None):
    return DagsterGraphQLContext(
        handle=ExecutionTargetHandle.for_repo_fn(define_repository),
        pipeline_runs=PipelineRunStorage(log_dir),
        execution_manager=SynchronousExecutionManager(),
        raise_on_error=raise_on_error,
    )
def create_app_from_workspace(workspace, instance):
    check.inst_param(workspace, 'workspace', Workspace)
    check.inst_param(instance, 'instance', DagsterInstance)

    warn_if_compute_logs_disabled()

    print('Loading repository...')

    locations = []
    for repository_location_handle in workspace.repository_location_handles:
        if isinstance(repository_location_handle, InProcessRepositoryLocationHandle):
            # will need to change for multi repo
            check.invariant(len(repository_location_handle.repository_code_pointer_dict) == 1)
            pointer = next(iter(repository_location_handle.repository_code_pointer_dict.values()))
            recon_repo = ReconstructableRepository(pointer)
            locations.append(InProcessRepositoryLocation(recon_repo))
        elif isinstance(repository_location_handle, PythonEnvRepositoryLocationHandle):
            locations.append(PythonEnvRepositoryLocation(repository_location_handle))
        else:
            check.failed('{} unsupported'.format(repository_location_handle))

    context = DagsterGraphQLContext(instance=instance, locations=locations, version=__version__)

    return instantiate_app_with_views(context)
def create_app(handle, instance):
    check.inst_param(handle, 'handle', ExecutionTargetHandle)
    check.inst_param(instance, 'instance', DagsterInstance)

    app = Flask('dagster-ui')
    sockets = Sockets(app)
    app.app_protocol = lambda environ_path_info: 'graphql-ws'

    schema = create_schema()
    subscription_server = DagsterSubscriptionServer(schema=schema)

    execution_manager = MultiprocessingExecutionManager()

    print('Loading repository...')

    context = DagsterGraphQLContext(
        handle=handle, instance=instance, execution_manager=execution_manager, version=__version__
    )

    app.add_url_rule(
        '/graphql',
        'graphql',
        DagsterGraphQLView.as_view(
            'graphql',
            schema=schema,
            graphiql=True,
            # XXX(freiksenet): Pass proper ws url
            graphiql_template=PLAYGROUND_TEMPLATE,
            executor=Executor(),
            context=context,
        ),
    )
    sockets.add_url_rule(
        '/graphql', 'graphql', dagster_graphql_subscription_view(subscription_server, context)
    )
    app.add_url_rule(
        # should match the `build_local_download_url`
        '/download/<string:run_id>/<string:step_key>/<string:file_type>',
        'download_view',
        download_view(context),
    )

    # these routes are specifically for the Dagit UI and are not part of the graphql
    # API that we want other people to consume, so they're separate for now.
    # Also grabbing the magic global request args dict so that notebook_view is testable
    app.add_url_rule('/dagit/notebook', 'notebook', lambda: notebook_view(request.args))

    app.add_url_rule('/static/<path:path>/<string:file>', 'static_view', static_view)
    app.add_url_rule('/vendor/<path:path>/<string:file>', 'vendor_view', vendor_view)
    app.add_url_rule('/<path:_path>', 'index_catchall', index_view)
    app.add_url_rule('/', 'index', index_view, defaults={'_path': ''})

    CORS(app)

    return app
def define_examples_context():
    return DagsterGraphQLContext(
        handle=ExecutionTargetHandle.for_repo_module('dagster_examples', 'define_demo_repo'),
        instance=DagsterInstance.ephemeral(),
        execution_manager=SynchronousExecutionManager(),
    )
def define_context_for_repository_yaml(path, instance):
    check.inst_param(instance, 'instance', DagsterInstance)
    return DagsterGraphQLContext(
        locations=[
            InProcessRepositoryLocation(ReconstructableRepository.from_legacy_repository_yaml(path))
        ],
        instance=instance,
    )
def define_test_snapshot_context():
    return DagsterGraphQLContext(
        instance=DagsterInstance.ephemeral(),
        workspace=Workspace(
            [RepositoryLocationHandle.create_in_process_location(create_main_recon_repo().pointer)]
        ),
    )
def test_pipelines_python_error():
    ctx = DagsterGraphQLContext(
        handle=ExecutionTargetHandle.for_repo_fn(define_error_pipeline_repo),
        pipeline_runs=InMemoryRunStorage(),
        execution_manager=SynchronousExecutionManager(),
    )
    result = execute_dagster_graphql(ctx, PIPELINES)
    assert result.data['pipelinesOrError']['__typename'] == "PythonError"
def define_context_for_file(python_file, fn_name, instance):
    check.inst_param(instance, 'instance', DagsterInstance)
    return DagsterGraphQLContext(
        locations=[
            InProcessRepositoryLocation(ReconstructableRepository.for_file(python_file, fn_name))
        ],
        instance=instance,
    )
def define_context(raise_on_error=True, log_dir=None, schedule_dir=None):
    return DagsterGraphQLContext(
        handle=ExecutionTargetHandle.for_repo_fn(define_repository),
        pipeline_runs=FilesystemRunStorage(base_dir=log_dir) if log_dir else InMemoryRunStorage(),
        scheduler=TestSystemCronScheduler(schedule_dir) if schedule_dir else None,
        execution_manager=SynchronousExecutionManager(),
        raise_on_error=raise_on_error,
    )
def test_pipelines_python_error():
    ctx = DagsterGraphQLContext(
        RepositoryContainer(repository=define_error_pipeline_repo()),
        PipelineRunStorage(),
        execution_manager=SynchronousExecutionManager(),
    )
    result = execute_dagster_graphql(ctx, PIPELINES)
    assert result.data['pipelinesOrError']['__typename'] == "PythonError"
def create_app(handle, pipeline_run_storage, use_synchronous_execution_manager=False):
    check.inst_param(handle, 'handle', ExecutionTargetHandle)
    check.inst_param(pipeline_run_storage, 'pipeline_run_storage', PipelineRunStorage)
    check.bool_param(use_synchronous_execution_manager, 'use_synchronous_execution_manager')

    app = Flask('dagster-ui')
    sockets = Sockets(app)
    app.app_protocol = lambda environ_path_info: 'graphql-ws'

    schema = create_schema()
    subscription_server = DagsterSubscriptionServer(schema=schema)

    if use_synchronous_execution_manager:
        execution_manager = SynchronousExecutionManager()
    else:
        execution_manager = MultiprocessingExecutionManager()

    context = DagsterGraphQLContext(
        handle=handle,
        pipeline_runs=pipeline_run_storage,
        execution_manager=execution_manager,
        version=__version__,
    )

    app.add_url_rule(
        '/graphql',
        'graphql',
        DagsterGraphQLView.as_view(
            'graphql',
            schema=schema,
            graphiql=True,
            # XXX(freiksenet): Pass proper ws url
            graphiql_template=PLAYGROUND_TEMPLATE,
            executor=Executor(),
            context=context,
        ),
    )
    sockets.add_url_rule(
        '/graphql', 'graphql', dagster_graphql_subscription_view(subscription_server, context)
    )

    # these routes are specifically for the Dagit UI and are not part of the graphql
    # API that we want other people to consume, so they're separate for now.
    # Also grabbing the magic global request args dict so that notebook_view is testable
    app.add_url_rule('/dagit/notebook', 'notebook', lambda: notebook_view(request.args))

    app.add_url_rule('/static/<path:path>/<string:file>', 'static_view', static_view)
    app.add_url_rule('/<path:_path>', 'index_catchall', index_view)
    app.add_url_rule('/', 'index', index_view, defaults={'_path': ''})

    CORS(app)

    return app
def define_context_for_file(python_file, fn_name, instance):
    check.inst_param(instance, "instance", DagsterInstance)
    return DagsterGraphQLContext(
        workspace=Workspace(
            [
                RepositoryLocationHandle.create_in_process_location(
                    CodePointer.from_python_file(python_file, fn_name, None)
                )
            ]
        ),
        instance=instance,
    )
def define_examples_context():
    return DagsterGraphQLContext(
        locations=[
            InProcessRepositoryLocation(
                ReconstructableRepository.for_module(
                    'dagster_examples', 'define_internal_dagit_repository'
                ),
            )
        ],
        instance=DagsterInstance.ephemeral(),
    )
def define_in_process_context(python_file, fn_name, instance):
    check.inst_param(instance, "instance", DagsterInstance)

    return DagsterGraphQLContext(
        workspace=Workspace(
            [
                InProcessRepositoryLocationOrigin(
                    ReconstructableRepository.for_file(python_file, fn_name)
                )
            ]
        ),
        instance=instance,
    )
def define_context_for_repository_yaml(path, instance):
    check.inst_param(instance, "instance", DagsterInstance)
    return DagsterGraphQLContext(
        workspace=Workspace(
            [
                RepositoryLocationHandle.create_in_process_location(
                    ReconstructableRepository.from_legacy_repository_yaml(path).pointer
                )
            ]
        ),
        instance=instance,
    )
def define_examples_context():
    return DagsterGraphQLContext(
        environments=[
            InProcessDagsterEnvironment(
                ReconstructableRepository.for_module('dagster_examples', 'define_demo_repo'),
                execution_manager=SynchronousExecutionManager(),
            )
        ],
        instance=DagsterInstance.ephemeral(),
    )
def define_context_for_repository_yaml(path, instance):
    check.inst_param(instance, 'instance', DagsterInstance)
    return DagsterGraphQLContext(
        environments=[
            InProcessDagsterEnvironment(
                ReconstructableRepository.from_yaml(path),
                execution_manager=SynchronousExecutionManager(),
            )
        ],
        instance=instance,
    )
def define_subprocess_context_for_file(python_file, fn_name, instance):
    check.inst_param(instance, 'instance', DagsterInstance)
    return DagsterGraphQLContext(
        environments=[
            InProcessDagsterEnvironment(
                ReconstructableRepository.for_file(python_file, fn_name),
                execution_manager=SubprocessExecutionManager(instance),
            )
        ],
        instance=instance,
    )
def define_test_snapshot_context():
    return DagsterGraphQLContext(
        instance=DagsterInstance.ephemeral(),
        environments=[
            InProcessDagsterEnvironment(
                ReconstructableRepository.for_file(__file__, 'define_repository'),
                execution_manager=SynchronousExecutionManager(),
            )
        ],
    )
def create_app_with_environments(dagster_environments, instance):
    check.list_param(dagster_environments, 'dagster_environments', of_type=DagsterEnvironment)
    check.inst_param(instance, 'instance', DagsterInstance)

    warn_if_compute_logs_disabled()

    print('Loading repository...')

    context = DagsterGraphQLContext(
        environments=dagster_environments,
        instance=instance,
        version=__version__,
    )

    return instantiate_app_with_views(context)
def test_pipelines_or_error_invalid():
    context = DagsterGraphQLContext(
        handle=ExecutionTargetHandle.for_repo_fn(define_test_repository),
        instance=DagsterInstance.ephemeral(),
        execution_manager=SynchronousExecutionManager(),
    )
    result = execute_dagster_graphql(
        context, '{ pipelinesOrError { ... on PythonError { message } } }'
    )
    msg = result.data['pipelinesOrError']['message']
    assert 'circular reference detected in solid "csolid"' in msg
def define_out_of_process_context(python_file, fn_name, instance):
    check.inst_param(instance, 'instance', DagsterInstance)
    return DagsterGraphQLContext(
        locations=[
            PythonEnvRepositoryLocation(
                RepositoryLocationHandle.create_out_of_process_location(
                    'test_location', {fn_name: FileCodePointer(python_file, fn_name)}
                )
            )
        ],
        instance=instance,
    )
def test_pipelines_or_error_invalid():
    context = DagsterGraphQLContext(
        handle=ExecutionTargetHandle.for_repo_fn(define_test_repository),
        pipeline_runs=PipelineRunStorage(),
        execution_manager=SynchronousExecutionManager(),
    )
    result = execute_dagster_graphql(
        context, '{ pipelinesOrError { ... on InvalidDefinitionError { message } } }'
    )
    msg = result.data['pipelinesOrError']['message']
    assert "Circular reference detected in solid csolid" in msg