def test_max_concurrency_zero():
    handle = ExecutionTargetHandle.for_pipeline_python_file(__file__, 'infinite_loop_pipeline')

    with safe_tempfile_path() as filepath:
        instance = DagsterInstance.local_temp()
        execution_manager = QueueingSubprocessExecutionManager(instance, max_concurrent_runs=0)

        pipeline_run = instance.create_run_for_pipeline(
            pipeline_def=infinite_loop_pipeline,
            environment_dict={'solids': {'loop': {'config': {'file': filepath}}}},
        )
        execution_manager.execute_pipeline(handle, infinite_loop_pipeline, pipeline_run, instance)

        # With a concurrency limit of zero the run can never be dequeued: it is
        # never active, and the solid never creates its signal file.
        assert not execution_manager.is_active(pipeline_run.run_id)
        assert not os.path.exists(filepath)
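Both concurrency tests reference a module-level `infinite_loop_pipeline` that the excerpts do not show. A minimal sketch of what they assume, using the `environment_dict`-era solid config API (the `loop` solid name and `file` config key match the environment dicts above); this is a hypothetical reconstruction, not the repo's exact definition:

```python
import time

from dagster import Field, String, pipeline, solid


@solid(config={'file': Field(String)})
def loop(context):
    # Signal startup by creating the configured file, then block forever so
    # the test must terminate the run explicitly.
    with open(context.solid_config['file'], 'w') as fd:
        fd.write('spinning')
    while True:
        time.sleep(0.1)


@pipeline
def infinite_loop_pipeline():
    loop()
```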
def get_execution_manager(instance):
    execution_manager_settings = instance.dagit_settings.get('execution_manager')
    if execution_manager_settings and execution_manager_settings.get('max_concurrent_runs'):
        return QueueingSubprocessExecutionManager(
            instance, execution_manager_settings.get('max_concurrent_runs')
        )
    return SubprocessExecutionManager(instance)
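The helper keys off the instance's dagit settings. A hedged usage sketch of the selection logic, assuming `dagit_settings` exposes the parsed `dagit:` block of the instance config and that `DagsterInstance.local_temp` accepts config overrides (both are assumptions about this era's API, not confirmed by the excerpt):

```python
# Hypothetical illustration of the config-driven selection, not a repo test.
instance = DagsterInstance.local_temp(
    overrides={'dagit': {'execution_manager': {'max_concurrent_runs': 4}}}
)
manager = get_execution_manager(instance)
# With a limit configured we expect the queueing manager; with no limit (or a
# limit of 0/None) the unbounded SubprocessExecutionManager is returned.
assert isinstance(manager, QueueingSubprocessExecutionManager)
```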
def test_max_concurrency_one():
    handle = ExecutionTargetHandle.for_pipeline_python_file(__file__, 'infinite_loop_pipeline')
    pipeline_def = handle.build_pipeline_definition()

    with safe_tempfile_path() as file_one, safe_tempfile_path() as file_two:
        instance = DagsterInstance.local_temp()
        execution_manager = QueueingSubprocessExecutionManager(instance, max_concurrent_runs=1)

        run_one = instance.create_run_for_pipeline(
            pipeline_def=pipeline_def,
            environment_dict={'solids': {'loop': {'config': {'file': file_one}}}},
        )
        run_two = instance.create_run_for_pipeline(
            pipeline_def=pipeline_def,
            environment_dict={'solids': {'loop': {'config': {'file': file_two}}}},
        )
        execution_manager.execute_pipeline(handle, pipeline_def, run_one, instance)
        execution_manager.execute_pipeline(handle, pipeline_def, run_two, instance)

        # Wait for the first run to start; it signals by creating file_one.
        while not os.path.exists(file_one):
            execution_manager.check()
            time.sleep(0.1)

        # Only one run may be active at a time, so run_two stays queued.
        assert execution_manager.is_active(run_one.run_id)
        assert not execution_manager.is_active(run_two.run_id)
        assert not os.path.exists(file_two)

        # Terminating run_one frees the slot; check() then dequeues run_two.
        assert execution_manager.terminate(run_one.run_id)

        while not os.path.exists(file_two):
            execution_manager.check()
            time.sleep(0.1)

        assert not execution_manager.is_active(run_one.run_id)
        assert execution_manager.is_active(run_two.run_id)
        assert execution_manager.terminate(run_two.run_id)
def create_app(handle, instance, reloader=None):
    check.inst_param(handle, 'handle', ExecutionTargetHandle)
    check.inst_param(instance, 'instance', DagsterInstance)
    check.opt_inst_param(reloader, 'reloader', Reloader)

    app = Flask('dagster-ui')
    sockets = Sockets(app)
    app.app_protocol = lambda environ_path_info: 'graphql-ws'

    schema = create_schema()
    subscription_server = DagsterSubscriptionServer(schema=schema)

    # Use the queueing manager when a max_concurrent_runs limit is configured;
    # otherwise fall back to the unbounded subprocess manager.
    execution_manager_settings = instance.dagit_settings.get('execution_manager')
    if execution_manager_settings and execution_manager_settings.get('max_concurrent_runs'):
        execution_manager = QueueingSubprocessExecutionManager(
            instance, execution_manager_settings.get('max_concurrent_runs')
        )
    else:
        execution_manager = SubprocessExecutionManager(instance)

    warn_if_compute_logs_disabled()

    print('Loading repository...')
    context = DagsterGraphQLContext(
        handle=handle,
        instance=instance,
        execution_manager=execution_manager,
        reloader=reloader,
        version=__version__,
    )

    # Automatically initialize the scheduler every time Dagit loads
    scheduler_handle = context.scheduler_handle
    scheduler = instance.scheduler
    if scheduler_handle:
        if scheduler:
            handle = context.get_handle()
            python_path = sys.executable
            repository_path = handle.data.repository_yaml
            repository = context.get_repository()
            scheduler_handle.up(
                python_path, repository_path, repository=repository, instance=instance
            )
        else:
            warnings.warn(MISSING_SCHEDULER_WARNING)

    app.add_url_rule(
        '/graphql',
        'graphql',
        DagsterGraphQLView.as_view(
            'graphql',
            schema=schema,
            graphiql=True,
            # XXX(freiksenet): Pass proper ws url
            graphiql_template=PLAYGROUND_TEMPLATE,
            executor=Executor(),
            context=context,
        ),
    )
    sockets.add_url_rule(
        '/graphql', 'graphql', dagster_graphql_subscription_view(subscription_server, context)
    )

    app.add_url_rule(
        # should match the `build_local_download_url`
        '/download/<string:run_id>/<string:step_key>/<string:file_type>',
        'download_view',
        download_view(context),
    )

    # These routes are specifically for the Dagit UI and are not part of the GraphQL
    # API that we want other people to consume, so they're separate for now.
    # Also grabbing the magic global request args dict so that notebook_view is testable.
    app.add_url_rule('/dagit/notebook', 'notebook', lambda: notebook_view(request.args))

    app.add_url_rule('/static/<path:path>/<string:file>', 'static_view', static_view)
    app.add_url_rule('/vendor/<path:path>/<string:file>', 'vendor_view', vendor_view)
    app.add_url_rule('/<string:worker_name>.worker.js', 'worker_view', worker_view)
    app.add_url_rule('/dagit_info', 'sanity_view', info_view)
    app.add_url_rule('/<path:_path>', 'index_catchall', index_view)
    app.add_url_rule('/', 'index', index_view, defaults={'_path': ''})

    CORS(app)

    return app
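Because `create_app` registers a websocket route through flask-sockets, the returned app needs a websocket-capable server rather than plain `app.run()`. A minimal serving sketch following the standard flask-sockets pattern; the host, port, and handler choice are illustrative, and dagit's actual CLI wiring may differ:

```python
# Hypothetical entry point, assuming `handle` and `instance` are already built.
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler

app = create_app(handle, instance)

# WebSocketHandler upgrades /graphql subscription connections; regular HTTP
# routes are served as usual.
server = WSGIServer(('127.0.0.1', 3000), app, handler_class=WebSocketHandler)
server.serve_forever()
```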