def _execute_run_command_body(
    output_file, recon_repo, pipeline_run_id, instance_ref_json,
):
    '''Execute a pipeline run in this process, streaming events over IPC.

    Args:
        output_file: Path handed to ``ipc_write_stream`` for sending events back
            to the parent process.
        recon_repo: Serialized reconstructable-repository pointer used to
            rebuild the pipeline via ``_recon_pipeline``.
        pipeline_run_id: Id of the run to load from the instance and execute.
        instance_ref_json: Serialized instance ref; deserialized by
            ``_get_instance`` (which reports failures over the stream).
    '''
    with ipc_write_stream(output_file) as stream:
        instance = _get_instance(stream, instance_ref_json)
        if not instance:
            # _get_instance already reported the failure over the stream.
            return

        pipeline_run = instance.get_run_by_id(pipeline_run_id)

        pid = os.getpid()
        instance.report_engine_event(
            'Started process for pipeline (pid: {pid}).'.format(pid=pid),
            pipeline_run,
            EngineEventData.in_process(pid, marker_end='cli_api_subprocess_init'),
        )

        recon_pipeline = _recon_pipeline(stream, recon_repo, pipeline_run)

        def _report_framework_error():
            # Single reporting path shared by both unexpected-failure handlers
            # below; reads the active exception via sys.exc_info().
            instance.report_engine_event(
                'An exception was thrown during execution that is likely a framework error, '
                'rather than an error in user code.',
                pipeline_run,
                EngineEventData.engine_error(
                    serializable_error_info_from_exc_info(sys.exc_info())
                ),
            )

        # Perform setup so that termination of the execution will unwind and report to the
        # instance correctly
        setup_interrupt_support()

        try:
            for event in execute_run_iterator(recon_pipeline, pipeline_run, instance):
                stream.send(event)
        except DagsterSubprocessError as err:
            # Subprocess errors that are *all* interrupts are expected during
            # run termination; anything else is treated as a framework error.
            if not all(
                err_info.cls_name == 'KeyboardInterrupt'
                for err_info in err.subprocess_error_infos
            ):
                _report_framework_error()
        except Exception:  # pylint: disable=broad-except
            _report_framework_error()
        finally:
            # Always record process exit, even after a failure above.
            instance.report_engine_event(
                'Process for pipeline exited (pid: {pid}).'.format(pid=pid),
                pipeline_run,
            )
def _execute_run_command_body(recon_pipeline, pipeline_run_id, instance, write_stream_fn):
    '''Execute a pipeline run in this process, forwarding events via *write_stream_fn*.

    Args:
        recon_pipeline: Reconstructable pipeline to execute.
        pipeline_run_id: Id of the run to load from *instance* and execute.
        instance: DagsterInstance used for run lookup and engine-event reporting.
        write_stream_fn: Callable that ships serializable messages back to the
            calling process.
    '''
    # we need to send but the fact that we have loaded the args so the calling
    # process knows it is safe to clean up the temp input file
    write_stream_fn(ExecuteRunArgsLoadComplete())

    pipeline_run = instance.get_run_by_id(pipeline_run_id)

    pid = os.getpid()
    instance.report_engine_event(
        "Started process for pipeline (pid: {pid}).".format(pid=pid),
        pipeline_run,
        EngineEventData.in_process(pid, marker_end="cli_api_subprocess_init"),
    )

    def _report_framework_error():
        # Single reporting path shared by both unexpected-failure handlers
        # below; reads the active exception via sys.exc_info().
        instance.report_engine_event(
            "An exception was thrown during execution that is likely a framework error, "
            "rather than an error in user code.",
            pipeline_run,
            EngineEventData.engine_error(
                serializable_error_info_from_exc_info(sys.exc_info())
            ),
        )

    # Perform setup so that termination of the execution will unwind and report to the
    # instance correctly
    setup_interrupt_support()

    try:
        for event in execute_run_iterator(recon_pipeline, pipeline_run, instance):
            write_stream_fn(event)
    except KeyboardInterrupt:
        # Direct interrupt of this process: expected during termination.
        instance.report_engine_event(
            message="Pipeline execution terminated by interrupt",
            pipeline_run=pipeline_run,
        )
    except DagsterSubprocessError as err:
        # Subprocess errors that are *all* interrupts are expected during
        # run termination; anything else is treated as a framework error.
        if not all(
            err_info.cls_name == "KeyboardInterrupt"
            for err_info in err.subprocess_error_infos
        ):
            _report_framework_error()
    except Exception:  # pylint: disable=broad-except
        _report_framework_error()
    finally:
        # Always record process exit, even after a failure above.
        instance.report_engine_event(
            "Process for pipeline exited (pid: {pid}).".format(pid=pid),
            pipeline_run,
        )
def _execute_run_command_body(output_file, recon_pipeline, pipeline_run_id, instance_ref):
    '''Execute a pipeline run in this process, streaming events over IPC.

    Args:
        output_file: Path handed to ``ipc_write_stream`` for sending events back
            to the parent process.
        recon_pipeline: Reconstructable pipeline to execute.
        pipeline_run_id: Id of the run to load from the instance and execute.
        instance_ref: InstanceRef used to construct the DagsterInstance.
    '''
    with ipc_write_stream(output_file) as stream:
        # we need to send but the fact that we have loaded the args so the calling
        # process knows it is safe to clean up the temp input file
        stream.send(ExecuteRunArgsLoadComplete())

        instance = DagsterInstance.from_ref(instance_ref)
        pipeline_run = instance.get_run_by_id(pipeline_run_id)

        pid = os.getpid()
        instance.report_engine_event(
            'Started process for pipeline (pid: {pid}).'.format(pid=pid),
            pipeline_run,
            EngineEventData.in_process(pid, marker_end='cli_api_subprocess_init'),
        )

        def _report_framework_error():
            # Single reporting path shared by both unexpected-failure handlers
            # below; reads the active exception via sys.exc_info().
            instance.report_engine_event(
                'An exception was thrown during execution that is likely a framework error, '
                'rather than an error in user code.',
                pipeline_run,
                EngineEventData.engine_error(
                    serializable_error_info_from_exc_info(sys.exc_info())
                ),
            )

        # Perform setup so that termination of the execution will unwind and report to the
        # instance correctly
        setup_interrupt_support()

        try:
            for event in execute_run_iterator(recon_pipeline, pipeline_run, instance):
                stream.send(event)
        except DagsterSubprocessError as err:
            # Subprocess errors that are *all* interrupts are expected during
            # run termination; anything else is treated as a framework error.
            if not all(
                err_info.cls_name == 'KeyboardInterrupt'
                for err_info in err.subprocess_error_infos
            ):
                _report_framework_error()
        except Exception:  # pylint: disable=broad-except
            _report_framework_error()
        finally:
            # Always record process exit, even after a failure above.
            instance.report_engine_event(
                'Process for pipeline exited (pid: {pid}).'.format(pid=pid),
                pipeline_run,
            )
def __init__(self, host='localhost', port=None, socket=None, max_workers=1):
    '''Build and bind the gRPC server.

    Exactly one of *port* (TCP, requires *host*) or *socket* (Unix domain
    socket; not supported on Windows) must be provided. Raises
    CouldNotBindGrpcServerToAddress when the bind fails, after printing
    SERVER_FAILED_TO_BIND_TOKEN so the parent process can detect the failure.
    '''
    setup_interrupt_support()

    # Validate arguments before touching any server state.
    check.opt_str_param(host, 'host')
    check.opt_int_param(port, 'port')
    check.opt_str_param(socket, 'socket')
    check.invariant(
        port is not None if seven.IS_WINDOWS else True,
        'You must pass a valid `port` on Windows: `socket` not supported.',
    )
    check.invariant(
        (port or socket) and not (port and socket),
        'You must pass one and only one of `port` or `socket`.',
    )
    check.invariant(
        host is not None if port else True,
        'Must provide a host when serving on a port',
    )

    self.server = grpc.server(ThreadPoolExecutor(max_workers=max_workers))
    add_DagsterApiServicer_to_server(DagsterApiServer(), self.server)

    address = (
        host + ':' + str(port) if port else 'unix:' + os.path.abspath(socket)
    )

    # grpc.Server.add_insecure_port returns:
    # - 0 on failure
    # - port number when a port is successfully bound
    # - 1 when a UDS is successfully bound
    bound = self.server.add_insecure_port(address)

    failed_arg = None
    if socket and bound != 1:
        failed_arg = socket
    elif port and bound != port:
        failed_arg = port
    if failed_arg is not None:
        print(SERVER_FAILED_TO_BIND_TOKEN)
        raise CouldNotBindGrpcServerToAddress(failed_arg)
'''Test a chain of child processes with interrupt support, ensure that interrupts can be correctly propagated and handled.''' import sys import time from dagster.serdes.ipc import ( interrupt_ipc_subprocess, open_ipc_subprocess, setup_interrupt_support, ) from dagster.utils import file_relative_path if __name__ == '__main__': setup_interrupt_support() ( child_opened_sentinel, parent_interrupt_sentinel, child_started_sentinel, child_interrupt_sentinel, ) = (sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]) child_process = open_ipc_subprocess([ sys.executable, file_relative_path(__file__, 'subprocess_with_interrupt_support.py'), child_started_sentinel, child_interrupt_sentinel, ]) with open(child_opened_sentinel, 'w') as fd: fd.write('opened_ipc_subprocess') try: while True: