def launch_run(self, instance, run, external_pipeline):
    check.inst_param(external_pipeline, 'external_pipeline', ExternalPipeline)
    self.validate()
    variables = {
        'repositoryLocationName': external_pipeline.handle.location_name,
        'repositoryName': external_pipeline.handle.repository_name,
        'runId': run.run_id,
    }
    response = requests.post(
        urljoin(self._address, '/graphql'),
        params={
            'query': EXECUTE_RUN_IN_PROCESS_MUTATION,
            'variables': seven.json.dumps(variables),
        },
        timeout=self._timeout,
    )
    response.raise_for_status()
    result = response.json()['data']['executeRunInProcess']
    if result['__typename'] in ['LaunchPipelineRunSuccess', 'PipelineConfigValidationInvalid']:
        return self._instance.get_run_by_id(run.run_id)
    raise DagsterLaunchFailedError(
        'Failed to launch run with {cls} targeting {address}:\n{result}'.format(
            cls=self.__class__.__name__, address=self._address, result=result
        )
    )

def launch_run(self, instance, run):
    self.validate()
    variables = {'runId': run.run_id}
    response = requests.post(
        urljoin(self._address, '/graphql'),
        params={
            'query': START_PIPELINE_EXECUTION_FOR_CREATED_RUN_MUTATION,
            'variables': seven.json.dumps(variables),
        },
        timeout=self._timeout,
    )
    response.raise_for_status()
    result = response.json()['data']['startPipelineExecutionForCreatedRun']
    if result['__typename'] == 'StartPipelineRunSuccess':
        return run.run_with_status(PipelineRunStatus(result['run']['status']))
    if result['__typename'] == 'PipelineConfigValidationInvalid':
        return run.run_with_status(PipelineRunStatus.FAILURE)
    raise DagsterLaunchFailedError(
        'Failed to launch run with {cls} targeting {address}:\n{result}'.format(
            cls=self.__class__.__name__, address=self._address, result=result
        )
    )

def launch_run(self, instance, run):
    self.validate()
    execution_params = execution_params_from_pipeline_run(run)
    variables = {'executionParams': execution_params.to_graphql_input()}
    instance.create_run(run)
    response = requests.post(
        urljoin(self._address, '/graphql'),
        params={
            'query': START_PIPELINE_EXECUTION_MUTATION,
            'variables': seven.json.dumps(variables),
        },
        timeout=self._timeout,
    )
    response.raise_for_status()
    result = response.json()['data']['startPipelineExecution']
    if result['__typename'] == 'StartPipelineExecutionSuccess':
        return run.run_with_status(PipelineRunStatus(result['run']['status']))
    raise DagsterLaunchFailedError(
        'Failed to launch run with {cls} targeting {address}:\n{result}'.format(
            cls=self.__class__.__name__, address=self._address, result=result
        )
    )

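# The Dagit-backed launchers above share one transport pattern: POST the
# mutation text plus JSON-encoded variables to the server's /graphql endpoint,
# then unpack the result's '__typename'. A minimal standalone sketch of that
# pattern, assuming a reachable dagit host; the ADDRESS value, the helper name,
# and the use of stdlib json (in place of dagster's seven.json shim) are
# illustrative assumptions, not part of the code above.
import json
from urllib.parse import urljoin

import requests

ADDRESS = 'http://localhost:3000'  # hypothetical dagit server

def post_graphql(query, variables=None, timeout=15):
    # Mirrors the calls above: the query and serialized variables travel in
    # the query string, and HTTP-level failures surface via raise_for_status().
    response = requests.post(
        urljoin(ADDRESS, '/graphql'),
        params={'query': query, 'variables': json.dumps(variables or {})},
        timeout=timeout,
    )
    response.raise_for_status()
    return response.json()['data']
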
def launch_run(self, instance, run, external_pipeline):
    check.inst_param(external_pipeline, "external_pipeline", ExternalPipeline)
    self.validate()
    variables = {
        "repositoryLocationName": external_pipeline.handle.location_name,
        "repositoryName": external_pipeline.handle.repository_name,
        "runId": run.run_id,
    }
    response = requests.post(
        urljoin(self._address, "/graphql"),
        params={
            "query": EXECUTE_RUN_IN_PROCESS_MUTATION,
            "variables": seven.json.dumps(variables),
        },
        timeout=self._timeout,
    )
    response.raise_for_status()
    result = response.json()["data"]["executeRunInProcess"]
    if result["__typename"] in ["LaunchPipelineRunSuccess", "PipelineConfigValidationInvalid"]:
        return self._instance.get_run_by_id(run.run_id)
    raise DagsterLaunchFailedError(
        "Failed to launch run with {cls} targeting {address}:\n{result}".format(
            cls=self.__class__.__name__, address=self._address, result=result
        )
    )

def launch_run(self, instance, run, external_pipeline):
    '''Subclasses must implement this method.'''
    check.inst_param(run, 'run', PipelineRun)
    check.inst_param(external_pipeline, 'external_pipeline', ExternalPipeline)
    repository_location_handle = external_pipeline.repository_handle.repository_location_handle
    check.inst(
        repository_location_handle,
        GRPC_REPOSITORY_LOCATION_HANDLE_TYPES,
        'GrpcRunLauncher: Can\'t launch runs for pipeline not loaded from a GRPC server',
    )
    res = repository_location_handle.client.start_run(
        ExecuteRunArgs(
            pipeline_origin=external_pipeline.get_origin(),
            pipeline_run_id=run.run_id,
            instance_ref=self._instance.get_ref(),
        )
    )
    if not res.success:
        raise DagsterLaunchFailedError(
            res.message, serializable_error_info=res.serializable_error_info
        )
    self._run_id_to_repository_location_handle_cache[run.run_id] = repository_location_handle
    return run

def launch_run(self, context: LaunchRunContext) -> None:
    run = context.pipeline_run
    check.inst_param(run, "run", PipelineRun)
    if not context.workspace:
        raise DagsterInvariantViolationError(
            "DefaultRunLauncher requires a workspace to be included in its LaunchRunContext"
        )
    external_pipeline_origin = check.not_none(run.external_pipeline_origin)
    repository_location = context.workspace.get_location(
        external_pipeline_origin.external_repository_origin.repository_location_origin.location_name
    )
    check.inst(
        repository_location,
        GrpcServerRepositoryLocation,
        "DefaultRunLauncher: Can't launch runs for pipeline not loaded from a GRPC server",
    )
    self._instance.add_run_tags(
        run.run_id,
        {
            GRPC_INFO_TAG: seven.json.dumps(
                merge_dicts(
                    {"host": repository_location.host},
                    (
                        {"port": repository_location.port}
                        if repository_location.port
                        else {"socket": repository_location.socket}
                    ),
                    ({"use_ssl": True} if repository_location.use_ssl else {}),
                )
            )
        },
    )
    res = deserialize_as(
        repository_location.client.start_run(
            ExecuteExternalPipelineArgs(
                pipeline_origin=external_pipeline_origin,
                pipeline_run_id=run.run_id,
                instance_ref=self._instance.get_ref(),
            )
        ),
        StartRunResult,
    )
    if not res.success:
        raise DagsterLaunchFailedError(
            res.message, serializable_error_info=res.serializable_error_info
        )
    self._run_ids.add(run.run_id)
    if self._wait_for_processes:
        self._locations_to_wait_for.append(repository_location)

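# The GRPC_INFO_TAG written above is what lets later processes reconnect to the
# run's gRPC server. A minimal sketch of reading it back, assuming stdlib json
# in place of the seven.json shim; 'instance' and 'run_id' are placeholders for
# the same instance and run id used above.
import json

grpc_info = json.loads(instance.get_run_by_id(run_id).tags[GRPC_INFO_TAG])
host = grpc_info["host"]                   # always written
port = grpc_info.get("port")               # present for TCP servers
socket = grpc_info.get("socket")           # present for Unix-socket servers
use_ssl = grpc_info.get("use_ssl", False)  # only written when True
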
def launch_run(self, run, external_pipeline):
    check.inst_param(run, "run", PipelineRun)
    check.inst_param(external_pipeline, "external_pipeline", ExternalPipeline)
    repository_location_handle = external_pipeline.repository_handle.repository_location_handle
    check.inst(
        repository_location_handle,
        GRPC_REPOSITORY_LOCATION_HANDLE_TYPES,
        "DefaultRunLauncher: Can't launch runs for pipeline not loaded from a GRPC server",
    )
    self._instance.add_run_tags(
        run.run_id,
        {
            GRPC_INFO_TAG: seven.json.dumps(
                merge_dicts(
                    {"host": repository_location_handle.host},
                    (
                        {"port": repository_location_handle.port}
                        if repository_location_handle.port
                        else {"socket": repository_location_handle.socket}
                    ),
                    ({"use_ssl": True} if repository_location_handle.use_ssl else {}),
                )
            )
        },
    )
    res = repository_location_handle.client.start_run(
        ExecuteExternalPipelineArgs(
            pipeline_origin=external_pipeline.get_external_origin(),
            pipeline_run_id=run.run_id,
            instance_ref=self._instance.get_ref(),
        )
    )
    if not res.success:
        raise DagsterLaunchFailedError(
            res.message, serializable_error_info=res.serializable_error_info
        )
    self._run_ids.add(run.run_id)
    if self._wait_for_processes and isinstance(
        repository_location_handle, ManagedGrpcPythonEnvRepositoryLocationHandle
    ):
        self._processes_to_wait_for.append(repository_location_handle.grpc_server_process)
    return run

def validate(self):
    if self._validated:
        return
    try:
        # Sanity-check that the configured address actually serves dagit
        # before attempting to launch runs against it.
        sanity_check = requests.get(
            urljoin(self._address, '/dagit_info'), timeout=self._timeout
        )
        self._validated = sanity_check.status_code == 200 and 'dagit' in sanity_check.text
    except RequestException:
        self._validated = False
    if not self._validated:
        raise DagsterLaunchFailedError(
            'Host {host} failed sanity check. It is not a dagit server.'.format(
                host=self._address
            ),
        )

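# A minimal usage sketch for the Dagit-backed launcher above. The constructor
# signature (an address plus a request timeout) is an assumption; validate()
# is also invoked internally by each launch_run variant, so calling it here
# just fails fast on a bad address.
launcher = RemoteDagitRunLauncher(  # class name assumed for this sketch
    address='http://localhost:3000',
    timeout=15,
)
launcher.validate()  # raises DagsterLaunchFailedError if the host is not dagit
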
def launch_run(self, instance, run, external_pipeline):
    """Subclasses must implement this method."""
    check.inst_param(run, "run", PipelineRun)
    check.inst_param(external_pipeline, "external_pipeline", ExternalPipeline)
    repository_location_handle = external_pipeline.repository_handle.repository_location_handle
    check.inst(
        repository_location_handle,
        GRPC_REPOSITORY_LOCATION_HANDLE_TYPES,
        "GrpcRunLauncher: Can't launch runs for pipeline not loaded from a GRPC server",
    )
    self._instance.add_run_tags(
        run.run_id,
        {
            GRPC_INFO_TAG: seven.json.dumps(
                merge_dicts(
                    {"host": repository_location_handle.host},
                    {"port": repository_location_handle.port}
                    if repository_location_handle.port
                    else {"socket": repository_location_handle.socket},
                )
            )
        },
    )
    res = repository_location_handle.client.start_run(
        ExecuteRunArgs(
            pipeline_origin=external_pipeline.get_origin(),
            pipeline_run_id=run.run_id,
            instance_ref=self._instance.get_ref(),
        )
    )
    if not res.success:
        raise DagsterLaunchFailedError(
            res.message, serializable_error_info=res.serializable_error_info
        )
    self._run_id_to_repository_location_handle_cache[run.run_id] = repository_location_handle
    return run

def _create_external_pipeline_run(
    instance,
    repo_location,
    external_repo,
    external_pipeline,
    run_config,
    mode,
    preset,
    tags,
    solid_selection,
    run_id,
):
    check.inst_param(instance, "instance", DagsterInstance)
    check.inst_param(repo_location, "repo_location", RepositoryLocation)
    check.inst_param(external_repo, "external_repo", ExternalRepository)
    check.inst_param(external_pipeline, "external_pipeline", ExternalPipeline)
    check.opt_dict_param(run_config, "run_config")
    check.opt_str_param(mode, "mode")
    check.opt_str_param(preset, "preset")
    check.opt_dict_param(tags, "tags", key_type=str)
    check.opt_list_param(solid_selection, "solid_selection", of_type=str)
    check.opt_str_param(run_id, "run_id")

    run_config, mode, tags, solid_selection = _check_execute_external_pipeline_args(
        external_pipeline,
        run_config,
        mode,
        preset,
        tags,
        solid_selection,
    )

    pipeline_name = external_pipeline.name
    pipeline_selector = PipelineSelector(
        location_name=repo_location.name,
        repository_name=external_repo.name,
        pipeline_name=pipeline_name,
        solid_selection=solid_selection,
    )

    subset_pipeline_result = repo_location.get_subset_external_pipeline_result(pipeline_selector)
    if not subset_pipeline_result.success:
        raise DagsterLaunchFailedError(
            "Failed to load external pipeline subset: {error_message}".format(
                error_message=subset_pipeline_result.error.message
            ),
            serializable_error_info=subset_pipeline_result.error,
        )

    external_pipeline_subset = ExternalPipeline(
        subset_pipeline_result.external_pipeline_data,
        external_repo.handle,
    )

    pipeline_mode = mode or external_pipeline_subset.get_default_mode_name()

    external_execution_plan = repo_location.get_external_execution_plan(
        external_pipeline_subset,
        run_config,
        pipeline_mode,
        step_keys_to_execute=None,
    )
    if isinstance(external_execution_plan, ExecutionPlanSnapshotErrorData):
        raise DagsterLaunchFailedError(
            "Failed to load external execution plan",
            serializable_error_info=external_execution_plan.error,
        )
    else:
        execution_plan_snapshot = external_execution_plan.execution_plan_snapshot

    return instance.create_run(
        pipeline_name=pipeline_name,
        run_id=run_id,
        run_config=run_config,
        mode=pipeline_mode,
        solids_to_execute=external_pipeline_subset.solids_to_execute,
        step_keys_to_execute=None,
        solid_selection=solid_selection,
        status=None,
        root_run_id=None,
        parent_run_id=None,
        tags=tags,
        pipeline_snapshot=external_pipeline_subset.pipeline_snapshot,
        execution_plan_snapshot=execution_plan_snapshot,
        parent_pipeline_snapshot=external_pipeline_subset.parent_pipeline_snapshot,
        external_pipeline_origin=external_pipeline_subset.get_external_origin(),
    )
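
# A minimal sketch of how the helper above is typically driven: build the run
# record first, then hand it to the instance's launcher. All objects here
# (instance, repo_location, external_repo, external_pipeline) are placeholders,
# and the instance.launch_run call signature is an assumption for this era of
# the API, not something established by the code above.
run = _create_external_pipeline_run(
    instance=instance,
    repo_location=repo_location,
    external_repo=external_repo,
    external_pipeline=external_pipeline,
    run_config={},         # empty config; real callers pass validated config
    mode=None,             # fall back to the pipeline's default mode
    preset=None,
    tags=None,
    solid_selection=None,  # None means the full pipeline
    run_id=None,           # let the instance generate one
)
instance.launch_run(run.run_id, external_pipeline)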