def execute(self, context):
    '''Run the Dagster pipeline inside a Docker container and return its events.

    Resolves the Dagster run id (explicit ``run_id`` param wins over the
    Airflow ``dag_run`` id), delegates container execution to the parent
    DockerOperator, then parses the container's raw log output into Dagster
    events. The run id is always cleared afterwards so the operator instance
    can be reused.
    '''
    # Import lazily so a missing dagster install fails with a clear message.
    try:
        from dagster_graphql.client.mutations import (
            handle_start_pipeline_execution_errors,
            handle_start_pipeline_execution_result,
        )
    except ImportError:
        raise AirflowException(
            'To use the DagsterDockerOperator, dagster and dagster_graphql must be installed '
            'in your Airflow environment.'
        )

    # An explicitly supplied run_id takes precedence over the dag_run's id.
    if 'run_id' in self.params:
        self._run_id = self.params['run_id']
    elif 'dag_run' in context and context['dag_run'] is not None:
        self._run_id = context['dag_run'].run_id

    try:
        container_output = super(DagsterDockerOperator, self).execute(context)
        self.log.info('Finished executing container.')

        parsed_response = parse_raw_res(container_output)
        handle_start_pipeline_execution_errors(parsed_response)
        pipeline_events = handle_start_pipeline_execution_result(parsed_response)

        # Mark this task as skipped if the execution produced only skip events.
        skip_self_if_necessary(pipeline_events)

        return pipeline_events
    finally:
        # Always reset so a stale run id never leaks into the next execution.
        self._run_id = None
def execute(self, context):
    '''Launch a Kubernetes pod that runs the Dagster pipeline and return its events.

    Mirrors Airflow's KubernetesPodOperator.execute, but instead of returning
    the pod's xcom result it reads the tail of the pod log, parses it as a
    Dagster GraphQL response, and returns the resulting Dagster events.
    '''
    # Import lazily so a missing dagster install fails with a clear message.
    try:
        from dagster_graphql.client.mutations import (
            handle_start_pipeline_execution_errors,
            handle_start_pipeline_execution_result,
        )
    except ImportError:
        raise AirflowException(
            'To use the DagsterKubernetesPodOperator, dagster and dagster_graphql must be'
            ' installed in your Airflow environment.'
        )

    # An explicitly supplied run_id takes precedence over the dag_run's id.
    if 'run_id' in self.params:
        self._run_id = self.params['run_id']
    elif 'dag_run' in context and context['dag_run'] is not None:
        self._run_id = context['dag_run'].run_id

    # return to original execute code:
    try:
        client = kube_client.get_kube_client(
            in_cluster=self.in_cluster,
            cluster_context=self.cluster_context,
            config_file=self.config_file,
        )
        gen = pod_generator.PodGenerator()

        for mount in self.volume_mounts:
            gen.add_mount(mount)
        for volume in self.volumes:
            gen.add_volume(volume)

        # The GraphQL query is passed as the container's arguments.
        pod = gen.make_pod(
            namespace=self.namespace,
            image=self.image,
            pod_id=self.name,
            cmds=self.cmds,
            arguments=self.query,
            labels=self.labels,
        )

        # Copy every operator-level pod setting onto the generated pod spec.
        pod.service_account_name = self.service_account_name
        pod.secrets = self.secrets
        pod.envs = self.env_vars
        pod.image_pull_policy = self.image_pull_policy
        pod.image_pull_secrets = self.image_pull_secrets
        pod.annotations = self.annotations
        pod.resources = self.resources
        pod.affinity = self.affinity
        pod.node_selectors = self.node_selectors
        pod.hostnetwork = self.hostnetwork
        pod.tolerations = self.tolerations
        pod.configmaps = self.configmaps
        pod.security_context = self.security_context

        launcher = pod_launcher.PodLauncher(kube_client=client, extract_xcom=self.xcom_push)
        try:
            # we won't use the "result", which is the pod's xcom json file
            (final_state, _) = launcher.run_pod(
                pod, startup_timeout=self.startup_timeout_seconds, get_logs=self.get_logs
            )

            # fetch the last line independently of whether logs were read
            # unbelievably, if you set tail_lines=1, the returned json has its double quotes
            # turned into unparseable single quotes
            # TODO: add retries - k8s log servers are _extremely_ flaky
            raw_res = client.read_namespaced_pod_log(
                name=pod.name, namespace=pod.namespace, container='base', tail_lines=5
            )

            # find the relevant line
            # TODO: raise sensible exception on garbage API string responses
            res = parse_raw_res(raw_res)

            handle_start_pipeline_execution_errors(res)
            events = handle_start_pipeline_execution_result(res)
            # Mark this task as skipped if the execution produced only skip events.
            check_events_for_skips(events)

            return events
        finally:
            # Always reset the run id, and clean up the pod if configured to.
            self._run_id = None
            if self.is_delete_operator_pod:
                launcher.delete_pod(pod)

        # NOTE(review): this check appears unreachable — the inner try always
        # either returns events or raises, so control never falls through to
        # here; confirm the intended failure handling for non-SUCCESS states.
        if final_state != State.SUCCESS:
            raise AirflowException(
                'Pod returned a failure: {state}'.format(state=final_state)
            )
        # note the lack of returning the default xcom
    except AirflowException as ex:
        # Wrap any launch/log failure with context about the pod launch.
        raise AirflowException('Pod Launching failed: {error}'.format(error=ex))