# Assumed module-level imports (from the surrounding file): os, tempfile,
# AirflowException, GoogleBaseHook, execute_in_subprocess, patch_environ,
# and the constant KUBE_CONFIG_ENV_VAR ("KUBECONFIG").
def execute(self, context):
    hook = GoogleBaseHook(gcp_conn_id=self.gcp_conn_id)
    self.project_id = self.project_id or hook.project_id
    if not self.project_id:
        raise AirflowException(
            "The project id must be passed either as "
            "keyword project_id parameter or as project_id extra "
            "in GCP connection definition. Neither is set!"
        )

    # Write config to a temp file and set the environment variable to point to it.
    # This avoids race conditions when several tasks read/write a single shared file.
    with tempfile.NamedTemporaryFile() as conf_file, \
            patch_environ({KUBE_CONFIG_ENV_VAR: conf_file.name}), \
            hook.provide_authorized_gcloud():
        # Attempt to get/update credentials.
        # We call gcloud directly instead of using the google-cloud-python API,
        # because there is no way to write the kubernetes config to a file, which
        # is required by KubernetesPodOperator.
        # The gcloud command looks at the env variable `KUBECONFIG` for where to
        # save the kubernetes config file.
        cmd = [
            "gcloud",
            "container",
            "clusters",
            "get-credentials",
            self.cluster_name,
            "--zone",
            self.location,
            "--project",
            self.project_id,
        ]
        if self.use_internal_ip:
            cmd.append('--internal-ip')
        execute_in_subprocess(cmd)

        # Tell `KubernetesPodOperator` where the config file is located.
        self.config_file = os.environ[KUBE_CONFIG_ENV_VAR]
        return super().execute(context)
# A later variant of the same execute() method, extended with service-account
# impersonation and regional-cluster support. Additionally assumes Optional
# (typing) and Context (airflow.utils.context) are imported.
def execute(self, context: 'Context') -> Optional[str]:
    hook = GoogleBaseHook(gcp_conn_id=self.gcp_conn_id)
    self.project_id = self.project_id or hook.project_id
    if not self.project_id:
        raise AirflowException(
            "The project id must be passed either as "
            "keyword project_id parameter or as project_id extra "
            "in Google Cloud connection definition. Neither is set!"
        )

    # Write config to a temp file and set the environment variable to point to it.
    # This avoids race conditions when several tasks read/write a single shared file.
    with tempfile.NamedTemporaryFile() as conf_file, patch_environ(
        {KUBE_CONFIG_ENV_VAR: conf_file.name}
    ), hook.provide_authorized_gcloud():
        # Attempt to get/update credentials.
        # We call gcloud directly instead of using the google-cloud-python API,
        # because there is no way to write the kubernetes config to a file, which
        # is required by KubernetesPodOperator.
        # The gcloud command looks at the env variable `KUBECONFIG` for where to
        # save the kubernetes config file.
        cmd = [
            "gcloud",
            "container",
            "clusters",
            "get-credentials",
            self.cluster_name,
            "--project",
            self.project_id,
        ]
        if self.impersonation_chain:
            if isinstance(self.impersonation_chain, str):
                impersonation_account = self.impersonation_chain
            elif len(self.impersonation_chain) == 1:
                impersonation_account = self.impersonation_chain[0]
            else:
                raise AirflowException(
                    "Chained list of accounts is not supported, "
                    "please specify only one service account"
                )
            cmd.extend(['--impersonate-service-account', impersonation_account])
        if self.regional:
            cmd.append('--region')
        else:
            cmd.append('--zone')
        cmd.append(self.location)
        if self.use_internal_ip:
            cmd.append('--internal-ip')
        execute_in_subprocess(cmd)

        # Tell `KubernetesPodOperator` where the config file is located.
        self.config_file = os.environ[KUBE_CONFIG_ENV_VAR]
        return super().execute(context)
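
# For context, a minimal usage sketch of wiring this operator into a DAG,
# assuming it is Airflow's GKEStartPodOperator. The dag_id, connection id,
# project, cluster, and image below are hypothetical placeholders, not values
# taken from the source above.
from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.kubernetes_engine import GKEStartPodOperator

with DAG(dag_id="example_gke_pod", start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    run_pod = GKEStartPodOperator(
        task_id="run_pod",
        gcp_conn_id="google_cloud_default",
        project_id="my-project",                # hypothetical
        location="europe-west1-b",              # a zone; pass regional=True for a region
        cluster_name="my-cluster",              # hypothetical
        name="echo-pod",
        namespace="default",
        image="gcr.io/my-project/echo:latest",  # hypothetical
        cmds=["echo", "hello"],
    )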
# Assumed module-level imports: `from google.cloud import logging as gcp_logging`,
# `from google.api_core.client_info import ClientInfo`, `from airflow import version`.
def _client(self) -> gcp_logging.Client:
    """Google Cloud Library API client."""
    if self.gcp_conn_id:
        from airflow.providers.google.common.hooks.base_google import GoogleBaseHook

        hook = GoogleBaseHook(gcp_conn_id=self.gcp_conn_id)
        credentials = hook._get_credentials()  # pylint: disable=protected-access
    else:
        # Use Application Default Credentials
        credentials = None
    client = gcp_logging.Client(
        credentials=credentials,
        client_info=ClientInfo(client_library_version='airflow_v' + version.version),
    )
    return client
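
# A quick sketch of what the returned client supports, assuming `gcp_logging`
# is `google.cloud.logging` as in the excerpt above. The logger name and
# payload are hypothetical.
from google.cloud import logging as gcp_logging

client = gcp_logging.Client()  # Application Default Credentials
logger = client.logger("airflow-tasks")  # hypothetical log name
logger.log_text("Task finished", severity="INFO")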
# Assumed module-level imports: json, AirflowException, GoogleBaseHook, and the
# constants GCP_CREDENTIALS_KEY_PATH / GCP_CREDENTIALS_KEYFILE_DICT (keys into
# the connection's extras).
def _get_credential_parameters(self) -> List[str]:
    connection = GoogleBaseHook.get_connection(conn_id=self.gcp_conn_id)
    if connection.extra_dejson.get(GCP_CREDENTIALS_KEY_PATH):
        credential_params = [
            '-credential_file',
            connection.extra_dejson[GCP_CREDENTIALS_KEY_PATH],
        ]
    elif connection.extra_dejson.get(GCP_CREDENTIALS_KEYFILE_DICT):
        credential_file_content = json.loads(connection.extra_dejson[GCP_CREDENTIALS_KEYFILE_DICT])
        self.log.info("Saving credentials to %s", self.credentials_path)
        with open(self.credentials_path, "w") as file:
            json.dump(credential_file_content, file)
        credential_params = ['-credential_file', self.credentials_path]
    else:
        self.log.info(
            "The credentials are not supplied by either key_path or "
            "keyfile_dict of the gcp connection %s. Falling back to the "
            "default activated account.",
            self.gcp_conn_id,
        )
        credential_params = []

    if not self.instance_specification:
        project_id = connection.extra_dejson.get('extra__google_cloud_platform__project')
        if self.project_id:
            project_id = self.project_id
        if not project_id:
            raise AirflowException(
                "For forwarding all instances, the project id "
                "for Google Cloud should be provided either "
                "by the project_id extra in the Google Cloud connection or by "
                "the project_id passed to the operator."
            )
        credential_params.extend(['-projects', project_id])
    return credential_params
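
# For orientation, a sketch of where these flags end up: the runner (not shown
# here) prepends them onto a cloud_sql_proxy (v1) command line. The `runner`
# object, binary path, socket directory, and instance name are hypothetical.
credential_params = runner._get_credential_parameters()
command = [
    "/usr/local/bin/cloud_sql_proxy",  # hypothetical binary path
    "-dir=/tmp/cloudsql",              # directory for unix sockets
    "-instances=my-project:europe-west1:my-db=tcp:3306",  # hypothetical
    *credential_params,
]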