def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):
    """Capture shared DAG state, load orchestrator secrets, and build the
    environment handed to downstream tasks.

    Args:
        dag: The Airflow DAG this component belongs to.
        config: DAG-level configuration (executor settings, defaults).
        release: The Openshift release being processed; supplies the
            release and cluster names.
    """
    # General DAG configuration
    self.dag = dag
    self.release = release
    self.dag_config = config
    self.release_name = release.get_release_name(delimiter="-")
    self.cluster_name = release._generate_cluster_name()

    # Airflow Variables
    self.ansible_orchestrator = var_loader.get_secret(
        "ansible_orchestrator", deserialize_json=True)
    self.exec_config = executor.get_default_executor_config(self.dag_config)

    # Merge all variables, prioritizing Airflow Secrets over git based vars:
    # the endpoint secrets are applied last so they win on key collisions.
    endpoint_secrets = {
        "es_server": var_loader.get_secret('elasticsearch'),
        "thanos_receiver_url": var_loader.get_secret('thanos_receiver_url'),
        "loki_receiver_url": var_loader.get_secret('loki_receiver_url'),
    }
    self.config = dict(self.ansible_orchestrator)
    self.config.update(endpoint_secrets)

    # Environment passed to task shells; kube env entries (applied last)
    # override the literal keys on collision, matching the merge order.
    self.env = {
        "OPENSHIFT_CLUSTER_NAME": self.cluster_name,
        "KUBECONFIG_NAME": f"{self.release_name}-kubeconfig",
        "KUBEADMIN_NAME": f"{self.release_name}-kubeadmin",
    }
    self.env.update(self._insert_kube_env())
def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):
    """Gather install-task variables plus AWS credentials and dump them to a
    JSON file for Ansible to consume.

    Args:
        dag: The Airflow DAG this task belongs to.
        config: DAG-level configuration, also used to derive the executor
            config with cluster access.
        release: The Openshift release; supplies the release name used in
            the dump-file path.
    """
    # General DAG configuration
    self.dag = dag
    self.release = release
    self.config = config
    self.release_name = release.get_release_name(delimiter="-")
    self.aws_creds = var_loader.get_secret("aws_creds", deserialize_json=True)

    # Task-specific configuration: git-based vars first, AWS secrets
    # applied on top so they take precedence on key collisions.
    self.vars = var_loader.build_task_vars(
        release=self.release, task="install")
    self.all_vars = dict(self.vars)
    self.all_vars.update(self.aws_creds)

    # Dump all vars to a json file for Ansible to pick up
    dump_path = f"/tmp/{self.release_name}-postinstall-task.json"
    with open(dump_path, 'w') as json_file:
        json.dump(self.all_vars, json_file, sort_keys=True, indent=4)

    super().__init__()
    self.exec_config = executor.get_executor_config_with_cluster_access(
        self.config, self.release,
        executor_image="airflow-managed-services")
def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):
    """Assemble the full install-task configuration by merging git-based
    task vars with per-cloud credential secrets from Airflow.

    Args:
        dag: The Airflow DAG this task belongs to.
        config: DAG-level configuration (drives the default executor config).
        release: The Openshift release; supplies names, task vars, and the
            latest-release fallback.
    """
    # General DAG Configuration
    self.dag = dag
    self.release = release
    self.dag_config = config
    self.release_name = release.get_release_name(delimiter="-")
    self.cluster_name = release._generate_cluster_name()

    # Specific Task Configuration
    self.vars = var_loader.build_task_vars(release, task="install")

    # Airflow Variables (secrets); note ocp_pull_secret and rogcp_creds are
    # fetched but intentionally kept out of the merged config below.
    self.ansible_orchestrator = var_loader.get_secret(
        "ansible_orchestrator", deserialize_json=True)
    self.install_secrets = var_loader.get_secret(
        "openshift_install_config", deserialize_json=True)
    self.aws_creds = var_loader.get_secret("aws_creds", deserialize_json=True)
    self.gcp_creds = var_loader.get_secret("gcp_creds", deserialize_json=True)
    self.azure_creds = var_loader.get_secret("azure_creds", deserialize_json=True)
    self.ocp_pull_secret = var_loader.get_secret("osp_ocp_pull_creds")
    self.openstack_creds = var_loader.get_secret("openstack_creds", deserialize_json=True)
    self.rosa_creds = var_loader.get_secret("rosa_creds", deserialize_json=True)
    self.rhacs_creds = var_loader.get_secret("rhacs_creds", deserialize_json=True)
    self.rogcp_creds = var_loader.get_secret("rogcp_creds")

    self.exec_config = executor.get_default_executor_config(self.dag_config)

    # Merge all variables, prioritizing Airflow Secrets over git based vars
    self.config = {
        **self.vars,
        **self.ansible_orchestrator,
        **self.install_secrets,
        **self.aws_creds,
        **self.gcp_creds,
        **self.azure_creds,
        **self.openstack_creds,
        **self.rosa_creds,
        **self.rhacs_creds,
        **{
            "es_server": var_loader.get_secret('elasticsearch'),
            "thanos_receiver_url": var_loader.get_secret('thanos_receiver_url'),
            "loki_receiver_url": var_loader.get_secret('loki_receiver_url')
        }
    }

    # Fall back to the latest release artifacts when either location is
    # missing. BUGFIX: the second check previously called the dict itself
    # (self.config('openshift_client_location', "")), which raised
    # TypeError whenever openshift_install_binary_url was set; it must use
    # .get() like the first check.
    if self.config.get('openshift_install_binary_url', "") == "" or \
            self.config.get('openshift_client_location', "") == "":
        self.config = {**self.config, **self.release.get_latest_release()}

    super().__init__()
def __init__(self, release: OpenshiftRelease, config: DagConfig):
    """Construct the Airflow DAG for a nightly Openshift release build.

    Args:
        release: The Openshift release; supplies the DAG name and its
            UI filter tags.
        config: DAG-level configuration (default args, schedule interval).
    """
    self.release = release
    self.config = config
    self.release_name = release.get_release_name()

    # Tags let the DAG be filtered by platform/stream/variant in the UI.
    tags = [
        self.release.platform,
        self.release.release_stream,
        self.release.variant,
        self.release.version_alias,
    ]

    self.dag = DAG(
        self.release_name,
        default_args=self.config.default_args,
        tags=tags,
        description=f"DAG for Openshift Nightly builds {self.release_name}",
        schedule_interval=self.config.schedule_interval,
        max_active_runs=1,
        catchup=False,
    )
    super().__init__()