def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):
    """Base task setup: store DAG context and assemble the shared config/env.

    Merges the ansible orchestrator secret with the observability endpoints
    (Airflow Secrets take precedence over git-based vars) and prepares the
    environment variables every task needs to locate the cluster artifacts.
    """
    # General DAG configuration
    self.dag = dag
    self.release = release
    self.dag_config = config
    self.release_name = release.get_release_name(delimiter="-")
    self.cluster_name = release._generate_cluster_name()

    # Airflow variables
    self.ansible_orchestrator = var_loader.get_secret(
        "ansible_orchestrator", deserialize_json=True)
    self.exec_config = executor.get_default_executor_config(self.dag_config)

    # Merge all variables, prioritizing Airflow Secrets over git based vars
    observability_endpoints = {
        "es_server": var_loader.get_secret('elasticsearch'),
        "thanos_receiver_url": var_loader.get_secret('thanos_receiver_url'),
        "loki_receiver_url": var_loader.get_secret('loki_receiver_url'),
    }
    self.config = {**self.ansible_orchestrator, **observability_endpoints}

    self.env = {
        "OPENSHIFT_CLUSTER_NAME": self.cluster_name,
        "KUBECONFIG_NAME": f"{self.release_name}-kubeconfig",
        "KUBEADMIN_NAME": f"{self.release_name}-kubeadmin",
        **self._insert_kube_env(),
    }
def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):
    """Install task setup.

    Gathers per-platform credentials and the openshift install secrets, then
    merges everything with the git-based "install" task vars — later entries
    (Airflow Secrets) take precedence over earlier (git) ones.  If the merged
    config lacks an install binary URL or client location, the release's
    latest-release payload is folded in as a fallback.

    Fix: the fallback check previously did ``self.config('openshift_client_location', "")``,
    calling the dict as a function — a TypeError whenever the install binary
    URL was present. It must use ``dict.get``.
    """
    # General DAG Configuration
    self.dag = dag
    self.release = release
    self.dag_config = config
    self.release_name = release.get_release_name(delimiter="-")
    self.cluster_name = release._generate_cluster_name()

    # Specific Task Configuration (git-based vars for the install step)
    self.vars = var_loader.build_task_vars(release, task="install")

    # Airflow Variables: orchestrator + per-platform credentials.
    # NOTE(review): ocp_pull_secret and rogcp_creds are fetched but not merged
    # into self.config below — presumably consumed elsewhere; confirm before
    # removing.
    self.ansible_orchestrator = var_loader.get_secret(
        "ansible_orchestrator", deserialize_json=True)
    self.install_secrets = var_loader.get_secret(
        "openshift_install_config", deserialize_json=True)
    self.aws_creds = var_loader.get_secret("aws_creds", deserialize_json=True)
    self.gcp_creds = var_loader.get_secret("gcp_creds", deserialize_json=True)
    self.azure_creds = var_loader.get_secret("azure_creds", deserialize_json=True)
    self.ocp_pull_secret = var_loader.get_secret("osp_ocp_pull_creds")
    self.openstack_creds = var_loader.get_secret("openstack_creds", deserialize_json=True)
    self.rosa_creds = var_loader.get_secret("rosa_creds", deserialize_json=True)
    self.rhacs_creds = var_loader.get_secret("rhacs_creds", deserialize_json=True)
    self.rogcp_creds = var_loader.get_secret("rogcp_creds")
    self.exec_config = executor.get_default_executor_config(self.dag_config)

    # Merge all variables, prioritizing Airflow Secrets over git based vars
    self.config = {
        **self.vars,
        **self.ansible_orchestrator,
        **self.install_secrets,
        **self.aws_creds,
        **self.gcp_creds,
        **self.azure_creds,
        **self.openstack_creds,
        **self.rosa_creds,
        **self.rhacs_creds,
        **{
            "es_server": var_loader.get_secret('elasticsearch'),
            "thanos_receiver_url": var_loader.get_secret('thanos_receiver_url'),
            "loki_receiver_url": var_loader.get_secret('loki_receiver_url')
        }
    }

    # Fall back to the latest known release when the binary/client locations
    # were not pinned in the merged config.
    if self.config.get('openshift_install_binary_url', "") == "" \
            or self.config.get('openshift_client_location', "") == "":
        self.config = {**self.config, **self.release.get_latest_release()}
    super().__init__()
def get_cloud_releases(self):
    """Register a DAG/release pair for every (version, provider, variant)
    combination declared under the 'cloud' platform.

    Fix: ``version_alias`` was assigned ``version['releaseStream']`` — a
    copy-paste of the line above it. The sibling methods
    (get_openstack_releases, get_rosa_releases) read ``version['alias']``;
    this method now does the same.
    """
    cloud = self.yaml['platforms']['cloud']
    for version in self.yaml['versions']:
        if version['version'] not in cloud['versions']:
            continue
        # These are invariant per version; hoisted out of the provider loop.
        version_number = version['version']
        release_stream = version['releaseStream']
        version_alias = version['alias']
        for provider in cloud['providers']:
            for variant in cloud['variants']:
                # Copy before mutating: the variant config is shared
                # across providers.
                config = variant['config'].copy()
                # Install config lives in a per-provider subdirectory.
                config['install'] = f"{provider}/{variant['config']['install']}"
                release = OpenshiftRelease(
                    platform=provider,
                    version=version_number,
                    release_stream=release_stream,
                    variant=variant['name'],
                    config=config,
                    version_alias=version_alias)
                schedule = self._get_schedule(variant, 'cloud')
                dag_config = self._build_dag_config(schedule)
                self.releases.append({
                    "config": dag_config,
                    "release": release
                })
def get_openstack_releases(self):
    """Register a DAG/release pair for each openstack variant of every
    version listed under the 'openstack' platform."""
    openstack = self.yaml['platforms']['openstack']
    for version in self.yaml['versions']:
        if version['version'] not in openstack['versions']:
            continue
        version_number = version['version']
        release_stream = version['releaseStream']
        version_alias = version['alias']
        for variant in openstack['variants']:
            release = OpenshiftRelease(
                platform="openstack",
                version=version_number,
                release_stream=release_stream,
                latest_release=self.latest_releases[release_stream],
                variant=variant['name'],
                config=variant['config'],
                version_alias=version_alias
            )
            schedule = self._get_schedule(variant, 'openstack')
            dag_config = self._build_dag_config(schedule)
            self.releases.append({"config": dag_config, "release": release})
def get_rosa_releases(self):
    """Register a DAG/release pair for each rosa variant of every version
    listed under the 'rosa' platform."""
    rosa = self.yaml['platforms']['rosa']
    for version in self.yaml['versions']:
        if version['version'] not in rosa['versions']:
            continue
        version_number = version['version']
        release_stream = version['releaseStream']
        version_alias = version['alias']
        for variant in rosa['variants']:
            release = OpenshiftRelease(
                platform="rosa",
                version=version_number,
                release_stream=release_stream,
                variant=variant['name'],
                config=variant['config'],
                version_alias=version_alias
            )
            schedule = self._get_schedule(variant, 'rosa')
            dag_config = self._build_dag_config(schedule)
            self.releases.append({"config": dag_config, "release": release})
def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):
    """Post-install task setup.

    Collects the git-based install vars plus AWS credentials, dumps the
    merged result to a JSON file for Ansible to pick up, and builds an
    executor config with cluster access.
    """
    # General DAG configuration
    self.dag = dag
    self.release = release
    self.config = config
    self.release_name = release.get_release_name(delimiter="-")
    self.aws_creds = var_loader.get_secret("aws_creds", deserialize_json=True)

    # Specific task configuration, credentials layered on top of git vars
    self.vars = var_loader.build_task_vars(release=self.release, task="install")
    self.all_vars = {**self.vars, **self.aws_creds}

    # Dump all vars to json file for Ansible to pick up
    vars_dump_path = f"/tmp/{self.release_name}-postinstall-task.json"
    with open(vars_dump_path, 'w') as json_file:
        json.dump(self.all_vars, json_file, sort_keys=True, indent=4)

    super().__init__()
    self.exec_config = executor.get_executor_config_with_cluster_access(
        self.config, self.release, executor_image="airflow-managed-services")
def __init__(self, dag, config: DagConfig, release: OpenshiftRelease, task_group="benchmarks"):
    """Benchmark task setup.

    Builds the environment handed to benchmark jobs (snappy data-server
    credentials, platform identifiers, ES endpoints), then layers in extra
    platform-specific settings when the release targets baremetal or rosa.
    """
    # General DAG Configuration
    self.dag = dag
    self.release = release
    self.task_group = task_group
    self.dag_config = config
    self.exec_config = executor.get_executor_config_with_cluster_access(self.dag_config, self.release)
    # Secrets used for result upload (snappy) and comparison baselines (ES)
    self.snappy_creds = var_loader.get_secret("snappy_creds", deserialize_json=True)
    self.es_gold = var_loader.get_secret("es_gold")
    self.es_server_baseline = var_loader.get_secret("es_server_baseline")
    # Specific Task Configuration (git-based vars for this task group)
    self.vars = var_loader.build_task_vars(release=self.release, task=self.task_group)
    self.git_name = self._git_name()
    self.env = {
        "SNAPPY_DATA_SERVER_URL": self.snappy_creds['server'],
        "SNAPPY_DATA_SERVER_USERNAME": self.snappy_creds['username'],
        "SNAPPY_DATA_SERVER_PASSWORD": self.snappy_creds['password'],
        "SNAPPY_USER_FOLDER": self.git_name,
        "PLATFORM": self.release.platform,
        "TASK_GROUP": self.task_group,
        "ES_GOLD": self.es_gold,
        "ES_SERVER_BASELINE": self.es_server_baseline
    }
    # DAG-level dependency overrides win over the defaults assembled above
    self.env.update(self.dag_config.dependencies)
    if self.release.platform == "baremetal":
        # Baremetal jobs also need the install-task vars and the baremetal
        # install secrets (to reach the provisioner host over SSH).
        self.install_vars = var_loader.build_task_vars(
            release, task="install")
        self.baremetal_install_secrets = var_loader.get_secret(
            f"baremetal_openshift_install_config", deserialize_json=True)
        self.config = {
            **self.install_vars,
            **self.baremetal_install_secrets
        }
        self.env = {
            **self.env,
            "SSHKEY_TOKEN": self.config['sshkey_token'],
            "ORCHESTRATION_USER": self.config['provisioner_user'],
            "ORCHESTRATION_HOST": self.config['provisioner_hostname']
        }
    if self.release.platform == "rosa":
        # ROSA jobs need the per-environment ROSA token plus AWS credentials.
        self.rosa_creds = var_loader.get_secret("rosa_creds", deserialize_json=True)
        self.aws_creds = var_loader.get_secret("aws_creds", deserialize_json=True)
        # Defaults to "staging" when the task vars don't pin an environment
        self.environment = self.vars["environment"] if "environment" in self.vars else "staging"
        self.env = {
            **self.env,
            "ROSA_CLUSTER_NAME": release._generate_cluster_name(),
            "ROSA_ENVIRONMENT": self.environment,
            "ROSA_TOKEN": self.rosa_creds['rosa_token_'+self.environment],
            "AWS_ACCESS_KEY_ID": self.aws_creds['aws_access_key_id'],
            "AWS_SECRET_ACCESS_KEY": self.aws_creds['aws_secret_access_key'],
            "AWS_DEFAULT_REGION": self.aws_creds['aws_region_for_openshift']
        }
def valid_openshift_release():
    """Return a minimal, well-formed OpenshiftRelease fixture for tests."""
    fixture_config = {
        "install": "install.json",
        "test": "test.json",
    }
    return OpenshiftRelease(
        platform="platform",
        version="version",
        release_stream="release_stream",
        variant="variant",
        config=fixture_config,
        version_alias="alias",
    )
def get_prebuilt_releases(self):
    """Register a DAG/release pair per prebuilt variant; prebuilt clusters
    carry no version stream, so those fields are left empty."""
    prebuilt = self.yaml['platforms']['prebuilt']
    for variant in prebuilt['variants']:
        schedule = self._get_schedule(variant, 'prebuilt')
        dag_config = self._build_dag_config(schedule)
        release = OpenshiftRelease(
            platform="prebuilt",
            version="4.x",
            release_stream="",
            latest_release={},
            variant=variant['name'],
            config=variant['config'],
            version_alias="",
        )
        self.releases.append({"config": dag_config, "release": release})
def valid_openshift_release():
    """Return an OpenshiftRelease fixture that also carries a latest-release
    payload (client location + install binary URL)."""
    fixture_config = {
        "install": "install.json",
        "test": "test.json",
    }
    latest = {
        "openshift_client_location": "foo",
        "openshift_install_binary_url": "bar",
    }
    return OpenshiftRelease(
        platform="platform",
        version="version",
        release_stream="release_stream",
        variant="variant",
        config=fixture_config,
        version_alias="alias",
        latest_release=latest,
    )
def __init__(self, release: OpenshiftRelease, config: DagConfig):
    """Create the Airflow DAG for a single nightly release.

    The DAG id is the release name; one tag is attached per discriminating
    release attribute so DAGs can be filtered in the Airflow UI.

    Improvement: the tag list was built with four consecutive ``append``
    calls; a list literal is the idiomatic equivalent.
    """
    self.release = release
    self.config = config
    self.release_name = release.get_release_name()
    tags = [
        self.release.platform,
        self.release.release_stream,
        self.release.variant,
        self.release.version_alias,
    ]
    self.dag = DAG(
        self.release_name,
        default_args=self.config.default_args,
        tags=tags,
        description=f"DAG for Openshift Nightly builds {self.release_name}",
        schedule_interval=self.config.schedule_interval,
        max_active_runs=1,  # never run two nightlies of the same release concurrently
        catchup=False)
    super().__init__()