Example #1
    def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):

        # General DAG Configuration
        self.dag = dag
        self.release = release
        self.config = config
        self.exec_config = executor.get_executor_config_with_cluster_access(
            self.config, self.release)

        # Specific Task Configuration
        self.env = {
            "REL_PLATFORM": self.release.platform,
            "THANOS_RECEIVER_URL":
            var_loader.get_secret("thanos_receiver_url"),
            "LOKI_RECEIVER_URL": var_loader.get_secret("loki_receiver_url")
        }

        if self.release.platform == "baremetal":
            self.install_vars = var_loader.build_task_vars(release,
                                                           task="install")
            self.baremetal_install_secrets = var_loader.get_secret(
                f"baremetal_openshift_install_config", deserialize_json=True)

            self.config = {
                **self.install_vars,
                **self.baremetal_install_secrets
            }

            self.env = {
                **self.env, "SSHKEY_TOKEN": self.config['sshkey_token'],
                "ORCHESTRATION_USER": self.config['provisioner_user'],
                "ORCHESTRATION_HOST": self.config['provisioner_hostname']
            }
Example #2
    def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):

        # General DAG Configuration
        self.dag = dag
        self.release = release
        self.dag_config = config
        self.release_name = release.get_release_name(delimiter="-")
        self.cluster_name = release._generate_cluster_name()

        # Airflow Variables
        self.ansible_orchestrator = var_loader.get_secret(
            "ansible_orchestrator", deserialize_json=True)

        self.exec_config = executor.get_default_executor_config(
            self.dag_config)

        # Merge all variables, prioritizing Airflow Secrets over git-based vars
        self.config = {
            **self.ansible_orchestrator,
            **{
                "es_server": var_loader.get_secret('elasticsearch'),
                "thanos_receiver_url": var_loader.get_secret('thanos_receiver_url'),
                "loki_receiver_url": var_loader.get_secret('loki_receiver_url')
            }
        }

        self.env = {
            "OPENSHIFT_CLUSTER_NAME": self.cluster_name,
            "KUBECONFIG_NAME": f"{self.release_name}-kubeconfig",
            "KUBEADMIN_NAME": f"{self.release_name}-kubeadmin",
            **self._insert_kube_env()
        }
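The constructor above folds the result of self._insert_kube_env() into the task environment, but the helper itself is not among these examples. A minimal sketch of what it presumably does, assuming it simply forwards kube-related variables from the worker's own environment (the filter is an assumption, not the repo's actual code):

from os import environ

def _insert_kube_env(self):
    # Assumption: pass through any KUBE*-prefixed variables (e.g. KUBECONFIG)
    # already set on the Airflow worker.
    return {key: value for key, value in environ.items() if key.startswith("KUBE")}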
Example #3
    def __init__(self,
                 dag,
                 config: DagConfig,
                 release: OpenshiftRelease,
                 task_group="benchmarks"):
        # General DAG Configuration
        self.dag = dag
        self.release = release
        self.task_group = task_group
        self.dag_config = config
        self.exec_config = executor.get_executor_config_with_cluster_access(
            self.dag_config, self.release)
        self.snappy_creds = var_loader.get_secret("snappy_creds",
                                                  deserialize_json=True)
        self.es_gold = var_loader.get_secret("es_gold")
        self.es_server_baseline = var_loader.get_secret("es_server_baseline")

        # Specific Task Configuration
        self.vars = var_loader.build_task_vars(release=self.release,
                                               task=self.task_group)
        self.git_name = self._git_name()
        self.env = {
            "SNAPPY_DATA_SERVER_URL": self.snappy_creds['server'],
            "SNAPPY_DATA_SERVER_USERNAME": self.snappy_creds['username'],
            "SNAPPY_DATA_SERVER_PASSWORD": self.snappy_creds['password'],
            "SNAPPY_USER_FOLDER": self.git_name,
            "PLATFORM": self.release.platform,
            "TASK_GROUP": self.task_group,
            "ES_GOLD": self.es_gold,
            "ES_SERVER_BASELINE": self.es_server_baseline
        }
        self.env.update(self.dag_config.dependencies)

        if self.release.platform == "baremetal":
            self.install_vars = var_loader.build_task_vars(release,
                                                           task="install")
            self.baremetal_install_secrets = var_loader.get_secret(
                f"baremetal_openshift_install_config", deserialize_json=True)

            self.config = {
                **self.install_vars,
                **self.baremetal_install_secrets
            }

            self.env = {
                **self.env, "SSHKEY_TOKEN": self.config['sshkey_token'],
                "ORCHESTRATION_USER": self.config['provisioner_user'],
                "ORCHESTRATION_HOST": self.config['provisioner_hostname']
            }
Example #4
    def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):

        # General DAG Configuration
        self.dag = dag
        self.release = release
        self.config = config
        self.release_name = release.get_release_name(delimiter="-")

        self.aws_creds = var_loader.get_secret("aws_creds", deserialize_json=True)

        # Specific Task Configuration
        self.vars = var_loader.build_task_vars(
            release=self.release, task="install")

        self.all_vars = {
            **self.vars,
            **self.aws_creds,
        }

        # Dump all vars to json file for Ansible to pick up
        with open(f"/tmp/{self.release_name}-postinstall-task.json", 'w') as json_file:
            json.dump(self.all_vars, json_file, sort_keys=True, indent=4)

        super().__init__()

        self.exec_config = executor.get_executor_config_with_cluster_access(self.config, self.release, executor_image="airflow-managed-services")
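The JSON dump above follows the pattern used throughout these examples: the merged vars are written to a well-known path so the wrapped scripts can hand them to Ansible as extra vars. A minimal sketch of that handoff, with illustrative keys and a hypothetical playbook name:

import json

# The DAG side writes the merged vars to a file...
all_vars = {"openshift_worker_count": 3, "version": "4.13"}  # illustrative values
with open("/tmp/example-postinstall-task.json", "w") as json_file:
    json.dump(all_vars, json_file, sort_keys=True, indent=4)

# ...and the script side passes that file to Ansible, which accepts a JSON file
# of extra vars via the @-prefix form:
#   ansible-playbook postinstall.yml -e @/tmp/example-postinstall-task.json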
Example #5
    def _get_benchmark(self, benchmark):
        env = {
            **self.env,
            **benchmark.get('env', {}),
            **{
                "ES_SERVER": var_loader.get_secret('elasticsearch'),
                "KUBEADMIN_PASSWORD": environ.get("KUBEADMIN_PASSWORD", "")
            }
        }
        task_prefix = f"{self.task_group}-"
        task = BashOperator(
            task_id=f"{task_prefix if self.task_group != 'benchmarks' else ''}{benchmark['name']}",
            depends_on_past=False,
            bash_command=f"{constants.root_dag_dir}/scripts/run_benchmark.sh -w {benchmark['workload']} -c {benchmark['command']} ",
            retries=0,
            trigger_rule=benchmark.get("trigger_rule", "all_success"),
            dag=self.dag,
            env=env,
            do_xcom_push=True,
            execution_timeout=timedelta(seconds=21600),
            executor_config=self.exec_config)

        self._add_indexer(task)
        return task
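A sketch of how _get_benchmark is presumably driven; the "benchmarks" key in the task vars and the sequential chaining are assumptions, not shown in these examples:

# Hypothetical caller: one BashOperator per benchmark entry, chained so each
# benchmark waits on the previous one.
tasks = [self._get_benchmark(benchmark) for benchmark in self.vars["benchmarks"]]
for upstream, downstream in zip(tasks, tasks[1:]):
    upstream >> downstream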
Example #6
    def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):
        super().__init__(dag, config, release)
        self.exec_config = executor.get_executor_config_with_cluster_access(
            self.dag_config,
            self.release,
            executor_image="airflow-managed-services")
        self.hypershift_pull_secret = var_loader.get_secret(
            "hypershift_pull_secret")
Example #7
    def __init__(self, root_dag_dir):
        with open(f"{root_dag_dir}/manifest.yaml") as manifest_file:
            try:
                self.yaml = yaml.safe_load(manifest_file)
            except yaml.YAMLError as exc:
                print(exc)
        self.releases = []
        self.release_stream_base_url = var_loader.get_secret("release_stream_base_url")
        self.get_latest_releases()
Example #8
    def _get_task(self, operation="install", trigger_rule="all_success"):
        bash_script = ""

        # Merge all variables, prioritizing Airflow Secrets over git-based vars
        config = {
            **self.vars,
            **self.baremetal_install_secrets,
            **{
                "es_server": var_loader.get_secret('elasticsearch')
            }
        }

        config['pullsecret'] = json.dumps(
            config['openshift_install_pull_secret'])
        config['version'] = self.release.release_stream
        config['build'] = self.release.build

        # Required Environment Variables for Install script
        env = {
            "SSHKEY_TOKEN": config['sshkey_token'],
            "OPENSHIFT_WORKER_COUNT": config['openshift_worker_count'],
            "BAREMETAL_NETWORK_CIDR": config['baremetal_network_cidr'],
            "BAREMETAL_NETWORK_VLAN": config['baremetal_network_vlan'],
            "OPENSHIFT_BASE_DOMAIN": config['openshift_base_domain'],
            "JETSKI_SKIPTAGS": config['jetski_skiptags'],
            "KUBECONFIG_PATH": config['kubeconfig_path'],
            "KUBECONFIG_NAME": f"{self.release_name}-kubeconfig",
            "KUBEADMIN_NAME": f"{self.release_name}-kubeadmin",
            "CURRENT_WORKER_COUNT": config['openshift_worker_count'],
            "TARGET_WORKER_COUNT": config['openshift_worker_scaleup_target'],
            "SCALE_STEP": config['openshift_worker_scale_step'],
            **self._insert_kube_env()
        }

        if operation == "install":
            config['worker_count'] = config['openshift_worker_count']
            bash_script = f"{constants.root_dag_dir}/scripts/install/baremetal_install.sh"
        else:
            config['worker_count'] = config['openshift_worker_scaleup_target']
            bash_script = f"{constants.root_dag_dir}/scripts/install/baremetal_scaleup.sh"

        # Dump all vars to json file for Ansible to pick up
        with open(f"/tmp/{self.release_name}-{operation}-task.json",
                  'w') as json_file:
            json.dump(config, json_file, sort_keys=True, indent=4)

        return BashOperator(
            task_id=f"{operation}-cluster",
            depends_on_past=False,
            bash_command=f"{bash_script} -p {self.release.platform} -v {self.release.version} -j /tmp/{self.release_name}-{operation}-task.json -o {operation} ",
            retries=3,
            dag=self.dag,
            trigger_rule=trigger_rule,
            executor_config=self.exec_config,
            env=env)
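A hypothetical pairing of the two operations this method supports; the "all_done" trigger rule on the scale-up step is an assumption:

# Sketch: install the cluster, then scale workers toward the configured target
# even if upstream tasks failed.
install = self._get_task(operation="install")
scaleup = self._get_task(operation="scaleup", trigger_rule="all_done")
install >> scaleup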
Example #9
    def __init__(self, dag, config: DagConfig, release: OpenshiftRelease, task_group="benchmarks"):
        # General DAG Configuration
        self.dag = dag
        self.release = release
        self.task_group = task_group
        self.dag_config = config
        self.exec_config = executor.get_executor_config_with_cluster_access(self.dag_config, self.release)
        self.snappy_creds = var_loader.get_secret("snappy_creds", deserialize_json=True)
        self.es_gold = var_loader.get_secret("es_gold")
        self.es_server_baseline = var_loader.get_secret("es_server_baseline")

        # Specific Task Configuration
        self.vars = var_loader.build_task_vars(
            release=self.release, task=self.task_group)
        self.git_name = self._git_name()
        self.env = {
            "SNAPPY_DATA_SERVER_URL": self.snappy_creds['server'],
            "SNAPPY_DATA_SERVER_USERNAME": self.snappy_creds['username'],
            "SNAPPY_DATA_SERVER_PASSWORD": self.snappy_creds['password'],
            "SNAPPY_USER_FOLDER": self.git_name,
            "PLATFORM": self.release.platform,
            "TASK_GROUP": self.task_group,
            "ES_GOLD": self.es_gold,
            "ES_SERVER_BASELINE": self.es_server_baseline
        }
        self.env.update(self.dag_config.dependencies)

        if self.release.platform == "baremetal":
            self.install_vars = var_loader.build_task_vars(
                release, task="install")
            self.baremetal_install_secrets = var_loader.get_secret(
                "baremetal_openshift_install_config", deserialize_json=True)

            self.config = {
                **self.install_vars,
                **self.baremetal_install_secrets
            }

            self.env = {
                **self.env,
                "SSHKEY_TOKEN": self.config['sshkey_token'],
                "ORCHESTRATION_USER": self.config['provisioner_user'],
                "ORCHESTRATION_HOST": self.config['provisioner_hostname']
            }
        
        if self.release.platform == "rosa":
            self.rosa_creds = var_loader.get_secret("rosa_creds", deserialize_json=True)
            self.aws_creds = var_loader.get_secret("aws_creds", deserialize_json=True)
            self.environment = self.vars["environment"] if "environment" in self.vars else "staging"
            self.env = {
                **self.env,
                "ROSA_CLUSTER_NAME": release._generate_cluster_name(),
                "ROSA_ENVIRONMENT": self.environment,
                "ROSA_TOKEN": self.rosa_creds['rosa_token_'+self.environment],
                "AWS_ACCESS_KEY_ID": self.aws_creds['aws_access_key_id'],
                "AWS_SECRET_ACCESS_KEY": self.aws_creds['aws_secret_access_key'],
                "AWS_DEFAULT_REGION": self.aws_creds['aws_region_for_openshift']
            }
Example #10
    def _get_util(self, util):
        env = {
            **self.env,
            **util.get('env', {}),
            **{
                "ES_SERVER": var_loader.get_secret('elasticsearch'),
                "KUBEADMIN_PASSWORD": environ.get("KUBEADMIN_PASSWORD", "")
            }
        }
        return BashOperator(
            task_id=f"{util['name']}",
            depends_on_past=False,
            bash_command=f"{constants.root_dag_dir}/scripts/utils/run_scale_ci_diagnosis.sh -w {util['workload']} -c {util['command']} ",
            retries=3,
            dag=self.dag,
            env=env,
            executor_config=self.exec_config)
Example #11
    def get_index_task(self):
        env = {
            **self.env,
            **{"ES_SERVER": var_loader.get_secret('elasticsearch')},
            **environ
        }
        if self.task != "install":
            command = f'UUID={{{{ ti.xcom_pull("{self.task}") }}}} {constants.root_dag_dir}/scripts/index.sh '
        else:
            command = f'{constants.root_dag_dir}/scripts/index.sh '

        return BashOperator(
            task_id=f"index-{self.task}",
            depends_on_past=False,
            bash_command=command,
            retries=3,
            dag=self.dag,
            trigger_rule="all_done",
            executor_config=self.exec_config,
            env=env
        )
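The Jinja expression in the non-install branch resolves at runtime to whatever the named task pushed to XCom; with do_xcom_push=True a BashOperator pushes the last line of its stdout, which is how a benchmark's UUID reaches the indexer. A minimal sketch of that handoff (task ids and commands are illustrative):

from airflow.operators.bash import BashOperator

# Illustrative handoff, assuming an existing `dag`: the benchmark prints the run
# UUID as its final stdout line, which do_xcom_push stores in XCom...
benchmark = BashOperator(task_id="uperf", bash_command="echo $UUID",
                         do_xcom_push=True, dag=dag)
# ...so the indexer's templated command renders to "UUID=<uuid> scripts/index.sh".
index = BashOperator(task_id="index-uperf",
                     bash_command='UUID={{ ti.xcom_pull("uperf") }} scripts/index.sh ',
                     dag=dag)
benchmark >> index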
Example #12
    def _get_task(self, trigger_rule="all_success"):
        bash_script = f"{constants.root_dag_dir}/scripts/install/baremetal_deploy_webfuse.sh"

        # Merge all variables, prioritizing Airflow Secrets over git-based vars
        config = {
            **self.vars,
            **self.baremetal_install_secrets,
            **{
                "es_server": var_loader.get_secret('elasticsearch')
            }
        }

        config['version'] = self.release.release_stream
        config['build'] = self.release.build

        # Required Environment Variables for Install script
        env = {
            "SSHKEY_TOKEN": config['sshkey_token'],
            "ORCHESTRATION_HOST": config['provisioner_hostname'],
            "ORCHESTRATION_USER": config['provisioner_user'],
            "WEBFUSE_SKIPTAGS": config['webfuse_skiptags'],
            "WEBFUSE_PLAYBOOK": config['webfuse_playbook'],
            **self._insert_kube_env()
        }

        # Dump all vars to json file for Ansible to pick up
        with open(f"/tmp/{self.release_name}-task.json", 'w') as json_file:
            json.dump(config, json_file, sort_keys=True, indent=4)

        return BashOperator(
            task_id="deploy-webfuse",
            depends_on_past=False,
            bash_command=f"{bash_script} -p {self.release.platform} -v {self.release.version} -j /tmp/{self.release_name}-task.json -o deploy_app ",
            retries=3,
            dag=self.dag,
            trigger_rule=trigger_rule,
            executor_config=self.exec_config,
            env=env)
Example #13
    def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):

        # General DAG Configuration
        self.dag = dag
        self.release = release
        self.config = config
        self.exec_config = executor.get_executor_config_with_cluster_access(
            self.config, self.release)
        self.snappy_creds = var_loader.get_secret("snappy_creds",
                                                  deserialize_json=True)

        # Specific Task Configuration
        self.vars = var_loader.build_task_vars(release=self.release,
                                               task="utils")
        self.git_name = self._git_name()
        self.env = {
            "SNAPPY_DATA_SERVER_URL": self.snappy_creds['server'],
            "SNAPPY_DATA_SERVER_USERNAME": self.snappy_creds['username'],
            "SNAPPY_DATA_SERVER_PASSWORD": self.snappy_creds['password'],
            "SNAPPY_USER_FOLDER": self.git_name
        }
        self.env.update(self.config.dependencies)
Example #14
    def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):

        # General DAG Configuration
        self.dag = dag
        self.release = release
        self.dag_config = config
        self.release_name = release.get_release_name(delimiter="-")
        self.cluster_name = release._generate_cluster_name()

        # Specific Task Configuration
        self.vars = var_loader.build_task_vars(release, task="install")

        # Airflow Variables
        self.ansible_orchestrator = var_loader.get_secret(
            "ansible_orchestrator", deserialize_json=True)

        self.install_secrets = var_loader.get_secret(
            "openshift_install_config", deserialize_json=True)
        self.aws_creds = var_loader.get_secret("aws_creds",
                                               deserialize_json=True)
        self.gcp_creds = var_loader.get_secret("gcp_creds",
                                               deserialize_json=True)
        self.azure_creds = var_loader.get_secret("azure_creds",
                                                 deserialize_json=True)
        self.ocp_pull_secret = var_loader.get_secret("osp_ocp_pull_creds")
        self.openstack_creds = var_loader.get_secret("openstack_creds",
                                                     deserialize_json=True)
        self.rosa_creds = var_loader.get_secret("rosa_creds",
                                                deserialize_json=True)
        self.rhacs_creds = var_loader.get_secret("rhacs_creds",
                                                 deserialize_json=True)
        self.rogcp_creds = var_loader.get_secret("rogcp_creds")
        self.release_stream_base_url = var_loader.get_secret(
            "release_stream_base_url")
        self.exec_config = executor.get_default_executor_config(
            self.dag_config)

        # Merge all variables, prioritizing Airflow Secrets over git-based vars
        self.config = {
            **self.vars,
            **self.ansible_orchestrator,
            **self.install_secrets,
            **self.aws_creds,
            **self.gcp_creds,
            **self.azure_creds,
            **self.openstack_creds,
            **self.rosa_creds,
            **self.rhacs_creds,
            **self.release.get_latest_release(self.release_stream_base_url),
            **{
                "es_server": var_loader.get_secret('elasticsearch'),
                "thanos_receiver_url": var_loader.get_secret('thanos_receiver_url'),
                "loki_receiver_url": var_loader.get_secret('loki_receiver_url')
            }
        }
        super().__init__()
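The merge above leans on Python's dict-unpacking semantics: when keys collide, the mapping unpacked last wins, which is exactly how an Airflow secret overrides a git-based var. A self-contained illustration:

git_vars = {"es_server": "from-git-vars"}
airflow_secrets = {"es_server": "from-airflow-secret"}
merged = {**git_vars, **airflow_secrets}
assert merged["es_server"] == "from-airflow-secret"  # the later unpacking wins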
Example #15
    def test_get_secret(self, mocker):
        mocker.patch('openshift_nightlies.util.var_loader.get_overrides',
                     return_value={})
        mocker.patch('airflow.models.Variable.get')
        var_loader.get_secret("foo")
        Variable.get.assert_called_once_with("foo", deserialize_json=False)
Example #16
    def __init__(self, dag, config: DagConfig, release: BaremetalRelease):
        self.baremetal_install_secrets = var_loader.get_secret(
            "baremetal_openshift_install_config", deserialize_json=True)
        super().__init__(dag, config, release)
        self.exec_config = executor.get_default_executor_config(
            self.dag_config, executor_image="airflow-jetski")
Example #17
    def test_get_overridden_secret(self, mocker):
        mocker.patch('openshift_nightlies.util.var_loader.get_overrides',
                     return_value={"foo": "bar"})
        assert var_loader.get_secret("foo") == "bar"
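Taken together, the two tests above pin down get_secret's contract: an override wins when present, otherwise the value comes from an Airflow Variable. A minimal implementation consistent with both tests (a sketch, not necessarily the repo's actual code):

from airflow.models import Variable

def get_secret(key, deserialize_json=False):
    overrides = get_overrides()  # consulted first, per the mocks above
    if key in overrides:
        return overrides[key]
    return Variable.get(key, deserialize_json=deserialize_json)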