Example 1
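A BashOperator factory that runs the scale-ci diagnosis script for a given utility, merging the base environment with utility-specific overrides, the Elasticsearch endpoint, and the kubeadmin password.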
    def _get_util(self, util):
        # Merge the base env with util-specific overrides, the Elasticsearch
        # endpoint, and the kubeadmin password from the worker's environment
        env = {
            **self.env,
            **util.get('env', {}),
            **{
                "ES_SERVER": var_loader.get_elastic_url(),
                "KUBEADMIN_PASSWORD": environ.get("KUBEADMIN_PASSWORD", "")
            }
        }
        return BashOperator(
            task_id=f"{util['name']}",
            depends_on_past=False,
            bash_command=f"{constants.root_dag_dir}/scripts/utils/run_scale_ci_diagnosis.sh -w {util['workload']} -c {util['command']} ",
            retries=3,
            dag=self.dag,
            env=env,
            executor_config=self.exec_config
        )
Example 2
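A bare-metal install/scale-up task factory: it merges git-based vars with Airflow secrets, dumps the merged config to a JSON file for Ansible, and returns a BashOperator that runs the matching install or scale-up script.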
    def _get_task(self, operation="install", trigger_rule="all_success"):
        bash_script = ""

        # Merge all variables, prioritizing Airflow Secrets over git-based vars
        config = {
            **self.vars,
            **self.install_secrets,
            **{
                "es_server": var_loader.get_elastic_url()
            }
        }

        config['pullsecret'] = json.dumps(
            config['openshift_install_pull_secret'])
        config['version'] = config['openshift_release']
        config['build'] = config['openshift_build']

        # Required Environment Variables for Install script
        env = {
            "SSHKEY_TOKEN": config['sshkey_token'],
            "OPENSHIFT_WORKER_COUNT": config['openshift_worker_count'],
            "BAREMETAL_NETWORK_CIDR": config['baremetal_network_cidr'],
            "BAREMETAL_NETWORK_VLAN": config['baremetal_network_vlan'],
            "OPENSHIFT_BASE_DOMAIN": config['openshift_base_domain'],
            "KUBECONFIG_PATH": config['kubeconfig_path'],
            **self._insert_kube_env()
        }

        if operation == "install":
            config['worker_count'] = config['openshift_worker_count']
            bash_script = f"{constants.root_dag_dir}/scripts/install/baremetal_install.sh"
        else:
            config['worker_count'] = config['openshift_worker_scaleup_target']
            bash_script = f"{constants.root_dag_dir}/scripts/install/baremetal_scaleup.sh"

        # Dump all vars to json file for Ansible to pick up
        with open(
                f"/tmp/{self.version}-{self.platform}-{self.profile}-{operation}-task.json",
                'w') as json_file:
            json.dump(config, json_file, sort_keys=True, indent=4)

        return BashOperator(
            task_id=f"{operation}-cluster",
            depends_on_past=False,
            bash_command=
            f"{bash_script} -p {self.platform} -v {self.version} -j /tmp/{self.version}-{self.platform}-{self.profile}-{operation}-task.json -o {operation} ",
            retries=3,
            dag=self.dag,
            trigger_rule=trigger_rule,
            executor_config=self.exec_config,
            env=env)
Example 3
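A constructor that captures the general DAG configuration, loads task vars and Airflow Variables (cloud credentials, install secrets), and merges everything into a single config dict.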
    def __init__(self, dag, version, release_stream, latest_release, platform,
                 profile):
        self.exec_config = var_loader.get_default_executor_config()

        # General DAG Configuration
        self.dag = dag
        self.platform = platform  # e.g. aws
        self.version = version  # e.g. 4.6/4.7, major.minor only
        # true release stream to follow. Nightlies, CI, etc.
        self.release_stream = release_stream
        self.latest_release = latest_release  # latest release from the release stream
        self.profile = profile  # e.g. default/ovn

        # Specific Task Configuration
        self.vars = var_loader.build_task_vars(task="install",
                                               version=version,
                                               platform=platform,
                                               profile=profile)

        # Airflow Variables
        self.ansible_orchestrator = Variable.get("ansible_orchestrator",
                                                 deserialize_json=True)

        self.install_secrets = Variable.get("openshift_install_config",
                                            deserialize_json=True)
        self.aws_creds = Variable.get("aws_creds", deserialize_json=True)
        self.gcp_creds = Variable.get("gcp_creds", deserialize_json=True)
        self.azure_creds = Variable.get("azure_creds", deserialize_json=True)
        self.ocp_pull_secret = Variable.get("osp_ocp_pull_creds")
        self.openstack_creds = Variable.get("openstack_creds",
                                            deserialize_json=True)

        # Merge all variables, prioritizing Airflow Secrets over git-based vars
        self.config = {
            **self.vars,
            **self.ansible_orchestrator,
            **self.install_secrets,
            **self.aws_creds,
            **self.gcp_creds,
            **self.azure_creds,
            **self.openstack_creds,
            **self.latest_release,
            **{
                "es_server": var_loader.get_elastic_url()
            }
        }
        super().__init__()
Example 4
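An indexing task: a BashOperator that runs index.sh with the merged environment and the `all_done` trigger rule, so it runs regardless of upstream task results.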
    def get_index_task(self):
        # Merge the task env with the Elasticsearch endpoint, letting the
        # worker's OS environment take precedence
        env = {
            **self.env,
            **{"ES_SERVER": var_loader.get_elastic_url()},
            **environ
        }

        return BashOperator(
            task_id=f"index_{self.task}",
            depends_on_past=False,
            bash_command=f"{constants.root_dag_dir}/scripts/index.sh ",
            retries=3,
            dag=self.dag,
            trigger_rule="all_done",
            executor_config=self.exec_config,
            env=env
        )
Example 5
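A BashOperator factory that runs a benchmark workload with the merged environment and no retries.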
    def _get_benchmark(self, benchmark):
        # Merge the base env with benchmark-specific overrides and the
        # Elasticsearch endpoint
        env = {
            **self.env,
            **benchmark.get('env', {}),
            **{
                "ES_SERVER": var_loader.get_elastic_url()
            }
        }
        return BashOperator(
            task_id=f"{benchmark['name']}",
            depends_on_past=False,
            bash_command=
            f"{constants.root_dag_dir}/scripts/run_benchmark.sh -w {benchmark['workload']} -c {benchmark['command']} ",
            retries=0,
            dag=self.dag,
            env=env,
            executor_config=self.exec_config)
Example 6
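A task factory that deploys the webfuse application on bare metal: it merges vars and secrets, writes them to a JSON file for Ansible, and returns a BashOperator that calls baremetal_deploy_webfuse.sh.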
    def _get_task(self, trigger_rule="all_success"):
        bash_script = f"{constants.root_dag_dir}/scripts/install/baremetal_deploy_webfuse.sh"

        # Merge all variables, prioritizing Airflow Secrets over git-based vars
        config = {
            **self.vars,
            **self.install_secrets,
            **{
                "es_server": var_loader.get_elastic_url()
            }
        }

        config['version'] = config['openshift_release']
        config['build'] = config['openshift_build']

        # Required Environment Variables for Install script
        env = {
            "SSHKEY_TOKEN": config['sshkey_token'],
            "ORCHESTRATION_HOST": config['provisioner_hostname'],
            "ORCHESTRATION_USER": config['provisioner_user'],
            "WEBFUSE_SKIPTAGS": config['webfuse_skiptags'],
            "WEBFUSE_PLAYBOOK": config['webfuse_playbook'],
            **self._insert_kube_env()
        }

        # Dump all vars to json file for Ansible to pick up
        with open(
                f"/tmp/{self.version}-{self.platform}-{self.profile}-task.json",
                'w') as json_file:
            json.dump(config, json_file, sort_keys=True, indent=4)

        return BashOperator(
            task_id="deploy-webfuse",
            depends_on_past=False,
            bash_command=
            f"{bash_script} -p {self.platform} -v {self.version} -j /tmp/{self.version}-{self.platform}-{self.profile}-task.json -o deploy_app ",
            retries=3,
            dag=self.dag,
            trigger_rule=trigger_rule,
            executor_config=self.exec_config,
            env=env)
Example 7
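A cluster install task factory: depending on the operation it enables the full install playbooks or only cleanup, merges vars, secrets, and the latest release from the stream, derives the cluster name from the git user, writes the config to JSON, and returns a BashOperator that runs install_cluster.sh.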
    def _get_task(self, operation="install", trigger_rule="all_success"):
        playbook_operations = {}
        if operation == "install":
            playbook_operations = {
                "openshift_cleanup": True,
                "openshift_debug_config": False,
                "openshift_install": True,
                "openshift_post_config": True,
                "openshift_post_install": True
            }
        else:
            playbook_operations = {
                "openshift_cleanup": True,
                "openshift_debug_config": False,
                "openshift_install": False,
                "openshift_post_config": False,
                "openshift_post_install": False
            }

        # Merge all variables, prioritizing Airflow Secrets over git-based vars
        config = {
            **self.vars,
            **self.ansible_orchestrator,
            **self.install_secrets,
            **self.aws_creds,
            **self.gcp_creds,
            **self.azure_creds,
            **playbook_operations,
            **var_loader.get_latest_release_from_stream(
                self.release_stream_base_url, self.release_stream),
            **{
                "es_server": var_loader.get_elastic_url()
            }
        }

        git_user = var_loader.get_git_user()
        if git_user == 'cloud-bulldozer':
            config[
                'openshift_cluster_name'] = f"ci-{self.version}-{self.platform}-{self.profile}"
        else:
            config[
                'openshift_cluster_name'] = f"{git_user}-{self.version}-{self.platform}-{self.profile}"

        config['dynamic_deploy_path'] = f"{config['openshift_cluster_name']}"
        config[
            'kubeconfig_path'] = f"/root/{config['dynamic_deploy_path']}/auth/kubeconfig"
        # Required Environment Variables for Install script
        env = {
            "SSHKEY_TOKEN": config['sshkey_token'],
            "ORCHESTRATION_HOST": config['orchestration_host'],
            "ORCHESTRATION_USER": config['orchestration_user'],
            "OPENSHIFT_CLUSTER_NAME": config['openshift_cluster_name'],
            "DEPLOY_PATH": config['dynamic_deploy_path'],
            "KUBECONFIG_NAME":
            f"{self.version}-{self.platform}-{self.profile}-kubeconfig",
            **self._insert_kube_env()
        }

        # Dump all vars to json file for Ansible to pick up
        with open(
                f"/tmp/{self.version}-{self.platform}-{self.profile}-{operation}-task.json",
                'w') as json_file:
            json.dump(config, json_file, sort_keys=True, indent=4)

        return BashOperator(
            task_id=f"{operation}",
            depends_on_past=False,
            bash_command=
            f"{constants.root_dag_dir}/scripts/install_cluster.sh -p {self.platform} -v {self.version} -j /tmp/{self.version}-{self.platform}-{self.profile}-{operation}-task.json -o {operation}",
            retries=3,
            dag=self.dag,
            trigger_rule=trigger_rule,
            executor_config=self.exec_config,
            env=env)