Example #1
    def execute(self, data, parent_data):

        act_info = data.get_one_of_inputs('act_info')

        app_id = act_info['app_id']
        cluster_name = act_info['cluster_name']
        cluster_user = act_info['cluster_user']
        hosts_info = act_info['hosts_info']
        bk_username = act_info['bk_username']

        if not any(info['add'] == 1 for info in hosts_info):
            logger_celery.warning("This activity node has no new IPs to run against; returning normally")
            data.outputs.result_message = "skip"
            return True

        source_ips = [info['ip'] for info in hosts_info]
        target_ips = source_ips

        kwargs = {
            "bk_biz_id": app_id,
            "bk_username": bk_username,
            "task_name": f"Push public key files between {cluster_name} nodes",
            "file_target_path": f"{HADOOP_PACKAGE_PATH}/ssh/",
            "file_source_list": [{
                "file_list": [f"/home/{cluster_user}/.ssh/id_rsa.pub.*"],
                "account": {"alias": "root"},
                "server": {"ip_list": get_job_ip_list(source_ips)},
            }],
            "target_server": {"ip_list": get_job_ip_list(target_ips)},
        }

        res = JobV3Api.fast_transfer_file(
            {**kwargs, **fast_transfer_file_common_kwargs}, raw=True)
        if not res['result']:
            # If the Job call failed, surface 'fail' directly for the frontend to display
            data.outputs.result_message = 'fail'
        else:
            job_instance_id = res['data']['job_instance_id']
            data.outputs.job_instance_id = job_instance_id
            data.outputs.target_ips = target_ips
        return res['result']
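
These examples lean on a `get_job_ip_list` helper that is never shown in this listing. A minimal sketch of what it plausibly does, assuming the BlueKing Job v3 convention that every server entry pairs an IP with a `bk_cloud_id` (defaulting to cloud area 0 is an assumption):

    # Hypothetical sketch -- not the original helper. Assumes cloud area 0.
    def get_job_ip_list(ips, bk_cloud_id=0):
        """Wrap plain IP strings into the {bk_cloud_id, ip} dicts that the
        Job v3 file-transfer and script endpoints expect in ip_list."""
        return [{"bk_cloud_id": bk_cloud_id, "ip": ip} for ip in ips]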
Example #2
    def execute(self, data, parent_data):

        act_info = data.get_one_of_inputs('act_info')
        app_id = act_info['app_id']
        cluster_user = act_info['cluster_user']
        cluster_name = act_info['cluster_name']
        hosts_info = act_info['hosts_info']
        bk_username = act_info['bk_username']
        ssh_port = str(act_info['ssh_port'])

        if not any(info['add'] == 1 for info in hosts_info):
            logger_celery.warning("This activity node has no new IPs to run against; returning normally")
            data.outputs.result_message = "skip"
            return True

        target_ips = [info['ip'] for info in hosts_info]

        kwargs = {
            "bk_biz_id": app_id,
            "bk_username": bk_username,
            "task_name": f"Test passwordless login across {cluster_name} cluster nodes",
            "script_content": get_script(
                'hadoop_bamboo/components/collections/script_templates/check_free_login.sh'
            ),
            "script_param": get_script_param(
                [cluster_user, ssh_port, get_host_str(target_ips, hosts_info)]
            ),
            "target_server": {"ip_list": get_job_ip_list(target_ips)},
        }

        res = JobV3Api.fast_execute_script(
            {**kwargs, **fast_execute_script_common_kwargs}, raw=True)
        if not res['result']:
            # If the Job call failed, surface 'fail' directly for the frontend to display
            data.outputs.result_message = 'fail'
        else:
            job_instance_id = res['data']['job_instance_id']
            data.outputs.job_instance_id = job_instance_id
            data.outputs.target_ips = target_ips
        return res['result']
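
`get_script_param` flattens the argument list into the single string that `fast_execute_script` accepts. A hedged sketch, assuming the arguments are space-joined and base64-encoded (the Job v3 API takes `script_param` as base64, but both details are assumptions about this codebase):

    import base64

    # Hypothetical sketch -- space-joining and base64-encoding are assumptions.
    def get_script_param(params):
        """Join script arguments into one string and base64-encode it
        for the Job v3 fast_execute_script endpoint."""
        return base64.b64encode(" ".join(params).encode("utf-8")).decode("utf-8")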
Example #3
    def execute(self, data, parent_data):

        act_info = data.get_one_of_inputs('act_info')

        app_id = act_info['app_id']
        hosts_info = act_info['hosts_info']
        cluster_version = act_info['cluster_version']
        bk_username = act_info['bk_username']

        target_ips = [info['ip'] for info in hosts_info if info['add'] == 1]

        if not target_ips:
            logger_celery.warning("This activity node has no new IPs to run against; returning normally")
            data.outputs.result_message = "skip"
            return True

        package_full_name_list = [
            hadoop_package_full_name_dict[cluster_version]["hadoop"]["package"],
            hadoop_package_full_name_dict[cluster_version]["java"]["package"],
            hadoop_package_full_name_dict[cluster_version]["zookeeper"]["package"],
        ]

        kwargs = {
            "bk_biz_id": app_id,
            "bk_username": bk_username,
            "task_name": f"Distribute install packages to new {act_info['cluster_name']} cluster nodes",
            "file_target_path": HADOOP_PACKAGE_PATH,
            "file_source_list": [{
                "file_list": package_full_name_list,
                "account": {"alias": "root"},
                "server": {"ip_list": package_source_ip_list},
            }],
            "target_server": {"ip_list": get_job_ip_list(target_ips)},
            "account_alias": "root",
        }

        res = JobV3Api.fast_transfer_file(
            {**kwargs, **fast_transfer_file_common_kwargs}, raw=True)
        if not res['result']:
            # If the Job call failed, surface 'fail' directly for the frontend to display
            data.outputs.result_message = 'fail'
        else:
            job_instance_id = res['data']['job_instance_id']
            data.outputs.job_instance_id = job_instance_id
            data.outputs.target_ips = target_ips
        return res['result']
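
Every transfer call merges in `fast_transfer_file_common_kwargs`, which this listing never defines. A sketch of what such a shared dict might hold; every field here is an assumption except `account_alias`, which Example #3 also sets inline:

    # Hypothetical sketch of the shared kwargs merged into each fast_transfer_file call.
    fast_transfer_file_common_kwargs = {
        "account_alias": "root",  # run the transfer as root on the target hosts
        "timeout": 7200,          # seconds before the Job platform gives up (assumed value)
    }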
Example #4
    def execute(self, data, parent_data):

        act_info = data.get_one_of_inputs('act_info')

        app_id = act_info['app_id']
        bk_username = act_info['bk_username']
        hosts_info = act_info['hosts_info']
        cluster_version = act_info['cluster_version']
        cluster_user = act_info['cluster_user']
        base_dir = act_info['base_dir']
        data_disk_dir_list = act_info['data_disk_dir_list']

        java_file = "{}/java-{}.tar.gz".format(
            HADOOP_PACKAGE_PATH,
            hadoop_package_full_name_dict[cluster_version]["java"]["version"])
        hadoop_file = "{}/hadoop-{}.tar.gz".format(
            HADOOP_PACKAGE_PATH, hadoop_package_full_name_dict[cluster_version]
            ["hadoop"]["version"])
        zookeeper_file = "{}/zookeeper-{}.tar.gz".format(
            HADOOP_PACKAGE_PATH, hadoop_package_full_name_dict[cluster_version]
            ["zookeeper"]["version"])

        # To avoid shell array-iteration problems, each string is wrapped in '' before being passed
        hadoop_data_str = ",".join(data_disk_dir_list)

        dns_hosts_list = []
        for host in hosts_info:
            ip = host.get("ip")
            host_name = host.get("host_name")
            dns_hosts_list.append("'{}:{}'".format(ip, host_name))
        dns_hosts_str = ",".join(dns_hosts_list)

        target_ips = [info['ip'] for info in hosts_info if info['add'] == 1]

        if not target_ips:
            logger_celery.warning("This activity node has no new IPs to run against; returning normally")
            data.outputs.result_message = "skip"
            return True

        kwargs = {
            "bk_biz_id": app_id,
            "bk_username": bk_username,
            "task_name": f"Initialize new {act_info['cluster_name']} cluster nodes",
            "script_content": get_script(
                'hadoop_bamboo/components/collections/script_templates/init_before_deploy.sh'
            ),
            "script_param": get_script_param([
                cluster_user, base_dir, hadoop_file, java_file, zookeeper_file,
                hadoop_data_str, dns_hosts_str
            ]),
            "target_server": {"ip_list": get_job_ip_list(target_ips)},
        }

        res = JobV3Api.fast_execute_script(
            {**kwargs, **fast_execute_script_common_kwargs}, raw=True)
        if not res['result']:
            # If the Job call failed, surface 'fail' directly for the frontend to display
            data.outputs.result_message = 'fail'
        else:
            job_instance_id = res['data']['job_instance_id']
            data.outputs.job_instance_id = job_instance_id
            data.outputs.target_ips = target_ips
        return res['result']
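
`get_script` resolves a template path inside the project and returns the content in the form `script_content` expects. A hedged sketch, assuming the file is read relative to an (assumed) PROJECT_ROOT and base64-encoded, as the Job v3 API requires for `script_content`:

    import base64
    import os

    # Hypothetical sketch -- PROJECT_ROOT and the encoding step are assumptions.
    def get_script(relative_path):
        """Read a shell template from the project tree and base64-encode it
        so it can be sent as script_content to the Job v3 API."""
        with open(os.path.join(PROJECT_ROOT, relative_path), "rb") as f:
            return base64.b64encode(f.read()).decode("utf-8")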