def fast_push_file(self, kwargs):
    """Quickly distribute files to target hosts via the JobV3 fast_transfer_file API."""
    kwargs.update({
        "bk_username": self.bk_username,
    })
    result = JobV3Api.fast_transfer_file(kwargs, raw=True)
    if result["result"]:
        # The transfer task was created; poll it until a final status is available.
        query_kwargs = {
            "job_instance_id": result["data"].get("job_instance_id"),
            "bk_biz_id": kwargs.get("bk_biz_id"),
        }
        result = self.get_task_result_status(query_kwargs)
        logger.info(
            build_job_exec_log_format(self.bk_username, 'fast_push_file',
                                      kwargs['task_name']))
        return result
    else:
        logger.error(
            build_job_err_log_format(self.bk_username, 'fast_push_file',
                                     kwargs, result))
        return None
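# Illustrative usage sketch (not part of the original module). It assumes
# `job_client` is an instance of the class defining fast_push_file above and
# mirrors the kwargs shape that the execute() methods below build for
# JobV3Api.fast_transfer_file; every concrete value here is a hypothetical
# placeholder.
def _example_fast_push_file(job_client):
    example_kwargs = {
        "bk_biz_id": 1001,                    # hypothetical business id
        "task_name": "push config file",      # used in the success log line
        "file_target_path": "/data/conf/",
        "file_source_list": [{
            "file_list": ["/data/conf/app.conf"],
            "account": {"alias": "root"},
            "server": {"ip_list": get_job_ip_list(["10.0.0.1"])},
        }],
        "target_server": {"ip_list": get_job_ip_list(["10.0.0.2", "10.0.0.3"])},
    }
    # fast_push_file injects bk_username itself and then polls the task result.
    return job_client.fast_push_file(example_kwargs)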
def execute(self, data, parent_data):
    act_info = data.get_one_of_inputs('act_info')
    app_id = act_info['app_id']
    cluster_name = act_info['cluster_name']
    cluster_user = act_info['cluster_user']
    hosts_info = act_info['hosts_info']
    bk_username = act_info['bk_username']
    if not any(info['add'] == 1 for info in hosts_info):
        logger_celery.warning("No new IPs for this activity node to execute on; returning normally")
        data.outputs.result_message = "skip"
        return True
    # Push every node's public key files to every node in the cluster.
    source_ips = [info['ip'] for info in hosts_info]
    target_ips = source_ips
    kwargs = {
        "bk_biz_id": app_id,
        "bk_username": bk_username,
        "task_name": f"{cluster_name}: push public key files between nodes",
        "file_target_path": f"{HADOOP_PACKAGE_PATH}/ssh/",
        "file_source_list": [{
            "file_list": [f"/home/{cluster_user}/.ssh/id_rsa.pub.*"],
            "account": {
                "alias": "root"
            },
            "server": {
                "ip_list": get_job_ip_list(source_ips)
            },
        }],
        "target_server": {
            "ip_list": get_job_ip_list(target_ips)
        },
    }
    res = JobV3Api.fast_transfer_file(
        {
            **kwargs,
            **fast_transfer_file_common_kwargs
        }, raw=True)
    if not res['result']:
        # The job call failed; output 'fail' directly for the frontend to display
        data.outputs.result_message = 'fail'
    else:
        job_instance_id = res['data']['job_instance_id']
        data.outputs.job_instance_id = job_instance_id
        data.outputs.target_ips = target_ips
    return res['result']
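# Illustrative sketch only: get_job_ip_list is defined elsewhere in this repo.
# The assumption here is that it wraps plain IP strings into the element shape
# used by JobV3 server/target_server ip_list entries (bk_cloud_id plus ip),
# with the cloud area id hard-coded to 0 for simplicity.
def _example_get_job_ip_list(ips):
    return [{"bk_cloud_id": 0, "ip": ip} for ip in ips]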
def execute(self, data, parent_data):
    act_info = data.get_one_of_inputs('act_info')
    target_ips = data.get_one_of_inputs('target_ips')
    bk_username = act_info['bk_username']
    app_id = act_info['app_id']
    version = act_info['version']
    # All packages required to bring up an ES node for this version.
    package_full_name_list = [
        es_package_full_name_dict[version]["supervisor"]["package"],
        es_package_full_name_dict[version]["pypy"]["package"],
        es_package_full_name_dict[version]["TencentKona"]["package"],
        es_package_full_name_dict[version]["elasticsearch"]["package"],
    ]
    kwargs = {
        "bk_biz_id": app_id,
        "bk_username": bk_username,
        "file_target_path": '/data',
        "file_source_list": [{
            "file_list": package_full_name_list,
            "account": {
                "alias": "root"
            },
            "server": {
                "ip_list": package_source_ip_list
            },
        }],
        "target_server": {
            "ip_list": get_job_ip_list(target_ips)
        },
    }
    res = JobV3Api.fast_transfer_file(
        {
            **kwargs,
            **fast_transfer_file_common_kwargs
        }, raw=True)
    if not res['result']:
        # The job call failed; output 'fail' directly for the frontend to display
        data.outputs.result_message = 'fail'
    else:
        job_instance_id = res['data']['job_instance_id']
        data.outputs.job_instance_id = job_instance_id
        data.outputs.target_ips = target_ips
    return res['result']
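# Illustrative sketch of the shape es_package_full_name_dict is accessed with
# above: per-version entries keyed by component, each carrying a "package"
# full name. The version key and file names below are hypothetical
# placeholders, not the real package list.
_example_es_package_full_name_dict = {
    "7.10.2": {
        "supervisor": {"package": "/data/packages/supervisor-x.y.z.tar.gz"},
        "pypy": {"package": "/data/packages/pypy-x.y.z.tar.gz"},
        "TencentKona": {"package": "/data/packages/TencentKona-x.y.z.tar.gz"},
        "elasticsearch": {"package": "/data/packages/elasticsearch-7.10.2.tar.gz"},
    },
}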
def execute(self, data, parent_data):
    act_info = data.get_one_of_inputs('act_info')
    app_id = act_info['app_id']
    hosts_info = act_info['hosts_info']
    cluster_version = act_info['cluster_version']
    bk_username = act_info['bk_username']
    # Only hosts flagged as newly added receive the packages.
    target_ips = [info['ip'] for info in hosts_info if info['add'] == 1]
    if not target_ips:
        logger_celery.warning("No new IPs for this activity node to execute on; returning normally")
        data.outputs.result_message = "skip"
        return True
    package_full_name_list = [
        hadoop_package_full_name_dict[cluster_version]["hadoop"]["package"],
        hadoop_package_full_name_dict[cluster_version]["java"]["package"],
        hadoop_package_full_name_dict[cluster_version]["zookeeper"]["package"],
    ]
    kwargs = {
        "bk_biz_id": app_id,
        "bk_username": bk_username,
        "task_name": f"{act_info['cluster_name']} cluster: distribute packages to new nodes",
        "file_target_path": HADOOP_PACKAGE_PATH,
        "file_source_list": [{
            "file_list": package_full_name_list,
            "account": {
                "alias": "root"
            },
            "server": {
                "ip_list": package_source_ip_list
            },
        }],
        "target_server": {
            "ip_list": get_job_ip_list(target_ips)
        },
        'account_alias': 'root',
    }
    res = JobV3Api.fast_transfer_file(
        {
            **kwargs,
            **fast_transfer_file_common_kwargs
        }, raw=True)
    if not res['result']:
        # The job call failed; output 'fail' directly for the frontend to display
        data.outputs.result_message = 'fail'
    else:
        job_instance_id = res['data']['job_instance_id']
        data.outputs.job_instance_id = job_instance_id
        data.outputs.target_ips = target_ips
    return res['result']