def execute(self, data, parent_data):
    # Dispatch a JOB "fast_push_file" task (push files from source hosts to
    # target hosts) and record the created job instance on data.outputs.
    executor = parent_data.get_one_of_inputs('executor')
    biz_cc_id = parent_data.get_one_of_inputs('biz_cc_id')
    client = get_client_by_user(executor)
    client.set_bk_api_ver('v2')
    if parent_data.get_one_of_inputs('language'):
        # Propagate the caller's language to the API client and i18n layer.
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))
    original_source_files = data.get_one_of_inputs('job_source_files', [])
    file_source = []
    for item in original_source_files:
        # Resolve each source entry's raw IP string through CMDB (no cache).
        ip_info = cc_get_ips_info_by_str(username=executor,
                                         biz_cc_id=biz_cc_id,
                                         ip_str=item['ip'],
                                         use_cache=False)
        file_source.append({
            # One file path per line in the input textarea.
            'files': str(item['files']).strip().split("\n"),
            'ip_list': [{
                'ip': _ip['InnerIP'],
                'bk_cloud_id': _ip['Source']
            } for _ip in ip_info['ip_result']],
            'account': str(item['account']).strip(),
        })
    original_ip_list = data.get_one_of_inputs('job_ip_list')
    # Target hosts are resolved through CMDB as well.
    ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, original_ip_list)
    ip_list = [{
        'ip': _ip['InnerIP'],
        'bk_cloud_id': _ip['Source']
    } for _ip in ip_info['ip_result']]
    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'file_source': file_source,
        'ip_list': ip_list,
        'account': data.get_one_of_inputs('job_account'),
        'file_target_path': data.get_one_of_inputs('job_target_path'),
        # JOB calls back to this node when the task finishes.
        'bk_callback_url': get_node_callback_url(self.id)
    }
    job_result = client.job.fast_push_file(job_kwargs)
    LOGGER.info('job_result: {result}, job_kwargs: {kwargs}'.format(
        result=job_result, kwargs=job_kwargs))
    if job_result['result']:
        job_instance_id = job_result['data']['job_instance_id']
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result['data'][
            'job_instance_name']
        data.outputs.job_inst_url = get_job_instance_url(
            parent_data.inputs.biz_cc_id, job_instance_id)
        # Keep the client around for the schedule/poll phase.
        data.outputs.client = client
        return True
    else:
        data.outputs.ex_data = job_result['message']
        return False
def execute(self, data, parent_data):
    # Dispatch a JOB "fast_push_file" task; newer variant supporting a
    # per-node biz_cc_id override, optional timeout and scoped JOB API.
    executor = parent_data.get_one_of_inputs("executor")
    client = get_client_by_user(executor)
    client.set_bk_api_ver("v2")
    if parent_data.get_one_of_inputs("language"):
        # Propagate the caller's language to the API client and i18n layer.
        setattr(client, "language", parent_data.get_one_of_inputs("language"))
        translation.activate(parent_data.get_one_of_inputs("language"))
    # Node-level biz_cc_id takes precedence over the pipeline-level one.
    biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id)
    # deepcopy so in-place normalisation below never mutates pipeline inputs.
    original_source_files = deepcopy(data.get_one_of_inputs("job_source_files", []))
    file_source = []
    for item in original_source_files:
        # Resolve each source entry's raw IP string through CMDB (no cache).
        ip_info = cc_get_ips_info_by_str(
            username=executor,
            biz_cc_id=biz_cc_id,
            ip_str=item["ip"],
            use_cache=False,
        )
        file_source.append(
            {
                # One file path per line; blank lines are dropped.
                "files": [_file.strip() for _file in item["files"].split("\n") if _file.strip()],
                "ip_list": [{"ip": _ip["InnerIP"], "bk_cloud_id": _ip["Source"]} for _ip in ip_info["ip_result"]],
                "account": loose_strip(item["account"]),
            }
        )
    original_ip_list = data.get_one_of_inputs("job_ip_list")
    ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, original_ip_list)
    ip_list = [{"ip": _ip["InnerIP"], "bk_cloud_id": _ip["Source"]} for _ip in ip_info["ip_result"]]
    job_timeout = data.get_one_of_inputs("job_timeout")
    job_kwargs = {
        "bk_scope_type": JobBizScopeType.BIZ.value,
        "bk_scope_id": str(biz_cc_id),
        "bk_biz_id": biz_cc_id,
        "file_source": file_source,
        "ip_list": ip_list,
        "account": data.get_one_of_inputs("job_account"),
        "file_target_path": data.get_one_of_inputs("job_target_path"),
        # JOB calls back to this node when the task finishes.
        "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")),
    }
    # Timeout is only sent when the user supplied one.
    if job_timeout:
        job_kwargs["timeout"] = int(job_timeout)
    job_result = client.job.fast_push_file(job_kwargs)
    self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(result=job_result, kwargs=job_kwargs))
    if job_result["result"]:
        job_instance_id = job_result["data"]["job_instance_id"]
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result["data"]["job_instance_name"]
        data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id)
        # Keep the client around for the schedule/poll phase.
        data.outputs.client = client
        return True
    else:
        message = job_handle_api_error("job.fast_push_file", job_kwargs, job_result)
        self.logger.error(message)
        data.outputs.ex_data = message
        return False
def get_value(self):
    """Resolve the IP picker variable into a comma-separated inner-IP string.

    Two produce methods are supported:
      - "custom": free-text IP string validated against CMDB;
      - tree selection: entries are either single IPs or module ids, the
        latter expanded to their hosts via CMDB.

    Returns an error string when required pipeline context is missing.
    """
    if "executor" not in self.pipeline_data or "project_id" not in self.pipeline_data:
        return "ERROR: executor and project_id of pipeline is needed"
    var_ip_picker = self.value
    username = self.pipeline_data["executor"]
    project_id = self.pipeline_data["project_id"]
    project = Project.objects.get(id=project_id)
    # Projects not synced from CMDB have no usable biz id.
    bk_biz_id = project.bk_biz_id if project.from_cmdb else ""
    bk_supplier_account = supplier_account_for_project(project_id)
    produce_method = var_ip_picker["var_ip_method"]
    if produce_method == "custom":
        custom_value = var_ip_picker["var_ip_custom_value"]
        data = cc_get_ips_info_by_str(username, bk_biz_id, custom_value)
        ip_list = data["ip_result"]
        data = ",".join([ip["InnerIP"] for ip in ip_list])
    else:
        ip_pattern = re.compile(ip_re)
        module_id_list = var_ip_picker["var_ip_tree"]
        module_inst_id_list = []
        tree_ip_list = []
        for custom_id in module_id_list:
            try:
                ip_or_module_id = custom_id.split("_")[-1]
                if ip_pattern.match(ip_or_module_id):
                    # select certain ip
                    tree_ip_list.append(ip_or_module_id)
                else:
                    # select whole module
                    module_inst_id_list.append(int(ip_or_module_id))
            except Exception:
                logger.warning(
                    "ip_picker module ip transit failed: {origin}".format(
                        origin=custom_id))
        # query cc to get module's ip list and filter tree_ip_list
        host_list = cc_get_inner_ip_by_module_id(
            username, bk_biz_id, module_inst_id_list, bk_supplier_account,
            ["host_id", "bk_host_innerip"])
        cc_ip_list = cc_get_ips_info_by_str(
            username, bk_biz_id, ",".join(tree_ip_list))["ip_result"]
        select_ip = set()
        for host_info in host_list:
            # Skip hosts without an inner IP: previously the empty-string
            # fallback leaked "" into the set, producing ",," in the result.
            inner_ip = host_info["host"].get("bk_host_innerip", "")
            if inner_ip:
                select_ip.add(inner_ip)
        for ip_info in cc_ip_list:
            select_ip.add(ip_info["InnerIP"])
        # select_ip is already a set; no need to re-wrap it.
        data = ",".join(select_ip)
    return data
def get_value(self):
    """Resolve the IP picker variable into a comma-separated inner-IP string.

    Two produce methods are supported:
      - 'custom': free-text IP string validated against CMDB;
      - tree selection: entries are either single IPs or module ids, the
        latter expanded to their hosts via CMDB.
    """
    var_ip_picker = self.value
    username = self.pipeline_data['executor']
    project_id = self.pipeline_data['project_id']
    project = Project.objects.get(id=project_id)
    # Projects not synced from CMDB have no usable biz id.
    bk_biz_id = project.bk_biz_id if project.from_cmdb else ''
    bk_supplier_account = supplier_account_for_project(project_id)
    produce_method = var_ip_picker['var_ip_method']
    if produce_method == 'custom':
        custom_value = var_ip_picker['var_ip_custom_value']
        data = cc_get_ips_info_by_str(username, bk_biz_id, custom_value)
        ip_list = data['ip_result']
        data = ','.join([ip['InnerIP'] for ip in ip_list])
    else:
        ip_pattern = re.compile(ip_re)
        module_id_list = var_ip_picker['var_ip_tree']
        module_inst_id_list = []
        tree_ip_list = []
        for custom_id in module_id_list:
            try:
                ip_or_module_id = custom_id.split('_')[-1]
                if ip_pattern.match(ip_or_module_id):
                    # select certain ip
                    tree_ip_list.append(ip_or_module_id)
                else:
                    # select whole module
                    module_inst_id_list.append(int(ip_or_module_id))
            except Exception:
                logger.warning(
                    'ip_picker module ip transit failed: {origin}'.format(
                        origin=custom_id))
        # query cc to get module's ip list and filter tree_ip_list
        host_list = cc_get_inner_ip_by_module_id(username, bk_biz_id,
                                                 module_inst_id_list,
                                                 bk_supplier_account)
        cc_ip_list = cc_get_ips_info_by_str(
            username, bk_biz_id, ','.join(tree_ip_list))['ip_result']
        select_ip = set()
        for host_info in host_list:
            # Use .get: hosts may lack 'bk_host_innerip', and the previous
            # direct subscript raised KeyError (the sibling variant of this
            # picker already guards with .get). Empty values are skipped so
            # '' never pollutes the joined result.
            inner_ip = host_info['host'].get('bk_host_innerip', '')
            if inner_ip:
                select_ip.add(inner_ip)
        for ip_info in cc_ip_list:
            select_ip.add(ip_info['InnerIP'])
        # select_ip is already a set; no need to re-wrap it.
        data = ','.join(select_ip)
    return data
def execute(self, data, parent_data):
    # Launch a pre-defined JOB task ("execute_job") with user-supplied
    # global variables; IP-type variables are validated against CMDB first.
    executor = parent_data.get_one_of_inputs('executor')
    biz_cc_id = parent_data.get_one_of_inputs('biz_cc_id')
    client = get_client_by_user(executor)
    client.set_bk_api_ver('v2')
    if parent_data.get_one_of_inputs('language'):
        # Propagate the caller's language to the API client and i18n layer.
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))
    original_global_var = data.get_one_of_inputs('job_global_var')
    global_vars = []
    for _value in original_global_var:
        # type 1 - string, type 2 - IP
        if _value['type'] == 2:
            var_ip = cc_get_ips_info_by_str(username=executor,
                                            biz_cc_id=biz_cc_id,
                                            ip_str=_value['value'],
                                            use_cache=False)
            ip_list = [{
                'ip': _ip['InnerIP'],
                'bk_cloud_id': _ip['Source']
            } for _ip in var_ip['ip_result']]
            # Non-empty input that resolved to no hosts is a hard error.
            if _value['value'].strip() and not ip_list:
                data.outputs.ex_data = _(
                    u"无法从配置平台(CMDB)查询到对应 IP,请确认输入的 IP 是否合法")
                return False
            if ip_list:
                global_vars.append({
                    'name': _value['name'],
                    'ip_list': ip_list,
                })
        else:
            global_vars.append({
                'name': _value['name'],
                'value': str(_value['value']).strip(),
            })
    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'bk_job_id': data.get_one_of_inputs('job_task_id'),
        'global_vars': global_vars,
        # JOB calls back to this node when the task finishes.
        'bk_callback_url': get_node_callback_url(self.id)
    }
    job_result = client.job.execute_job(job_kwargs)
    LOGGER.info('job_result: {result}, job_kwargs: {kwargs}'.format(
        result=job_result, kwargs=job_kwargs))
    if job_result['result']:
        job_instance_id = job_result['data']['job_instance_id']
        data.outputs.job_inst_url = get_job_instance_url(
            parent_data.inputs.biz_cc_id, job_instance_id)
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result['data'][
            'job_instance_name']
        # Keep the client around for the schedule/poll phase.
        data.outputs.client = client
        return True
    else:
        data.outputs.ex_data = job_result['message']
        return False
def execute(self, data, parent_data):
    """Lock or unlock CMDB hosts resolved from the user-supplied IP string.

    The concrete CC method (lock vs unlock) is decided by the subclass via
    ``host_lock_method``. Fails fast when any input IP is unknown to CMDB.
    """
    lock_method = self.host_lock_method()
    operator = parent_data.get_one_of_inputs("executor")
    cc_biz_id = parent_data.get_one_of_inputs("biz_cc_id")
    client = get_client_by_user(operator)
    language = parent_data.get_one_of_inputs("language")
    if language:
        # Propagate the caller's language to the API client and i18n layer.
        setattr(client, "language", language)
        translation.activate(language)
    raw_ip_str = data.get_one_of_inputs("cc_host_ip")
    resolved = cc_get_ips_info_by_str(operator, cc_biz_id, raw_ip_str)
    # Any unresolvable IP aborts the whole operation.
    if resolved["invalid_ip"]:
        data.outputs.ex_data = _("无法从配置平台(CMDB)查询到对应 IP,请确认输入的 IP 是否合法")
        return False
    lock_kwargs = {"id_list": [host["HostID"] for host in resolved["ip_result"]]}
    lock_result = getattr(client.cc, lock_method)(lock_kwargs)
    if lock_result["result"]:
        return True
    # Build a uniform API-error message, expose it and log it.
    message = handle_api_error(
        __group_name__, "cc.{method}".format(method=lock_method), lock_kwargs, lock_result
    )
    data.set_outputs("ex_data", message)
    self.logger.error(message)
    return False
def execute(self, data, parent_data):
    # Dispatch a JOB "fast_push_file" task; oldest variant of this component
    # (no callback URL, results stored via data.set_outputs).
    executor = parent_data.get_one_of_inputs('executor')
    biz_cc_id = parent_data.get_one_of_inputs('biz_cc_id')
    client = get_client_by_user(executor)
    client.set_bk_api_ver("v2")
    if parent_data.get_one_of_inputs('language'):
        # Propagate the caller's language to the API client and i18n layer.
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))
    original_source_files = data.get_one_of_inputs('job_source_files', [])
    file_source = []
    for item in original_source_files:
        # Resolve each source entry's raw IP string through CMDB.
        ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, item['ip'])
        file_source.append({
            # One file path per line in the input textarea.
            'files': item['files'].strip().split("\n"),
            'ip_list': [{
                'ip': _ip['InnerIP'],
                'bk_cloud_id': _ip['Source']
            } for _ip in ip_info['ip_result']],
            'account': item['account'].strip(),
        })
    original_ip_list = data.get_one_of_inputs('job_ip_list')
    ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, original_ip_list)
    ip_list = [{'ip': _ip['InnerIP'], 'bk_cloud_id': _ip['Source']}
               for _ip in ip_info['ip_result']]
    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'file_source': file_source,
        'ip_list': ip_list,
        'account': data.get_one_of_inputs('job_account'),
        'file_target_path': data.get_one_of_inputs('job_target_path'),
    }
    job_result = client.job.fast_push_file(job_kwargs)
    if job_result['result']:
        data.set_outputs('job_inst_id', job_result['data']['job_instance_id'])
        data.set_outputs('job_inst_name', job_result['data']['job_instance_name'])
        # Keep the client around for the schedule/poll phase.
        data.set_outputs('client', client)
        self.set_outputs_job_url(data, parent_data)
        return True
    else:
        data.set_outputs('ex_data', job_result['message'])
        return False
def execute(self, data, parent_data):
    # Push a locally-authored config file to target hosts via JOB
    # "push_config_file"; file content is base64-encoded inline.
    executor = parent_data.inputs.executor
    biz_cc_id = parent_data.inputs.biz_cc_id
    client = get_client_by_user(executor)
    original_ip_list = data.get_one_of_inputs("job_ip_list")
    across_biz = data.get_one_of_inputs("job_across_biz", False)
    if across_biz:
        # Cross-business push: IPs must be given as "cloud_id:inner_ip" and
        # are parsed locally instead of being validated against CMDB.
        ip_info = {"ip_result": []}
        for match in plat_ip_reg.finditer(original_ip_list):
            if not match:
                continue
            ip_str = match.group()
            cloud_id, inner_ip = ip_str.split(":")
            ip_info["ip_result"].append({"InnerIP": inner_ip, "Source": cloud_id})
    else:
        # Same-business push: resolve through CMDB (no cache).
        ip_info = cc_get_ips_info_by_str(
            username=executor,
            biz_cc_id=biz_cc_id,
            ip_str=original_ip_list,
            use_cache=False,
        )
    ip_list = [{"ip": _ip["InnerIP"], "bk_cloud_id": _ip["Source"]} for _ip in ip_info["ip_result"]]
    job_kwargs = {
        "bk_scope_type": JobBizScopeType.BIZ.value,
        "bk_scope_id": str(biz_cc_id),
        "bk_biz_id": biz_cc_id,
        "account": data.get_one_of_inputs("file_account"),
        "file_target_path": data.get_one_of_inputs("file_path"),
        "file_list": [
            {
                "file_name": data.get_one_of_inputs("local_name"),
                # JOB expects base64 text, hence encode -> b64 -> decode.
                "content": base64.b64encode(data.get_one_of_inputs("local_content").encode("utf-8")).decode(
                    "utf-8"
                ),
            }
        ],
        "ip_list": ip_list,
    }
    job_result = client.job.push_config_file(job_kwargs)
    self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(result=job_result, kwargs=job_kwargs))
    if job_result["result"]:
        job_instance_id = job_result["data"]["job_instance_id"]
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result["data"]["job_instance_name"]
        data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id)
        return True
    else:
        message = job_handle_api_error("job.push_config_file", job_kwargs, job_result)
        self.logger.error(message)
        data.outputs.ex_data = message
        return False
def execute(self, data, parent_data):
    # Push files previously uploaded to the platform's file manager onto
    # target hosts; delegates the actual transfer to the configured manager.
    executor = parent_data.inputs.executor
    biz_cc_id = data.inputs.biz_cc_id
    local_files = data.inputs.job_local_files
    target_ip_list = data.inputs.job_target_ip_list
    target_account = data.inputs.job_target_account
    target_path = data.inputs.job_target_path
    # The manager backend type is deployment-level configuration.
    file_manager_type = EnvironmentVariables.objects.get_var(
        'BKAPP_FILE_MANAGER_TYPE')
    if not file_manager_type:
        data.outputs.ex_data = 'File Manager configuration error, contact administrator please.'
        return False
    try:
        file_manager = ManagerFactory.get_manager(
            manager_type=file_manager_type)
    except Exception as e:
        err_msg = 'can not get file manager for type: {}\n err: {}'
        self.logger.error(
            err_msg.format(file_manager_type, traceback.format_exc()))
        data.outputs.ex_data = err_msg.format(file_manager_type, e)
        return False
    client = get_client_by_user(executor)
    # Resolve target hosts through CMDB.
    ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, target_ip_list)
    ip_list = [{
        'ip': _ip['InnerIP'],
        'bk_cloud_id': _ip['Source']
    } for _ip in ip_info['ip_result']]
    # Only the file tags are needed; the manager resolves them to content.
    file_tags = [_file['tag'] for _file in local_files]
    push_result = file_manager.push_files_to_ips(
        esb_client=client,
        bk_biz_id=biz_cc_id,
        file_tags=file_tags,
        target_path=target_path,
        ips=ip_list,
        account=target_account,
        callback_url=get_node_callback_url(self.id))
    if not push_result['result']:
        data.outputs.ex_data = push_result['message']
        return False
    job_instance_id = push_result['data']['job_id']
    data.outputs.job_inst_id = job_instance_id
    data.outputs.job_inst_url = get_job_instance_url(
        biz_cc_id, job_instance_id)
    return True
def execute(self, data, parent_data):
    # Run a script on target hosts via JOB "fast_execute_script"; the script
    # comes either from the JOB script library or inline user content.
    executor = parent_data.get_one_of_inputs('executor')
    biz_cc_id = parent_data.get_one_of_inputs('biz_cc_id')
    client = get_client_by_user(executor)
    client.set_bk_api_ver("v2")
    if parent_data.get_one_of_inputs('language'):
        # Propagate the caller's language to the API client and i18n layer.
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))
    original_ip_list = data.get_one_of_inputs('job_ip_list')
    ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, original_ip_list)
    ip_list = [{'ip': _ip['InnerIP'], 'bk_cloud_id': _ip['Source']}
               for _ip in ip_info['ip_result']]
    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'script_timeout': data.get_one_of_inputs('job_script_timeout'),
        'account': data.get_one_of_inputs('job_account'),
        'ip_list': ip_list,
    }
    script_param = data.get_one_of_inputs('job_script_param')
    if script_param:
        # NOTE(review): b64encode returns bytes on Python 3; newer sibling
        # components append .decode("utf-8") — confirm target runtime.
        job_kwargs.update({
            'script_param': base64.b64encode(script_param.encode('utf-8'))
        })
    script_source = data.get_one_of_inputs('job_script_source')
    if script_source in ["general", "public"]:
        # Library script: reference by id from the matching script list.
        job_kwargs.update({
            "script_id": data.get_one_of_inputs('job_script_list_%s' % script_source)
        })
    else:
        # Inline script: send type and base64-encoded content.
        job_kwargs.update({
            'script_type': data.get_one_of_inputs('job_script_type'),
            'script_content': base64.b64encode(data.get_one_of_inputs('job_content').encode('utf-8')),
        })
    job_result = client.job.fast_execute_script(job_kwargs)
    if job_result['result']:
        data.set_outputs('job_inst_id', job_result['data']['job_instance_id'])
        data.set_outputs('job_inst_name', job_result['data']['job_instance_name'])
        # Keep the client around for the schedule/poll phase.
        data.set_outputs('client', client)
        self.set_outputs_job_url(data, parent_data)
        return True
    else:
        data.set_outputs('ex_data', '%s, invalid ip: %s' %
                         (job_result['message'], ','.join(ip_info['invalid_ip'])))
        return False
def execute(self, data, parent_data):
    """Launch a pre-defined JOB task ("execute_job") with global variables.

    IP-type variables (type == 2) are resolved through CMDB into
    {ip, bk_cloud_id} pairs; string-type variables are passed through as
    trimmed text. Job instance info is stored on data outputs on success.
    """
    executor = parent_data.get_one_of_inputs('executor')
    biz_cc_id = parent_data.get_one_of_inputs('biz_cc_id')
    client = get_client_by_user(executor)
    client.set_bk_api_ver("v2")
    if parent_data.get_one_of_inputs('language'):
        # Propagate the caller's language to the API client and i18n layer.
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))
    original_global_var = data.get_one_of_inputs('job_global_var')
    global_vars = []
    for _value in original_global_var:
        # type 1 - string, type 2 - IP
        if _value['type'] == 2:
            var_ip = cc_get_ips_info_by_str(
                executor, biz_cc_id, _value['value'])
            ip_list = [{'ip': _ip['InnerIP'], 'bk_cloud_id': _ip['Source']}
                       for _ip in var_ip['ip_result']]
            global_vars.append({
                'id': _value['id'],
                'ip_list': ip_list,
            })
        else:
            global_vars.append({
                'id': _value['id'],
                # Coerce to str before strip: numeric variable values used
                # to raise AttributeError here (the sibling execute_job
                # component already does str(...).strip()).
                'value': str(_value['value']).strip(),
            })
    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'bk_job_id': data.get_one_of_inputs('job_task_id'),
        'global_vars': global_vars,
    }
    job_result = client.job.execute_job(job_kwargs)
    if job_result['result']:
        data.set_outputs('job_inst_id', job_result['data']['job_instance_id'])
        data.set_outputs('job_inst_name', job_result['data']['job_instance_name'])
        # Keep the client around for the schedule/poll phase.
        data.set_outputs('client', client)
        self.set_outputs_job_url(data, parent_data)
        return True
    else:
        data.set_outputs('ex_data', job_result['message'])
        return False
def get_value(self):
    """Resolve the IP picker variable into a comma-separated IP string.

    'custom' input is validated against CMDB and only known hosts are
    kept; tree-selected entries contribute the first IP each one contains.
    """
    picker = self.value
    operator = self.pipeline_data['executor']
    cc_id = self.pipeline_data['biz_cc_id']
    if picker['var_ip_method'] == 'custom':
        # Free-text input: CMDB filters out unknown hosts.
        cc_info = cc_get_ips_info_by_str(operator, cc_id, picker['var_ip_custom_value'])
        return ','.join([host['InnerIP'] for host in cc_info['ip_result']])
    # Tree selection: take the first IP parsed from each entry, deduplicated.
    picked = {
        get_ip_by_regex(entry)[0]
        for entry in picker['var_ip_tree']
        if get_ip_by_regex(entry)
    }
    return ','.join(picked)
def test_cc_get_ips_info_by_str(self):
    # Exercise cc_get_ips_info_by_str against all three supported input
    # formats — plain IPs, "Set|Module|IP" triples and "Source:IP" pairs —
    # both when the mocked CC client succeeds and when it fails.
    mock_get_client_by_user.success = True
    ip_result = host_list_data
    r1 = utils.cc_get_ips_info_by_str(username='******',
                                      biz_cc_id=789,
                                      ip_str='1.1.1.1, 2.2.2.2')
    r2 = utils.cc_get_ips_info_by_str(username='******',
                                      biz_cc_id=790,
                                      ip_str='1.1.1.1, 2.2.2.2, 3.3.3.3')
    r3 = utils.cc_get_ips_info_by_str(
        username='******',
        biz_cc_id=791,
        ip_str='SetName1|ModuleName1|1.1.1.1, '
               'SetName2|ModuleName2|2.2.2.2, '
               'set|module|3.3.3.3')
    r4 = utils.cc_get_ips_info_by_str(
        username='******',
        biz_cc_id=792,
        ip_str='Source1:1.1.1.1, Source2:2.2.2.2, 3:3.3.3.3')
    # Known hosts are returned; 3.3.3.3 is not in the fixture and must be
    # reported as invalid for every input format.
    self.assertEqual(r1['ip_result'], ip_result)
    self.assertEqual(r1['invalid_ip'], [])
    self.assertEqual(r2['ip_result'], ip_result)
    self.assertEqual(r2['invalid_ip'], ['3.3.3.3'])
    self.assertEqual(r3['ip_result'], ip_result)
    self.assertEqual(r3['invalid_ip'], ['3.3.3.3'])
    self.assertEqual(r4['ip_result'], ip_result)
    self.assertEqual(r4['invalid_ip'], ['3.3.3.3'])
    # When the CC client fails, every format yields an empty ip_result.
    mock_get_client_by_user.success = False
    r5 = utils.cc_get_ips_info_by_str(username='******',
                                      biz_cc_id=793,
                                      ip_str='1.1.1.1, 2.2.2.2, 3.3.3.3')
    r6 = utils.cc_get_ips_info_by_str(
        username='******',
        biz_cc_id=794,
        ip_str='SetName1|ModuleName1|1.1.1.1, '
               'SetName2|ModuleName2|2.2.2.2, '
               'set|module|3.3.3.3')
    r7 = utils.cc_get_ips_info_by_str(
        username='******',
        biz_cc_id=795,
        ip_str='Source1:1.1.1.1, Source2:2.2.2.2, 3:3.3.3.3')
    self.assertEqual(r5['ip_result'], [])
    self.assertEqual(r6['ip_result'], [])
    self.assertEqual(r7['ip_result'], [])
def execute(self, data, parent_data):
    # Run a script via JOB "fast_execute_script"; newer variant with a
    # per-node biz_cc_id override, callback URL and uniform error handling.
    executor = parent_data.get_one_of_inputs('executor')
    client = get_client_by_user(executor)
    if parent_data.get_one_of_inputs('language'):
        # Propagate the caller's language to the API client and i18n layer.
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))
    # Node-level biz_cc_id takes precedence over the pipeline-level one.
    biz_cc_id = data.get_one_of_inputs('biz_cc_id', parent_data.inputs.biz_cc_id)
    original_ip_list = data.get_one_of_inputs('job_ip_list')
    ip_info = cc_get_ips_info_by_str(username=executor,
                                     biz_cc_id=biz_cc_id,
                                     ip_str=original_ip_list,
                                     use_cache=False)
    ip_list = [{
        'ip': _ip['InnerIP'],
        'bk_cloud_id': _ip['Source']
    } for _ip in ip_info['ip_result']]
    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'script_timeout': data.get_one_of_inputs('job_script_timeout'),
        'account': data.get_one_of_inputs('job_account'),
        'ip_list': ip_list,
        # JOB calls back to this node when the task finishes.
        'bk_callback_url': get_node_callback_url(self.id)
    }
    script_param = data.get_one_of_inputs('job_script_param')
    if script_param:
        # NOTE(review): b64encode returns bytes on Python 3; other sibling
        # components append .decode("utf-8") — confirm target runtime.
        job_kwargs.update({
            'script_param': base64.b64encode(script_param.encode('utf-8'))
        })
    script_source = data.get_one_of_inputs('job_script_source')
    if script_source in ["general", "public"]:
        # Library script: reference by id from the matching script list.
        job_kwargs.update({
            "script_id": data.get_one_of_inputs('job_script_list_%s' % script_source)
        })
    else:
        # Inline script: send type and base64-encoded content.
        job_kwargs.update({
            'script_type': data.get_one_of_inputs('job_script_type'),
            'script_content': base64.b64encode(
                data.get_one_of_inputs('job_content').encode('utf-8')),
        })
    job_result = client.job.fast_execute_script(job_kwargs)
    LOGGER.info('job_result: {result}, job_kwargs: {kwargs}'.format(
        result=job_result, kwargs=job_kwargs))
    if job_result['result']:
        job_instance_id = job_result['data']['job_instance_id']
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result['data'][
            'job_instance_name']
        data.outputs.job_inst_url = get_job_instance_url(
            biz_cc_id, job_instance_id)
        # Keep the client around for the schedule/poll phase.
        data.outputs.client = client
        return True
    else:
        message = job_handle_api_error('job.fast_execute_script',
                                       job_kwargs,
                                       job_result)
        self.logger.error(message)
        data.outputs.ex_data = message
        return False
def get_job_content(remote_files, operator, biz_cc_id):
    """
    Fetch base64-encoded file content from remote servers by IP and path,
    by running a cat-style JOB script per file and polling its log output.
    @param remote_files: file collection
        [{"file_path": "", "ip": "single IP only", "job_account": ""}]
    @param operator: operator username
    @param biz_cc_id: business id
    @return: {
        "success": [
            {"file_name": "file_name", "content": "content", "ip": "1.1.1.2"}
        ],
        "failure": [
            {"file_name": "file_name", "ip": "1.1.1.1", "message": "error"}
        ]
    }
    """
    client = get_client_by_user(operator)
    job_execute_suc_records = []
    job_execute_fail_records = []
    # Resolve every requested IP through CMDB in one batch (no cache).
    ip_str = ",".join([remote_file["ip"] for remote_file in remote_files])
    ip_info = cc_get_ips_info_by_str(
        username=operator,
        biz_cc_id=biz_cc_id,
        ip_str=ip_str,
        use_cache=False,
    )
    ip_list_result = [{"ip": _ip["InnerIP"], "bk_cloud_id": _ip["Source"]} for _ip in ip_info["ip_result"]]
    # Plain inner-IP list used for fast membership checks below.
    _ip_list_result = [_ip["InnerIP"] for _ip in ip_info["ip_result"]]
    for remote_file in remote_files:
        # The file path doubles as the script parameter for the read script.
        script_param = remote_file["file_path"]
        _, file_name = os.path.split(script_param)
        job_account = remote_file["job_account"]
        if remote_file["ip"] not in _ip_list_result:
            # IP unknown to CMDB: record the failure and skip the job call.
            job_execute_fail_records.append(
                {
                    "file_name": file_name,
                    "ip": remote_file["ip"],
                    "message": "ip 信息在 cmdb 不存在",
                    "ip_list": remote_file["ip"],
                }
            )
            continue
        # Find the resolved {ip, bk_cloud_id} entry matching this file's IP.
        for ip_list in ip_list_result:
            if remote_file["ip"] != ip_list["ip"]:
                continue
            job_kwargs = {
                "bk_biz_id": biz_cc_id,
                "account": job_account,
                "ip_list": [ip_list],
                "script_param": base64.b64encode(script_param.encode("utf-8")).decode("utf-8"),
                "script_type": SCRIPT_TYPE,
                "script_content": base64.b64encode(SCRIPT_CONTENT.encode("utf-8")).decode("utf-8"),
            }
            job_result = client.job.fast_execute_script(job_kwargs)
            logger.info("job_result: {result}, job_kwargs: {kwargs}".format(result=job_result, kwargs=job_kwargs))
            if job_result["result"]:
                job_instance_id = job_result["data"]["job_instance_id"]
                # Remember (key, instance_id) so the log poller can pair
                # results back to the originating file.
                job_execute_suc_records.append(
                    ({"file_name": file_name, "ip": remote_file["ip"]}, job_instance_id))
            else:
                job_execute_fail_records.append(
                    {
                        "file_name": file_name,
                        "ip": remote_file["ip"],
                        "message": job_result["message"],
                        "ip_list": ip_list,
                    }
                )
    polling_job_results = []
    # Poll all successfully-started jobs' logs concurrently.
    with ThreadPoolExecutor(max_workers=10) as t:
        all_task = [t.submit(get_job_instance_log, task, operator, biz_cc_id) for task in job_execute_suc_records]
        wait(all_task, return_when=ALL_COMPLETED)
        for job_result in all_task:
            polling_job_results.append(job_result.result())
    # Collect polling results into success/failure buckets.
    result_success = []
    for polling_job_result in polling_job_results:
        if polling_job_result["result"]:
            result_success.append(
                {
                    "file_name": polling_job_result["key"]["file_name"],
                    "content": polling_job_result["log_content"],
                    "ip": polling_job_result["key"]["ip"],
                }
            )
        else:
            job_execute_fail_records.append(
                {
                    "file_name": polling_job_result["key"]["file_name"],
                    "ip": polling_job_result["key"]["ip"],
                    "message": polling_job_result["message"],
                }
            )
    result = {"failure": job_execute_fail_records, "success": result_success}
    return result