def execute(self, data, parent_data):
    executor = parent_data.get_one_of_inputs('executor')
    biz_cc_id = parent_data.get_one_of_inputs('biz_cc_id')
    client = get_client_by_user(executor)
    client.set_bk_api_ver('v2')
    if parent_data.get_one_of_inputs('language'):
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))

    original_global_var = data.get_one_of_inputs('job_global_var')
    global_vars = []
    for _value in original_global_var:
        # type: 1 - string, 2 - IP
        if _value['type'] == 2:
            var_ip = cc_get_ips_info_by_str(username=executor,
                                            biz_cc_id=biz_cc_id,
                                            ip_str=_value['value'],
                                            use_cache=False)
            ip_list = [{'ip': _ip['InnerIP'], 'bk_cloud_id': _ip['Source']}
                       for _ip in var_ip['ip_result']]
            if _value['value'].strip() and not ip_list:
                data.outputs.ex_data = _(u"无法从配置平台(CMDB)查询到对应 IP,请确认输入的 IP 是否合法")
                return False
            if ip_list:
                global_vars.append({'name': _value['name'], 'ip_list': ip_list})
        else:
            global_vars.append({'name': _value['name'], 'value': str(_value['value']).strip()})

    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'bk_job_id': data.get_one_of_inputs('job_task_id'),
        'global_vars': global_vars,
        'bk_callback_url': get_node_callback_url(self.id)
    }
    job_result = client.job.execute_job(job_kwargs)
    LOGGER.info('job_result: {result}, job_kwargs: {kwargs}'.format(result=job_result, kwargs=job_kwargs))
    if job_result['result']:
        job_instance_id = job_result['data']['job_instance_id']
        data.outputs.job_inst_url = get_job_instance_url(parent_data.inputs.biz_cc_id, job_instance_id)
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result['data']['job_instance_name']
        data.outputs.client = client
        return True
    else:
        data.outputs.ex_data = job_result['message']
        return False
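# A minimal sketch of the data shapes the loop above assumes: each
# `job_global_var` row carries a `type` flag (1 - string, 2 - IP), and IP rows
# are resolved into {ip, bk_cloud_id} pairs before being handed to
# job.execute_job. The values below are illustrative only, not taken from a
# real task.
example_job_global_var = [
    {"name": "log_path", "type": 1, "value": "/data/logs "},
    {"name": "target_hosts", "type": 2, "value": "10.0.0.1,10.0.0.2"},
]
# After the loop, the payload sent as `global_vars` would look like:
example_global_vars = [
    {"name": "log_path", "value": "/data/logs"},
    {"name": "target_hosts", "ip_list": [
        {"ip": "10.0.0.1", "bk_cloud_id": 0},
        {"ip": "10.0.0.2", "bk_cloud_id": 0},
    ]},
]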
def execute(self, data, parent_data):
    executor = parent_data.get_one_of_inputs('executor')
    biz_cc_id = parent_data.get_one_of_inputs('biz_cc_id')
    client = get_client_by_user(executor)
    client.set_bk_api_ver('v2')
    if parent_data.get_one_of_inputs('language'):
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))

    original_source_files = data.get_one_of_inputs('job_source_files', [])
    file_source = []
    for item in original_source_files:
        ip_info = cc_get_ips_info_by_str(username=executor,
                                         biz_cc_id=biz_cc_id,
                                         ip_str=item['ip'],
                                         use_cache=False)
        file_source.append({
            'files': str(item['files']).strip().split("\n"),
            'ip_list': [{'ip': _ip['InnerIP'], 'bk_cloud_id': _ip['Source']}
                        for _ip in ip_info['ip_result']],
            'account': str(item['account']).strip(),
        })

    original_ip_list = data.get_one_of_inputs('job_ip_list')
    ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, original_ip_list)
    ip_list = [{'ip': _ip['InnerIP'], 'bk_cloud_id': _ip['Source']} for _ip in ip_info['ip_result']]

    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'file_source': file_source,
        'ip_list': ip_list,
        'account': data.get_one_of_inputs('job_account'),
        'file_target_path': data.get_one_of_inputs('job_target_path'),
        'bk_callback_url': get_node_callback_url(self.id)
    }
    job_result = client.job.fast_push_file(job_kwargs)
    LOGGER.info('job_result: {result}, job_kwargs: {kwargs}'.format(result=job_result, kwargs=job_kwargs))
    if job_result['result']:
        job_instance_id = job_result['data']['job_instance_id']
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result['data']['job_instance_name']
        data.outputs.job_inst_url = get_job_instance_url(parent_data.inputs.biz_cc_id, job_instance_id)
        data.outputs.client = client
        return True
    else:
        data.outputs.ex_data = job_result['message']
        return False
def execute(self, data, parent_data): executor = parent_data.get_one_of_inputs("executor") client = get_client_by_user(executor) client.set_bk_api_ver("v2") if parent_data.get_one_of_inputs("language"): setattr(client, "language", parent_data.get_one_of_inputs("language")) translation.activate(parent_data.get_one_of_inputs("language")) biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id) original_source_files = deepcopy(data.get_one_of_inputs("job_source_files", [])) file_source = [] for item in original_source_files: ip_info = cc_get_ips_info_by_str( username=executor, biz_cc_id=biz_cc_id, ip_str=item["ip"], use_cache=False, ) file_source.append( { "files": [_file.strip() for _file in item["files"].split("\n") if _file.strip()], "ip_list": [{"ip": _ip["InnerIP"], "bk_cloud_id": _ip["Source"]} for _ip in ip_info["ip_result"]], "account": loose_strip(item["account"]), } ) original_ip_list = data.get_one_of_inputs("job_ip_list") ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, original_ip_list) ip_list = [{"ip": _ip["InnerIP"], "bk_cloud_id": _ip["Source"]} for _ip in ip_info["ip_result"]] job_timeout = data.get_one_of_inputs("job_timeout") job_kwargs = { "bk_scope_type": JobBizScopeType.BIZ.value, "bk_scope_id": str(biz_cc_id), "bk_biz_id": biz_cc_id, "file_source": file_source, "ip_list": ip_list, "account": data.get_one_of_inputs("job_account"), "file_target_path": data.get_one_of_inputs("job_target_path"), "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")), } if job_timeout: job_kwargs["timeout"] = int(job_timeout) job_result = client.job.fast_push_file(job_kwargs) self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(result=job_result, kwargs=job_kwargs)) if job_result["result"]: job_instance_id = job_result["data"]["job_instance_id"] data.outputs.job_inst_id = job_instance_id data.outputs.job_inst_name = job_result["data"]["job_instance_name"] data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id) data.outputs.client = client return True else: message = job_handle_api_error("job.fast_push_file", job_kwargs, job_result) self.logger.error(message) data.outputs.ex_data = message return False
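# `loose_strip` is imported from the project's utils and is used by several of
# the plugins here to normalise cell values. The call sites only rely on it
# tolerating non-string and empty input; the sketch below is an assumption
# about that contract, not the project's actual implementation.
def loose_strip_sketch(value):
    # pass None through untouched; strip strings; leave other types alone
    if value is None:
        return value
    if isinstance(value, str):
        return value.strip()
    return value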
def execute(self, data, parent_data):
    executor = parent_data.inputs.executor
    biz_cc_id = parent_data.inputs.biz_cc_id
    client = get_client_by_user(executor)

    original_ip_list = data.get_one_of_inputs("job_ip_list")
    across_biz = data.get_one_of_inputs("job_across_biz", False)
    if across_biz:
        ip_info = {"ip_result": []}
        for match in plat_ip_reg.finditer(original_ip_list):
            if not match:
                continue
            ip_str = match.group()
            cloud_id, inner_ip = ip_str.split(":")
            ip_info["ip_result"].append({"InnerIP": inner_ip, "Source": cloud_id})
    else:
        ip_info = cc_get_ips_info_by_str(
            username=executor,
            biz_cc_id=biz_cc_id,
            ip_str=original_ip_list,
            use_cache=False,
        )
    ip_list = [{"ip": _ip["InnerIP"], "bk_cloud_id": _ip["Source"]} for _ip in ip_info["ip_result"]]

    job_kwargs = {
        "bk_scope_type": JobBizScopeType.BIZ.value,
        "bk_scope_id": str(biz_cc_id),
        "bk_biz_id": biz_cc_id,
        "account": data.get_one_of_inputs("file_account"),
        "file_target_path": data.get_one_of_inputs("file_path"),
        "file_list": [
            {
                "file_name": data.get_one_of_inputs("local_name"),
                "content": base64.b64encode(data.get_one_of_inputs("local_content").encode("utf-8")).decode("utf-8"),
            }
        ],
        "ip_list": ip_list,
    }
    job_result = client.job.push_config_file(job_kwargs)
    self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(result=job_result, kwargs=job_kwargs))
    if job_result["result"]:
        job_instance_id = job_result["data"]["job_instance_id"]
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result["data"]["job_instance_name"]
        data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id)
        return True
    else:
        message = job_handle_api_error("job.push_config_file", job_kwargs, job_result)
        self.logger.error(message)
        data.outputs.ex_data = message
        return False
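# The across-business branch above parses "<bk_cloud_id>:<inner_ip>" tokens
# with `plat_ip_reg`, whose definition lives elsewhere in the repo. A minimal
# pattern that would satisfy the split(":") logic above (an assumption for
# illustration, not the project's actual regex):
import re

plat_ip_reg_sketch = re.compile(r"\d+:(?:\d{1,3}\.){3}\d{1,3}")

assert [m.group() for m in plat_ip_reg_sketch.finditer("0:10.0.0.1, 2:10.0.0.5")] == [
    "0:10.0.0.1",
    "2:10.0.0.5",
]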
def execute(self, data, parent_data):
    executor = parent_data.inputs.executor
    biz_cc_id = data.inputs.biz_cc_id
    local_files = data.inputs.job_local_files
    target_ip_list = data.inputs.job_target_ip_list
    target_account = data.inputs.job_target_account
    target_path = data.inputs.job_target_path

    file_manager_type = EnvironmentVariables.objects.get_var('BKAPP_FILE_MANAGER_TYPE')
    if not file_manager_type:
        data.outputs.ex_data = 'File Manager configuration error, contact administrator please.'
        return False
    try:
        file_manager = ManagerFactory.get_manager(manager_type=file_manager_type)
    except Exception as e:
        err_msg = 'can not get file manager for type: {}\n err: {}'
        self.logger.error(err_msg.format(file_manager_type, traceback.format_exc()))
        data.outputs.ex_data = err_msg.format(file_manager_type, e)
        return False

    client = get_client_by_user(executor)
    ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, target_ip_list)
    ip_list = [{'ip': _ip['InnerIP'], 'bk_cloud_id': _ip['Source']} for _ip in ip_info['ip_result']]
    file_tags = [_file['tag'] for _file in local_files]

    push_result = file_manager.push_files_to_ips(
        esb_client=client,
        bk_biz_id=biz_cc_id,
        file_tags=file_tags,
        target_path=target_path,
        ips=ip_list,
        account=target_account,
        callback_url=get_node_callback_url(self.id))

    if not push_result['result']:
        data.outputs.ex_data = push_result['message']
        return False

    job_instance_id = push_result['data']['job_id']
    data.outputs.job_inst_id = job_instance_id
    data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id)
    return True
def execute(self, data, parent_data):
    executor = parent_data.get_one_of_inputs('executor')
    client = get_client_by_user(executor)
    if parent_data.get_one_of_inputs('language'):
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))

    biz_cc_id = data.get_one_of_inputs('biz_cc_id', parent_data.inputs.biz_cc_id)
    original_ip_list = data.get_one_of_inputs('job_ip_list')
    ip_info = cc_get_ips_info_by_str(username=executor,
                                     biz_cc_id=biz_cc_id,
                                     ip_str=original_ip_list,
                                     use_cache=False)
    ip_list = [{'ip': _ip['InnerIP'], 'bk_cloud_id': _ip['Source']} for _ip in ip_info['ip_result']]

    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'script_timeout': data.get_one_of_inputs('job_script_timeout'),
        'account': data.get_one_of_inputs('job_account'),
        'ip_list': ip_list,
        'bk_callback_url': get_node_callback_url(self.id)
    }
    script_param = data.get_one_of_inputs('job_script_param')
    if script_param:
        # decode() keeps the payload JSON-serializable: b64encode returns bytes
        job_kwargs.update({
            'script_param': base64.b64encode(script_param.encode('utf-8')).decode('utf-8')
        })
    script_source = data.get_one_of_inputs('job_script_source')
    if script_source in ["general", "public"]:
        job_kwargs.update({
            "script_id": data.get_one_of_inputs('job_script_list_%s' % script_source)
        })
    else:
        job_kwargs.update({
            'script_type': data.get_one_of_inputs('job_script_type'),
            'script_content': base64.b64encode(data.get_one_of_inputs('job_content').encode('utf-8')).decode('utf-8'),
        })
    job_result = client.job.fast_execute_script(job_kwargs)
    LOGGER.info('job_result: {result}, job_kwargs: {kwargs}'.format(result=job_result, kwargs=job_kwargs))
    if job_result['result']:
        job_instance_id = job_result['data']['job_instance_id']
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result['data']['job_instance_name']
        data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id)
        data.outputs.client = client
        return True
    else:
        message = job_handle_api_error('job.fast_execute_script', job_kwargs, job_result)
        self.logger.error(message)
        data.outputs.ex_data = message
        return False
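# Why the decode matters in the base64 handling above: base64.b64encode
# returns bytes, and a bytes value cannot ride in the JSON body of an ESB
# request under Python 3. A quick self-contained round-trip check:
import base64

param = "-v --retry 3"
encoded = base64.b64encode(param.encode("utf-8")).decode("utf-8")
assert isinstance(encoded, str)
assert base64.b64decode(encoded).decode("utf-8") == param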
def execute(self, data, parent_data): executor = parent_data.get_one_of_inputs("executor") client = get_client_by_user(executor) if parent_data.get_one_of_inputs("language"): setattr(client, "language", parent_data.get_one_of_inputs("language")) translation.activate(parent_data.get_one_of_inputs("language")) biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id) script_source = data.get_one_of_inputs("job_script_source") across_biz = data.get_one_of_inputs("job_across_biz", False) original_ip_list = data.get_one_of_inputs("job_ip_list") ip_is_exist = data.get_one_of_inputs("ip_is_exist") custom_task_name = data.get_one_of_inputs("custom_task_name", "") # 获取 IP clean_result, ip_list = get_biz_ip_from_frontend( original_ip_list, executor, biz_cc_id, data, self.logger, across_biz, ip_is_exist=ip_is_exist) if not clean_result: return False job_kwargs = { "bk_scope_type": JobBizScopeType.BIZ.value, "bk_scope_id": str(biz_cc_id), "bk_biz_id": biz_cc_id, "script_timeout": data.get_one_of_inputs("job_script_timeout"), "account": data.get_one_of_inputs("job_account"), "ip_list": ip_list, "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")), } if custom_task_name.strip(): job_kwargs.update({"task_name": custom_task_name}) script_param = str(data.get_one_of_inputs("job_script_param")) if script_param: job_kwargs.update({ "script_param": base64.b64encode(script_param.encode("utf-8")).decode("utf-8") }) if script_source in ["general", "public"]: script_name = data.get_one_of_inputs( "job_script_list_{}".format(script_source)) kwargs = {"script_name": script_name} if script_source == "general": kwargs.update({ "bk_scope_type": JobBizScopeType.BIZ.value, "bk_scope_id": str(biz_cc_id), "bk_biz_id": biz_cc_id, }) scripts = client.job.get_script_list(kwargs) else: scripts = client.job.get_public_script_list(kwargs) if scripts["result"] is False: api_name = "job.get_script_list" if script_source == "general" else "job.get_public_script_list" message = job_handle_api_error(api_name, job_kwargs, scripts) self.logger.error(message) data.outputs.ex_data = message return False # job V2接口使用的是模糊匹配,这里需要做一次精确匹配 script_list = scripts["data"]["data"] selected_script = None for script in script_list: if script["name"] == script_name: selected_script = script break if not selected_script: api_name = "job.get_script_list" if script_source == "general" else "job.get_public_script_list" message = job_handle_api_error(api_name, job_kwargs, scripts) message += "Data validation error: can not find a script exactly named {}".format( script_name) self.logger.error(message) data.outputs.ex_data = message return False script_id = selected_script["id"] job_kwargs.update({"script_id": script_id}) else: job_kwargs.update({ "script_type": data.get_one_of_inputs("job_script_type"), "script_content": base64.b64encode( data.get_one_of_inputs("job_content").encode( "utf-8")).decode("utf-8"), }) job_result = client.job.fast_execute_script(job_kwargs) self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format( result=job_result, kwargs=job_kwargs)) if job_result["result"]: job_instance_id = job_result["data"]["job_instance_id"] data.outputs.job_inst_id = job_instance_id data.outputs.job_inst_name = job_result["data"][ "job_instance_name"] data.outputs.job_inst_url = get_job_instance_url( biz_cc_id, job_instance_id) data.outputs.client = client return True else: message = job_handle_api_error("job.fast_execute_script", job_kwargs, job_result) 
self.logger.error(message) data.outputs.ex_data = message return False
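# `get_biz_ip_from_frontend` is shared by most of the plugins here. Its
# callers rely on a (ok, ip_list) return value and on ex_data being written on
# failure. The sketch below only illustrates that contract under those
# assumptions; it ignores the across-business and existence-check paths and is
# not the project's implementation.
def get_biz_ip_from_frontend_sketch(ip_str, executor, biz_cc_id, data, logger_handle,
                                    is_across=False, ip_is_exist=False, ignore_ex_data=False):
    ip_info = cc_get_ips_info_by_str(executor, biz_cc_id, ip_str, use_cache=False)
    ip_list = [{"ip": _ip["InnerIP"], "bk_cloud_id": _ip["Source"]} for _ip in ip_info["ip_result"]]
    # a non-empty input that resolves to nothing is treated as invalid
    if ip_str.strip() and not ip_list:
        if not ignore_ex_data:
            data.outputs.ex_data = "invalid IP: {}".format(ip_str)
        return False, []
    return True, ip_list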
def execute(self, data, parent_data): executor = parent_data.get_one_of_inputs("executor") client = get_client_by_user(executor) if parent_data.get_one_of_inputs("language"): setattr(client, "language", parent_data.get_one_of_inputs("language")) translation.activate(parent_data.get_one_of_inputs("language")) biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id) original_source_files = deepcopy(data.get_one_of_inputs("job_source_files", [])) across_biz = data.get_one_of_inputs("job_across_biz", False) upload_speed_limit = data.get_one_of_inputs("upload_speed_limit") download_speed_limit = data.get_one_of_inputs("download_speed_limit") job_timeout = data.get_one_of_inputs("job_timeout") file_source = [] for item in original_source_files: clean_source_ip_result, source_ip_list = get_biz_ip_from_frontend( item["ip"], executor, biz_cc_id, data, self.logger, across_biz ) if not clean_source_ip_result: return False file_source.append( { "files": [_file.strip() for _file in item["files"].split("\n") if _file.strip()], "ip_list": source_ip_list, "account": loose_strip(item["account"]), } ) select_method = data.get_one_of_inputs("select_method") break_line = data.get_one_of_inputs("break_line") or "," job_dispatch_attr = data.get_one_of_inputs("job_dispatch_attr") attr_list = [] for attr in job_dispatch_attr: # 如果用户选择了单行扩展 if select_method == "auto": chunk_result = chunk_table_data(attr, break_line) if not chunk_result["result"]: data.set_outputs("ex_data", chunk_result["message"]) return False attr_list.extend(chunk_result["data"]) else: # 非单行扩展的情况无需处理 attr_list.append(attr) # 拼装参数列表 params_list = [] for source in file_source: for attr in attr_list: # 将[FILESRCIP]替换成源IP job_target_path = attr["job_target_path"].replace("[FILESRCIP]", source["ip_list"][0]["ip"]).strip() # 获取目标IP original_ip_list = attr["job_ip_list"] clean_result, ip_list = get_biz_ip_from_frontend( original_ip_list, executor, biz_cc_id, data, self.logger, across_biz ) if not clean_result: return False job_kwargs = { "bk_scope_type": JobBizScopeType.BIZ.value, "bk_scope_id": str(biz_cc_id), "bk_biz_id": biz_cc_id, "file_source": [source], "ip_list": ip_list, "account": attr["job_account"], "file_target_path": job_target_path, } if upload_speed_limit: job_kwargs["upload_speed_limit"] = int(upload_speed_limit) if download_speed_limit: job_kwargs["download_speed_limit"] = int(download_speed_limit) if job_timeout: job_kwargs["timeout"] = int(job_timeout) params_list.append(job_kwargs) task_count = len(params_list) # 并发请求接口 job_result_list = batch_execute_func(client.job.fast_push_file, params_list, interval_enabled=True) job_instance_id_list, job_inst_name, job_inst_url = [], [], [] data.outputs.requests_error = "" for index, res in enumerate(job_result_list): job_result = res["result"] if job_result["result"]: job_instance_id_list.append(job_result["data"]["job_instance_id"]) job_inst_name.append(job_result["data"]["job_instance_name"]) job_inst_url.append(get_job_instance_url(biz_cc_id, job_instance_id_list)) else: message = job_handle_api_error("job.fast_push_file", params_list[index], job_result) self.logger.error(message) data.outputs.requests_error += "{}\n".format(message) if data.outputs.requests_error: data.outputs.requests_error = "Request Error:\n{}".format(data.outputs.requests_error) # 总任务数 data.outputs.task_count = task_count data.outputs.job_instance_id_list = job_instance_id_list # 批量请求使用 data.outputs.job_id_of_batch_execute = job_instance_id_list data.outputs.job_inst_url = 
[get_job_instance_url(biz_cc_id, job_id) for job_id in job_instance_id_list] # 请求成功数 data.outputs.request_success_count = len(job_instance_id_list) # 执行成功数 data.outputs.success_count = 0 # 所有请求都失败,则返回 if not data.outputs.request_success_count: data.outputs.ex_data = data.outputs.requests_error return False data.outputs.final_res = task_count == len(job_result_list) return True
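# `chunk_table_data` is defined elsewhere in the repo. The sketch below only
# illustrates the single-row expansion ("auto") behaviour the loop above relies
# on: multi-value cells are split on `break_line` and fanned out into one row
# per position. This is an assumption about its contract, not its code.
def chunk_table_data_sketch(attr, break_line):
    columns = {key: str(value).split(break_line) for key, value in attr.items()}
    lengths = {len(values) for values in columns.values() if len(values) > 1}
    if len(lengths) > 1:
        # expandable columns must agree on how many rows they produce
        return {"result": False, "data": [], "message": "column lengths do not match"}
    row_count = lengths.pop() if lengths else 1
    rows = [
        {key: (values[index] if len(values) > 1 else values[0]).strip()
         for key, values in columns.items()}
        for index in range(row_count)
    ]
    return {"result": True, "data": rows, "message": ""}

# e.g. one row with two IPs expands to two rows sharing the other cells:
assert chunk_table_data_sketch(
    {"job_ip_list": "10.0.0.1,10.0.0.2", "job_account": "root", "job_target_path": "/tmp"}, ","
)["data"] == [
    {"job_ip_list": "10.0.0.1", "job_account": "root", "job_target_path": "/tmp"},
    {"job_ip_list": "10.0.0.2", "job_account": "root", "job_target_path": "/tmp"},
]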
def execute(self, data, parent_data): executor = parent_data.get_one_of_inputs("executor") client = get_client_by_user(executor) ip_is_exist = data.get_one_of_inputs("ip_is_exist") if parent_data.get_one_of_inputs("language"): setattr(client, "language", parent_data.get_one_of_inputs("language")) translation.activate(parent_data.get_one_of_inputs("language")) biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id) original_ip_list = data.get_one_of_inputs("job_ip_list") # 获取IP clean_result, ip_list = get_biz_ip_from_frontend( original_ip_list, executor, biz_cc_id, data, self.logger, is_across=False, ip_is_exist=ip_is_exist) if not clean_result: return False job_kwargs = { "bk_scope_type": JobBizScopeType.BIZ.value, "bk_scope_id": str(biz_cc_id), "bk_biz_id": biz_cc_id, "script_timeout": data.get_one_of_inputs("job_script_timeout"), "account": data.get_one_of_inputs("job_account"), "ip_list": ip_list, "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")), } script_param = str(data.get_one_of_inputs("job_script_param")) if script_param: job_kwargs.update({ "script_param": base64.b64encode(script_param.encode("utf-8")).decode("utf-8") }) script_source = data.get_one_of_inputs("job_script_source") if script_source in ["general", "public"]: job_kwargs.update({ "script_id": data.get_one_of_inputs("job_script_list_%s" % script_source) }) else: job_kwargs.update({ "script_type": data.get_one_of_inputs("job_script_type"), "script_content": base64.b64encode( data.get_one_of_inputs("job_content").encode( "utf-8")).decode("utf-8"), }) job_result = client.job.fast_execute_script(job_kwargs) self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format( result=job_result, kwargs=job_kwargs)) if job_result["result"]: job_instance_id = job_result["data"]["job_instance_id"] data.outputs.job_inst_id = job_instance_id data.outputs.job_inst_name = job_result["data"][ "job_instance_name"] data.outputs.job_inst_url = get_job_instance_url( biz_cc_id, job_instance_id) data.outputs.client = client return True else: message = job_handle_api_error("job.fast_execute_script", job_kwargs, job_result) self.logger.error(message) data.outputs.ex_data = message return False
def execute(self, data, parent_data): executor = parent_data.get_one_of_inputs("executor") client = get_client_by_user(executor) client.set_bk_api_ver("v2") if parent_data.get_one_of_inputs("language"): setattr(client, "language", parent_data.get_one_of_inputs("language")) translation.activate(parent_data.get_one_of_inputs("language")) biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id) original_global_var = deepcopy(data.get_one_of_inputs("job_global_var")) global_vars = [] ip_is_exist = data.get_one_of_inputs("ip_is_exist") biz_across = data.get_one_of_inputs("biz_across") for _value in original_global_var: val = loose_strip(_value["value"]) # category为3,表示变量类型为IP if _value["category"] == 3: if biz_across: result, ip_list = get_biz_ip_from_frontend( ip_str=val, executor=executor, biz_cc_id=biz_cc_id, data=data, logger_handle=self.logger, is_across=True, ip_is_exist=ip_is_exist, ignore_ex_data=True, ) # 匹配不到云区域IP格式IP,尝试从当前业务下获取 if not result: result, ip_list = get_biz_ip_from_frontend( ip_str=val, executor=executor, biz_cc_id=biz_cc_id, data=data, logger_handle=self.logger, is_across=False, ip_is_exist=ip_is_exist, ) if not result: return False else: result, ip_list = get_biz_ip_from_frontend( ip_str=val, executor=executor, biz_cc_id=biz_cc_id, data=data, logger_handle=self.logger, is_across=False, ip_is_exist=ip_is_exist, ) if not result: return False if ip_list: global_vars.append({"name": _value["name"], "ip_list": ip_list}) else: global_vars.append({"name": _value["name"], "value": val}) job_kwargs = { "bk_scope_type": self.biz_scope_type, "bk_scope_id": str(biz_cc_id), "bk_biz_id": biz_cc_id, "bk_job_id": data.get_one_of_inputs("job_task_id"), "global_vars": global_vars, "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")), } job_result = client.job.execute_job(job_kwargs) self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(result=job_result, kwargs=job_kwargs)) if job_result["result"]: job_instance_id = job_result["data"]["job_instance_id"] data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id) data.outputs.job_inst_id = job_instance_id data.outputs.job_inst_name = job_result["data"]["job_instance_name"] data.outputs.client = client return True else: message = job_handle_api_error("job.execute_job", job_kwargs, job_result) self.logger.error(message) data.outputs.ex_data = message return False
def execute(self, data, parent_data):
    executor = parent_data.inputs.executor
    biz_cc_id = data.inputs.biz_cc_id
    local_files_and_target_path = data.inputs.job_local_files_info["job_push_multi_local_files_table"]
    target_ip_list = data.inputs.job_target_ip_list
    target_account = data.inputs.job_target_account
    across_biz = data.get_one_of_inputs("job_across_biz", False)
    job_timeout = data.get_one_of_inputs("job_timeout")
    task_count = len(local_files_and_target_path)

    file_manager_type = EnvironmentVariables.objects.get_var("BKAPP_FILE_MANAGER_TYPE")
    if not file_manager_type:
        data.outputs.ex_data = "File Manager configuration error, contact administrator please."
        return False
    try:
        file_manager = ManagerFactory.get_manager(manager_type=file_manager_type)
    except Exception as e:
        err_msg = "can not get file manager for type: {}\n err: {}"
        self.logger.error(err_msg.format(file_manager_type, traceback.format_exc()))
        data.outputs.ex_data = err_msg.format(file_manager_type, e)
        return False

    client = get_client_by_user(executor)

    # filter cross-business IPs
    clean_result, ip_list = get_biz_ip_from_frontend(
        target_ip_list, executor, biz_cc_id, data, self.logger, across_biz)
    if not clean_result:
        return False

    params_list = [
        {
            "esb_client": client,
            "bk_biz_id": biz_cc_id,
            "file_tags": [
                _file["response"]["tag"]
                for _file in push_files_info["file_info"]
                if _file["response"]["result"] is True
            ],
            "target_path": push_files_info["target_path"],
            "ips": ip_list,
            "account": target_account,
        }
        for push_files_info in local_files_and_target_path
    ]
    if job_timeout:
        for param in params_list:
            param["timeout"] = int(job_timeout)

    # upload in batch
    if len(params_list) == task_count:
        push_results = batch_execute_func(file_manager.push_files_to_ips, params_list, interval_enabled=True)
    else:
        data.outputs.ex_data = _("执行参数为空,请确认")
        return False

    # check the request results
    job_instance_id_list = []
    data.outputs.requests_error = ""
    for push_object in push_results:
        push_result = push_object["result"]
        if not push_result["result"]:
            err_message = job_handle_api_error(push_result["job_api"], push_result["kwargs"], push_result["response"])
            self.logger.error(err_message)
            data.outputs.requests_error += "{}\n".format(err_message)
        else:
            job_instance_id_list.append(push_result["data"]["job_id"])
    if data.outputs.requests_error:
        data.outputs.requests_error = "Request Error:\n{}".format(data.outputs.requests_error)

    data.outputs.job_instance_id_list = job_instance_id_list
    # kept for batch polling in schedule
    data.outputs.job_id_of_batch_execute = job_instance_id_list
    data.outputs.job_inst_url = [get_job_instance_url(biz_cc_id, job_id) for job_id in job_instance_id_list]
    # total number of sub-tasks
    data.outputs.task_count = task_count
    # number of successful requests
    data.outputs.request_success_count = len(job_instance_id_list)
    # number of successfully executed sub-tasks
    data.outputs.success_count = 0
    # bail out if every request failed
    if not data.outputs.request_success_count:
        data.outputs.ex_data = data.outputs.requests_error
        return False
    # overall result
    data.outputs.final_res = task_count == len(job_instance_id_list)
    return True
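# `batch_execute_func` is imported from the project's utils. Its callers above
# rely on getting one {"params", "result"} entry per request and on optional
# pacing between calls. A minimal thread-pool sketch of that contract under
# those assumptions (not the project's implementation):
from concurrent.futures import ThreadPoolExecutor
import time

def batch_execute_func_sketch(func, params_list, interval_enabled=False):
    def _call(params):
        if interval_enabled:
            # crude pacing so the API is not hammered by simultaneous calls
            time.sleep(0.1)
        return {"params": params, "result": func(params)}

    # fan the requests out over a bounded worker pool, preserving input order
    with ThreadPoolExecutor(max_workers=10) as pool:
        return list(pool.map(_call, params_list))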
def execute(self, data, parent_data): executor = parent_data.get_one_of_inputs("executor") client = get_client_by_user(executor) if parent_data.get_one_of_inputs("language"): setattr(client, "language", parent_data.get_one_of_inputs("language")) translation.activate(parent_data.get_one_of_inputs("language")) biz_cc_id = int(data.get_one_of_inputs("all_biz_cc_id")) data.inputs.biz_cc_id = biz_cc_id upload_speed_limit = data.get_one_of_inputs("upload_speed_limit") download_speed_limit = data.get_one_of_inputs("download_speed_limit") job_timeout = data.get_one_of_inputs("job_timeout") file_source = [{ "files": [ _file.strip() for _file in item["files"].split("\n") if _file.strip() ], "ip_list": [{ "ip": item["ip"], "bk_cloud_id": int(item["bk_cloud_id"]) if item["bk_cloud_id"] else 0 }], "account": loose_strip(item["account"]), } for item in data.get_one_of_inputs("job_source_files", [])] # 拼装参数列表 params_list = [] for source in file_source: for attr in data.get_one_of_inputs("job_dispatch_attr"): job_account = attr["job_target_account"] job_target_path = attr["job_target_path"] ip_list = [{ "ip": _ip, "bk_cloud_id": int(attr["bk_cloud_id"]) if attr["bk_cloud_id"] else 0 } for _ip in get_ip_by_regex(attr["job_ip_list"])] job_kwargs = { "bk_scope_type": JobBizScopeType.BIZ_SET.value, "bk_scope_id": str(biz_cc_id), "bk_biz_id": biz_cc_id, "file_source": [source], "ip_list": ip_list, "account": job_account, "file_target_path": job_target_path, } if upload_speed_limit: job_kwargs["upload_speed_limit"] = int(upload_speed_limit) if download_speed_limit: job_kwargs["download_speed_limit"] = int( download_speed_limit) if job_timeout: job_kwargs["timeout"] = int(job_timeout) params_list.append(job_kwargs) task_count = len(params_list) # 并发请求接口 job_result_list = batch_execute_func(client.job.fast_push_file, params_list, interval_enabled=True) job_instance_id_list, job_inst_name, job_inst_url = [], [], [] data.outputs.requests_error = "" for index, res in enumerate(job_result_list): job_result = res["result"] if job_result["result"]: job_instance_id_list.append( job_result["data"]["job_instance_id"]) job_inst_name.append(job_result["data"]["job_instance_name"]) job_inst_url.append( get_job_instance_url(biz_cc_id, job_instance_id_list)) else: message = job_handle_api_error("job.fast_push_file", params_list[index], job_result) self.logger.error(message) data.outputs.requests_error += "{}\n".format(message) if data.outputs.requests_error: data.outputs.requests_error = "Request Error:\n{}".format( data.outputs.requests_error) # 总任务数 data.outputs.task_count = task_count data.outputs.job_instance_id_list = job_instance_id_list # 批量请求使用 data.outputs.job_id_of_batch_execute = job_instance_id_list data.outputs.job_inst_url = [ get_job_instance_url(biz_cc_id, job_id) for job_id in job_instance_id_list ] # 请求成功数 data.outputs.request_success_count = len(job_instance_id_list) # 执行成功数 data.outputs.success_count = 0 # 所有请求都失败,则返回 if not data.outputs.request_success_count: data.outputs.ex_data = data.outputs.requests_error return False data.outputs.final_res = task_count == len(job_instance_id_list) return True
def execute(self, data, parent_data):
    executor = parent_data.inputs.executor
    biz_cc_id = data.inputs.biz_cc_id
    local_files = data.inputs.job_local_files
    target_ip_list = data.inputs.job_target_ip_list
    target_account = data.inputs.job_target_account
    target_path = data.inputs.job_target_path
    across_biz = data.get_one_of_inputs("job_across_biz", False)

    file_manager_type = EnvironmentVariables.objects.get_var("BKAPP_FILE_MANAGER_TYPE")
    if not file_manager_type:
        data.outputs.ex_data = "File Manager configuration error, contact administrator please."
        return False
    try:
        file_manager = ManagerFactory.get_manager(manager_type=file_manager_type)
    except Exception as e:
        err_msg = "can not get file manager for type: {}\n err: {}"
        self.logger.error(err_msg.format(file_manager_type, traceback.format_exc()))
        data.outputs.ex_data = err_msg.format(file_manager_type, e)
        return False

    client = get_client_by_user(executor)

    # filter cross-business IPs
    clean_result, ip_list = get_biz_ip_from_frontend(
        target_ip_list, executor, biz_cc_id, data, self.logger, across_biz)
    if not clean_result:
        return False

    # files that failed to upload are filtered out automatically here
    file_tags = [
        _file["response"]["tag"]
        for _file in local_files
        if _file["response"]["result"] is True
    ]

    push_result = file_manager.push_files_to_ips(
        esb_client=client,
        bk_biz_id=biz_cc_id,
        file_tags=file_tags,
        target_path=target_path,
        ips=ip_list,
        account=target_account,
        callback_url=get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")),
    )
    if not push_result["result"]:
        err_message = job_handle_api_error(push_result["job_api"], push_result["kwargs"], push_result["response"])
        self.logger.error(err_message)
        data.outputs.ex_data = err_message
        return False

    job_instance_id = push_result["data"]["job_id"]
    data.outputs.job_inst_id = job_instance_id
    data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id)
    return True
def execute(self, data, parent_data): executor = parent_data.get_one_of_inputs("executor") client = get_client_by_user(executor) client.set_bk_api_ver("v2") if parent_data.get_one_of_inputs("language"): setattr(client, "language", parent_data.get_one_of_inputs("language")) translation.activate(parent_data.get_one_of_inputs("language")) biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id) original_source_files = deepcopy( data.get_one_of_inputs("job_source_files", [])) across_biz = data.get_one_of_inputs("job_across_biz", False) ip_is_exist = data.get_one_of_inputs("ip_is_exist") file_source = [] for item in original_source_files: # filter 跨业务 IP clean_source_ip_result, source_ip_list = get_biz_ip_from_frontend( item["ip"], executor, biz_cc_id, data, self.logger, across_biz) if not clean_source_ip_result: return False file_source.append({ "files": [ _file.strip() for _file in item["files"].split("\n") if _file.strip() ], "ip_list": source_ip_list, "account": loose_strip(item["account"]), }) # 获取目标IP original_ip_list = data.get_one_of_inputs("job_ip_list") clean_result, ip_list = get_biz_ip_from_frontend( original_ip_list, executor, biz_cc_id, data, self.logger, is_across=False, ip_is_exist=ip_is_exist) if not clean_result: return False job_timeout = data.get_one_of_inputs("job_timeout") job_kwargs = { "bk_scope_type": JobBizScopeType.BIZ.value, "bk_scope_id": str(biz_cc_id), "bk_biz_id": biz_cc_id, "file_source": file_source, "ip_list": ip_list, "account": data.get_one_of_inputs("job_account"), "file_target_path": data.get_one_of_inputs("job_target_path"), "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")), } if job_timeout: job_kwargs["timeout"] = int(job_timeout) job_result = client.job.fast_push_file(job_kwargs) self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format( result=job_result, kwargs=job_kwargs)) if job_result["result"]: job_instance_id = job_result["data"]["job_instance_id"] data.outputs.job_inst_id = job_instance_id data.outputs.job_inst_name = job_result["data"][ "job_instance_name"] data.outputs.job_inst_url = get_job_instance_url( biz_cc_id, job_instance_id) data.outputs.client = client return True else: message = job_handle_api_error("job.fast_push_file", job_kwargs, job_result) self.logger.error(message) data.outputs.ex_data = message return False