def execute(self, data, parent_data):
    """Start a pre-defined JOB task with rendered global variables.

    Reads the task id and global variables from ``data``, resolves IP-typed
    variables through CMDB, dispatches ``job.execute_job`` and records the
    job instance info in ``data.outputs``.

    :param data: node data (inputs: job_global_var, job_task_id)
    :param parent_data: pipeline data (inputs: executor, biz_cc_id, language)
    :return: True if the job was accepted, False otherwise (ex_data is set)
    """
    executor = parent_data.get_one_of_inputs('executor')
    biz_cc_id = parent_data.get_one_of_inputs('biz_cc_id')
    client = get_client_by_user(executor)
    client.set_bk_api_ver('v2')
    # Propagate the caller's language so API/error messages are localized.
    if parent_data.get_one_of_inputs('language'):
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))
    original_global_var = data.get_one_of_inputs('job_global_var')
    global_vars = []
    for _value in original_global_var:
        # variable type: 1 - string, 2 - IP
        if _value['type'] == 2:
            var_ip = cc_get_ips_info_by_str(username=executor,
                                            biz_cc_id=biz_cc_id,
                                            ip_str=_value['value'],
                                            use_cache=False)
            ip_list = [{
                'ip': _ip['InnerIP'],
                'bk_cloud_id': _ip['Source']
            } for _ip in var_ip['ip_result']]
            # A non-empty IP expression that resolves to no hosts is an error.
            if _value['value'].strip() and not ip_list:
                data.outputs.ex_data = _(
                    u"无法从配置平台(CMDB)查询到对应 IP,请确认输入的 IP 是否合法")
                return False
            if ip_list:
                global_vars.append({
                    'name': _value['name'],
                    'ip_list': ip_list,
                })
        else:
            # string variable: pass the stripped value through
            global_vars.append({
                'name': _value['name'],
                'value': str(_value['value']).strip(),
            })
    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'bk_job_id': data.get_one_of_inputs('job_task_id'),
        'global_vars': global_vars,
        'bk_callback_url': get_node_callback_url(self.id)
    }
    job_result = client.job.execute_job(job_kwargs)
    LOGGER.info('job_result: {result}, job_kwargs: {kwargs}'.format(
        result=job_result, kwargs=job_kwargs))
    if job_result['result']:
        job_instance_id = job_result['data']['job_instance_id']
        data.outputs.job_inst_url = get_job_instance_url(
            parent_data.inputs.biz_cc_id, job_instance_id)
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result['data'][
            'job_instance_name']
        # expose the client for the schedule/callback phase
        data.outputs.client = client
        return True
    else:
        data.outputs.ex_data = job_result['message']
        return False
def execute(self, data, parent_data):
    """Assemble file_source / ip_list from the node inputs and start a
    JOB fast_push_file task, recording the job instance in outputs."""
    executor = parent_data.get_one_of_inputs('executor')
    biz_cc_id = parent_data.get_one_of_inputs('biz_cc_id')
    client = get_client_by_user(executor)
    client.set_bk_api_ver('v2')
    language = parent_data.get_one_of_inputs('language')
    if language:
        setattr(client, 'language', language)
        translation.activate(language)

    def _as_ip_list(cc_result):
        # CMDB lookup result -> JOB ip_list payload
        return [{'ip': host['InnerIP'], 'bk_cloud_id': host['Source']}
                for host in cc_result['ip_result']]

    file_source = []
    for source_item in data.get_one_of_inputs('job_source_files', []):
        source_host_info = cc_get_ips_info_by_str(username=executor,
                                                  biz_cc_id=biz_cc_id,
                                                  ip_str=source_item['ip'],
                                                  use_cache=False)
        file_source.append({
            'files': str(source_item['files']).strip().split("\n"),
            'ip_list': _as_ip_list(source_host_info),
            'account': str(source_item['account']).strip(),
        })

    target_host_info = cc_get_ips_info_by_str(
        executor, biz_cc_id, data.get_one_of_inputs('job_ip_list'))
    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'file_source': file_source,
        'ip_list': _as_ip_list(target_host_info),
        'account': data.get_one_of_inputs('job_account'),
        'file_target_path': data.get_one_of_inputs('job_target_path'),
        'bk_callback_url': get_node_callback_url(self.id)
    }
    job_result = client.job.fast_push_file(job_kwargs)
    LOGGER.info('job_result: {result}, job_kwargs: {kwargs}'.format(
        result=job_result, kwargs=job_kwargs))
    if not job_result['result']:
        data.outputs.ex_data = job_result['message']
        return False
    job_instance_id = job_result['data']['job_instance_id']
    data.outputs.job_inst_id = job_instance_id
    data.outputs.job_inst_name = job_result['data']['job_instance_name']
    data.outputs.job_inst_url = get_job_instance_url(
        parent_data.inputs.biz_cc_id, job_instance_id)
    data.outputs.client = client
    return True
def execute(self, data, parent_data):
    """Start a JOB fast_push_file task from the node's file/IP inputs,
    storing the resulting job instance info in ``data.outputs``."""
    executor = parent_data.get_one_of_inputs("executor")
    client = get_client_by_user(executor)
    client.set_bk_api_ver("v2")
    language = parent_data.get_one_of_inputs("language")
    if language:
        setattr(client, "language", language)
        translation.activate(language)
    biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id)

    def _hosts_of(cc_result):
        # CMDB lookup result -> JOB ip_list payload
        return [{"ip": host["InnerIP"], "bk_cloud_id": host["Source"]} for host in cc_result["ip_result"]]

    file_source = []
    for source_item in deepcopy(data.get_one_of_inputs("job_source_files", [])):
        source_hosts = cc_get_ips_info_by_str(
            username=executor,
            biz_cc_id=biz_cc_id,
            ip_str=source_item["ip"],
            use_cache=False,
        )
        # keep only non-blank file paths, one per input line
        file_paths = [line.strip() for line in source_item["files"].split("\n") if line.strip()]
        file_source.append(
            {
                "files": file_paths,
                "ip_list": _hosts_of(source_hosts),
                "account": loose_strip(source_item["account"]),
            }
        )

    target_hosts = cc_get_ips_info_by_str(executor, biz_cc_id, data.get_one_of_inputs("job_ip_list"))
    job_kwargs = {
        "bk_scope_type": JobBizScopeType.BIZ.value,
        "bk_scope_id": str(biz_cc_id),
        "bk_biz_id": biz_cc_id,
        "file_source": file_source,
        "ip_list": _hosts_of(target_hosts),
        "account": data.get_one_of_inputs("job_account"),
        "file_target_path": data.get_one_of_inputs("job_target_path"),
        "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")),
    }
    job_timeout = data.get_one_of_inputs("job_timeout")
    if job_timeout:
        job_kwargs["timeout"] = int(job_timeout)

    job_result = client.job.fast_push_file(job_kwargs)
    self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(result=job_result, kwargs=job_kwargs))
    if not job_result["result"]:
        message = job_handle_api_error("job.fast_push_file", job_kwargs, job_result)
        self.logger.error(message)
        data.outputs.ex_data = message
        return False
    job_instance_id = job_result["data"]["job_instance_id"]
    data.outputs.job_inst_id = job_instance_id
    data.outputs.job_inst_name = job_result["data"]["job_instance_name"]
    data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id)
    data.outputs.client = client
    return True
def execute(self, data, parent_data):
    """Push locally-uploaded files to target hosts via the configured
    file-manager backend, recording the job instance in outputs."""
    executor = parent_data.inputs.executor
    biz_cc_id = data.inputs.biz_cc_id
    local_files = data.inputs.job_local_files
    target_ip_list = data.inputs.job_target_ip_list
    target_account = data.inputs.job_target_account
    target_path = data.inputs.job_target_path

    # The backend type is configured via an environment variable.
    manager_type = EnvironmentVariables.objects.get_var(
        'BKAPP_FILE_MANAGER_TYPE')
    if not manager_type:
        data.outputs.ex_data = 'File Manager configuration error, contact administrator please.'
        return False
    err_msg = 'can not get file manager for type: {}\n err: {}'
    try:
        file_manager = ManagerFactory.get_manager(manager_type=manager_type)
    except Exception as exc:
        self.logger.error(err_msg.format(manager_type, traceback.format_exc()))
        data.outputs.ex_data = err_msg.format(manager_type, exc)
        return False

    client = get_client_by_user(executor)
    cc_hosts = cc_get_ips_info_by_str(executor, biz_cc_id, target_ip_list)
    target_hosts = [{'ip': host['InnerIP'], 'bk_cloud_id': host['Source']}
                    for host in cc_hosts['ip_result']]
    tags = [uploaded['tag'] for uploaded in local_files]
    push_result = file_manager.push_files_to_ips(
        esb_client=client,
        bk_biz_id=biz_cc_id,
        file_tags=tags,
        target_path=target_path,
        ips=target_hosts,
        account=target_account,
        callback_url=get_node_callback_url(self.id))
    if not push_result['result']:
        data.outputs.ex_data = push_result['message']
        return False
    job_instance_id = push_result['data']['job_id']
    data.outputs.job_inst_id = job_instance_id
    data.outputs.job_inst_url = get_job_instance_url(biz_cc_id,
                                                     job_instance_id)
    return True
def execute(self, data, parent_data):
    """Create a fast-approval ITSM ticket and store its sn in outputs."""
    executor = parent_data.get_one_of_inputs("executor")
    client = BKItsmClient(username=executor)
    # approver list must not contain spaces
    approvers = data.get_one_of_inputs("bk_verifier").replace(" ", "")
    ticket_fields = [
        {"key": "title", "value": data.get_one_of_inputs("bk_approve_title")},
        {"key": "APPROVER", "value": approvers},
        {"key": "APPROVAL_CONTENT", "value": data.get_one_of_inputs("bk_approve_content")},
    ]
    callback_url = get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", ""))
    kwargs = {
        "creator": executor,
        "fields": ticket_fields,
        "fast_approval": True,
        "meta": {"callback_url": callback_url},
    }
    result = client.create_ticket(**kwargs)
    if result["result"]:
        data.outputs.sn = result["data"]["sn"]
        return True
    message = handle_api_error(__group_name__, "itsm.create_ticket", kwargs, result)
    self.logger.error(message)
    data.outputs.ex_data = message
    return False
def execute(self, data, parent_data):
    """Start a JOB fast_execute_script task.

    Resolves the target IP list via CMDB, assembles the script payload
    (either a referenced script id or inline base64-encoded content) and
    dispatches it, writing the job instance info to ``data.outputs``.

    :param data: node data (script source/content/param, IPs, account, timeout)
    :param parent_data: pipeline data (executor, biz_cc_id, language)
    :return: True if the job was accepted, False otherwise (ex_data is set)
    """
    executor = parent_data.get_one_of_inputs('executor')
    client = get_client_by_user(executor)
    # Propagate the caller's language so API/error messages are localized.
    if parent_data.get_one_of_inputs('language'):
        setattr(client, 'language', parent_data.get_one_of_inputs('language'))
        translation.activate(parent_data.get_one_of_inputs('language'))
    biz_cc_id = data.get_one_of_inputs('biz_cc_id', parent_data.inputs.biz_cc_id)
    original_ip_list = data.get_one_of_inputs('job_ip_list')
    ip_info = cc_get_ips_info_by_str(username=executor,
                                     biz_cc_id=biz_cc_id,
                                     ip_str=original_ip_list,
                                     use_cache=False)
    ip_list = [{
        'ip': _ip['InnerIP'],
        'bk_cloud_id': _ip['Source']
    } for _ip in ip_info['ip_result']]
    job_kwargs = {
        'bk_biz_id': biz_cc_id,
        'script_timeout': data.get_one_of_inputs('job_script_timeout'),
        'account': data.get_one_of_inputs('job_account'),
        'ip_list': ip_list,
        'bk_callback_url': get_node_callback_url(self.id)
    }
    script_param = data.get_one_of_inputs('job_script_param')
    if script_param:
        # BUGFIX: b64encode returns bytes under Python 3; decode so the
        # payload is JSON-serializable (consistent with the other script
        # components in this module, which already decode).
        job_kwargs.update({
            'script_param': base64.b64encode(
                script_param.encode('utf-8')).decode('utf-8')
        })
    script_source = data.get_one_of_inputs('job_script_source')
    if script_source in ["general", "public"]:
        # reference an existing script by id
        job_kwargs.update({
            "script_id": data.get_one_of_inputs('job_script_list_%s' % script_source)
        })
    else:
        # inline script: send type and base64-encoded content
        job_kwargs.update({
            'script_type': data.get_one_of_inputs('job_script_type'),
            'script_content': base64.b64encode(
                data.get_one_of_inputs('job_content').encode('utf-8')).decode('utf-8'),
        })
    job_result = client.job.fast_execute_script(job_kwargs)
    LOGGER.info('job_result: {result}, job_kwargs: {kwargs}'.format(
        result=job_result, kwargs=job_kwargs))
    if job_result['result']:
        job_instance_id = job_result['data']['job_instance_id']
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result['data'][
            'job_instance_name']
        data.outputs.job_inst_url = get_job_instance_url(
            biz_cc_id, job_instance_id)
        # expose the client for the schedule/callback phase
        data.outputs.client = client
        return True
    else:
        message = job_handle_api_error('job.fast_execute_script',
                                       job_kwargs, job_result)
        self.logger.error(message)
        data.outputs.ex_data = message
        return False
def execute(self, data, parent_data):
    """Start a JOB fast_execute_script task.

    For scripts referenced from a general/public script list, the JOB V2
    list API does fuzzy name matching, so the returned scripts are filtered
    for an exact name match before the id is used.

    :param data: node data (script source/content/param, IPs, account, ...)
    :param parent_data: pipeline data (executor, biz_cc_id, language)
    :return: True if the job was accepted, False otherwise (ex_data is set)
    """
    executor = parent_data.get_one_of_inputs("executor")
    client = get_client_by_user(executor)
    if parent_data.get_one_of_inputs("language"):
        setattr(client, "language", parent_data.get_one_of_inputs("language"))
        translation.activate(parent_data.get_one_of_inputs("language"))
    biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id)
    script_source = data.get_one_of_inputs("job_script_source")
    across_biz = data.get_one_of_inputs("job_across_biz", False)
    original_ip_list = data.get_one_of_inputs("job_ip_list")
    ip_is_exist = data.get_one_of_inputs("ip_is_exist")
    custom_task_name = data.get_one_of_inputs("custom_task_name", "")
    # resolve target IPs from the frontend input
    clean_result, ip_list = get_biz_ip_from_frontend(
        original_ip_list, executor, biz_cc_id, data, self.logger, across_biz,
        ip_is_exist=ip_is_exist)
    if not clean_result:
        return False
    job_kwargs = {
        "bk_scope_type": JobBizScopeType.BIZ.value,
        "bk_scope_id": str(biz_cc_id),
        "bk_biz_id": biz_cc_id,
        "script_timeout": data.get_one_of_inputs("job_script_timeout"),
        "account": data.get_one_of_inputs("job_account"),
        "ip_list": ip_list,
        "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id,
                                                 getattr(self, "version", "")),
    }
    if custom_task_name.strip():
        job_kwargs.update({"task_name": custom_task_name})
    # BUGFIX: test the raw input before stringifying — the original
    # str(...) wrapper turned a missing param (None) into the truthy
    # string "None", which was then sent as a real script parameter.
    script_param = data.get_one_of_inputs("job_script_param")
    if script_param:
        job_kwargs.update({
            "script_param": base64.b64encode(
                str(script_param).encode("utf-8")).decode("utf-8")
        })
    if script_source in ["general", "public"]:
        script_name = data.get_one_of_inputs(
            "job_script_list_{}".format(script_source))
        api_name = "job.get_script_list" if script_source == "general" else "job.get_public_script_list"
        kwargs = {"script_name": script_name}
        if script_source == "general":
            kwargs.update({
                "bk_scope_type": JobBizScopeType.BIZ.value,
                "bk_scope_id": str(biz_cc_id),
                "bk_biz_id": biz_cc_id,
            })
            scripts = client.job.get_script_list(kwargs)
        else:
            scripts = client.job.get_public_script_list(kwargs)
        if scripts["result"] is False:
            message = job_handle_api_error(api_name, job_kwargs, scripts)
            self.logger.error(message)
            data.outputs.ex_data = message
            return False
        # the JOB V2 list API fuzzy-matches by name; require an exact match
        selected_script = None
        for script in scripts["data"]["data"]:
            if script["name"] == script_name:
                selected_script = script
                break
        if not selected_script:
            message = job_handle_api_error(api_name, job_kwargs, scripts)
            message += "Data validation error: can not find a script exactly named {}".format(
                script_name)
            self.logger.error(message)
            data.outputs.ex_data = message
            return False
        job_kwargs.update({"script_id": selected_script["id"]})
    else:
        # inline script: send type and base64-encoded content
        job_kwargs.update({
            "script_type": data.get_one_of_inputs("job_script_type"),
            "script_content": base64.b64encode(
                data.get_one_of_inputs("job_content").encode(
                    "utf-8")).decode("utf-8"),
        })
    job_result = client.job.fast_execute_script(job_kwargs)
    self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(
        result=job_result, kwargs=job_kwargs))
    if job_result["result"]:
        job_instance_id = job_result["data"]["job_instance_id"]
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result["data"]["job_instance_name"]
        data.outputs.job_inst_url = get_job_instance_url(
            biz_cc_id, job_instance_id)
        # expose the client for the schedule/callback phase
        data.outputs.client = client
        return True
    message = job_handle_api_error("job.fast_execute_script", job_kwargs,
                                   job_result)
    self.logger.error(message)
    data.outputs.ex_data = message
    return False
def execute(self, data, parent_data):
    """Start a JOB fast_execute_script task against frontend-supplied IPs.

    :param data: node data (script source/content/param, IPs, account, ...)
    :param parent_data: pipeline data (executor, biz_cc_id, language)
    :return: True if the job was accepted, False otherwise (ex_data is set)
    """
    executor = parent_data.get_one_of_inputs("executor")
    client = get_client_by_user(executor)
    ip_is_exist = data.get_one_of_inputs("ip_is_exist")
    if parent_data.get_one_of_inputs("language"):
        setattr(client, "language", parent_data.get_one_of_inputs("language"))
        translation.activate(parent_data.get_one_of_inputs("language"))
    biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id)
    original_ip_list = data.get_one_of_inputs("job_ip_list")
    # resolve target IPs (current business only)
    clean_result, ip_list = get_biz_ip_from_frontend(
        original_ip_list, executor, biz_cc_id, data, self.logger,
        is_across=False, ip_is_exist=ip_is_exist)
    if not clean_result:
        return False
    job_kwargs = {
        "bk_scope_type": JobBizScopeType.BIZ.value,
        "bk_scope_id": str(biz_cc_id),
        "bk_biz_id": biz_cc_id,
        "script_timeout": data.get_one_of_inputs("job_script_timeout"),
        "account": data.get_one_of_inputs("job_account"),
        "ip_list": ip_list,
        "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id,
                                                 getattr(self, "version", "")),
    }
    # BUGFIX: test the raw input before stringifying — str(None) is the
    # truthy string "None" and would be sent as a real script parameter.
    script_param = data.get_one_of_inputs("job_script_param")
    if script_param:
        job_kwargs.update({
            "script_param": base64.b64encode(
                str(script_param).encode("utf-8")).decode("utf-8")
        })
    script_source = data.get_one_of_inputs("job_script_source")
    if script_source in ["general", "public"]:
        # reference an existing script by id
        job_kwargs.update({
            "script_id": data.get_one_of_inputs("job_script_list_%s" % script_source)
        })
    else:
        # inline script: send type and base64-encoded content
        job_kwargs.update({
            "script_type": data.get_one_of_inputs("job_script_type"),
            "script_content": base64.b64encode(
                data.get_one_of_inputs("job_content").encode(
                    "utf-8")).decode("utf-8"),
        })
    job_result = client.job.fast_execute_script(job_kwargs)
    self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(
        result=job_result, kwargs=job_kwargs))
    if job_result["result"]:
        job_instance_id = job_result["data"]["job_instance_id"]
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result["data"]["job_instance_name"]
        data.outputs.job_inst_url = get_job_instance_url(
            biz_cc_id, job_instance_id)
        # expose the client for the schedule/callback phase
        data.outputs.client = client
        return True
    message = job_handle_api_error("job.fast_execute_script", job_kwargs,
                                   job_result)
    self.logger.error(message)
    data.outputs.ex_data = message
    return False
def execute(self, data, parent_data):
    """Start a pre-defined JOB execution plan with rendered global variables.

    IP-category variables are resolved through the frontend IP parser —
    optionally trying the cross-business (cloud-area) format first — and
    other variables are passed through loosely stripped.

    :param data: node data (job_global_var, job_task_id, biz_across, ...)
    :param parent_data: pipeline data (executor, biz_cc_id, language)
    :return: True if the job was accepted, False otherwise (ex_data is set)
    """
    executor = parent_data.get_one_of_inputs("executor")
    client = get_client_by_user(executor)
    client.set_bk_api_ver("v2")
    # Propagate the caller's language so API/error messages are localized.
    if parent_data.get_one_of_inputs("language"):
        setattr(client, "language", parent_data.get_one_of_inputs("language"))
        translation.activate(parent_data.get_one_of_inputs("language"))
    biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id)
    original_global_var = deepcopy(data.get_one_of_inputs("job_global_var"))
    global_vars = []
    ip_is_exist = data.get_one_of_inputs("ip_is_exist")
    biz_across = data.get_one_of_inputs("biz_across")
    for _value in original_global_var:
        val = loose_strip(_value["value"])
        # category == 3 marks an IP-typed variable
        if _value["category"] == 3:
            if biz_across:
                result, ip_list = get_biz_ip_from_frontend(
                    ip_str=val,
                    executor=executor,
                    biz_cc_id=biz_cc_id,
                    data=data,
                    logger_handle=self.logger,
                    is_across=True,
                    ip_is_exist=ip_is_exist,
                    ignore_ex_data=True,
                )
                # Cloud-area IP format did not match; retry against the
                # current business only.
                if not result:
                    result, ip_list = get_biz_ip_from_frontend(
                        ip_str=val,
                        executor=executor,
                        biz_cc_id=biz_cc_id,
                        data=data,
                        logger_handle=self.logger,
                        is_across=False,
                        ip_is_exist=ip_is_exist,
                    )
                    if not result:
                        return False
            else:
                result, ip_list = get_biz_ip_from_frontend(
                    ip_str=val,
                    executor=executor,
                    biz_cc_id=biz_cc_id,
                    data=data,
                    logger_handle=self.logger,
                    is_across=False,
                    ip_is_exist=ip_is_exist,
                )
                if not result:
                    return False
            if ip_list:
                global_vars.append({"name": _value["name"], "ip_list": ip_list})
        else:
            # non-IP variable: pass the stripped value through
            global_vars.append({"name": _value["name"], "value": val})
    job_kwargs = {
        "bk_scope_type": self.biz_scope_type,
        "bk_scope_id": str(biz_cc_id),
        "bk_biz_id": biz_cc_id,
        "bk_job_id": data.get_one_of_inputs("job_task_id"),
        "global_vars": global_vars,
        "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")),
    }
    job_result = client.job.execute_job(job_kwargs)
    self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(result=job_result, kwargs=job_kwargs))
    if job_result["result"]:
        job_instance_id = job_result["data"]["job_instance_id"]
        data.outputs.job_inst_url = get_job_instance_url(biz_cc_id, job_instance_id)
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result["data"]["job_instance_name"]
        # expose the client for the schedule/callback phase
        data.outputs.client = client
        return True
    else:
        message = job_handle_api_error("job.execute_job", job_kwargs, job_result)
        self.logger.error(message)
        data.outputs.ex_data = message
        return False
def execute(self, data, parent_data):
    """Push locally-uploaded files (successful uploads only) to target
    hosts via the configured file-manager backend."""
    executor = parent_data.inputs.executor
    biz_cc_id = data.inputs.biz_cc_id
    local_files = data.inputs.job_local_files
    target_ip_list = data.inputs.job_target_ip_list
    target_account = data.inputs.job_target_account
    target_path = data.inputs.job_target_path
    across_biz = data.get_one_of_inputs("job_across_biz", False)

    # The backend type is configured via an environment variable.
    manager_type = EnvironmentVariables.objects.get_var(
        "BKAPP_FILE_MANAGER_TYPE")
    if not manager_type:
        data.outputs.ex_data = "File Manager configuration error, contact administrator please."
        return False
    err_msg = "can not get file manager for type: {}\n err: {}"
    try:
        file_manager = ManagerFactory.get_manager(manager_type=manager_type)
    except Exception as exc:
        self.logger.error(err_msg.format(manager_type, traceback.format_exc()))
        data.outputs.ex_data = err_msg.format(manager_type, exc)
        return False

    client = get_client_by_user(executor)
    # filter cross-business IPs
    clean_result, ip_list = get_biz_ip_from_frontend(
        target_ip_list, executor, biz_cc_id, data, self.logger, across_biz)
    if not clean_result:
        return False
    # silently drop files whose upload failed
    tags = [
        uploaded["response"]["tag"]
        for uploaded in local_files
        if uploaded["response"]["result"] is True
    ]
    push_result = file_manager.push_files_to_ips(
        esb_client=client,
        bk_biz_id=biz_cc_id,
        file_tags=tags,
        target_path=target_path,
        ips=ip_list,
        account=target_account,
        callback_url=get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")),
    )
    if not push_result["result"]:
        err_message = job_handle_api_error(push_result["job_api"],
                                           push_result["kwargs"],
                                           push_result["response"])
        self.logger.error(err_message)
        data.outputs.ex_data = err_message
        return False
    job_instance_id = push_result["data"]["job_id"]
    data.outputs.job_inst_id = job_instance_id
    data.outputs.job_inst_url = get_job_instance_url(biz_cc_id,
                                                     job_instance_id)
    return True
def execute(self, data, parent_data):
    """Start a JOB fast_push_file task with possibly cross-business sources.

    Builds one file_source entry per source host group (resolving its IPs
    via the frontend IP parser), resolves the target ip_list, then
    dispatches ``job.fast_push_file`` and records the job instance info in
    ``data.outputs``.

    :param data: node data (job_source_files, job_ip_list, account, paths...)
    :param parent_data: pipeline data (executor, biz_cc_id, language)
    :return: True if the job was accepted, False otherwise (ex_data is set)
    """
    executor = parent_data.get_one_of_inputs("executor")
    client = get_client_by_user(executor)
    client.set_bk_api_ver("v2")
    # Propagate the caller's language so API/error messages are localized.
    if parent_data.get_one_of_inputs("language"):
        setattr(client, "language", parent_data.get_one_of_inputs("language"))
        translation.activate(parent_data.get_one_of_inputs("language"))
    biz_cc_id = data.get_one_of_inputs("biz_cc_id", parent_data.inputs.biz_cc_id)
    original_source_files = deepcopy(
        data.get_one_of_inputs("job_source_files", []))
    across_biz = data.get_one_of_inputs("job_across_biz", False)
    ip_is_exist = data.get_one_of_inputs("ip_is_exist")
    file_source = []
    for item in original_source_files:
        # filter cross-business IPs for this source host group
        clean_source_ip_result, source_ip_list = get_biz_ip_from_frontend(
            item["ip"], executor, biz_cc_id, data, self.logger, across_biz)
        if not clean_source_ip_result:
            return False
        file_source.append({
            # one file path per input line, blanks dropped
            "files": [
                _file.strip() for _file in item["files"].split("\n")
                if _file.strip()
            ],
            "ip_list": source_ip_list,
            "account": loose_strip(item["account"]),
        })
    # resolve target IPs (current business only)
    original_ip_list = data.get_one_of_inputs("job_ip_list")
    clean_result, ip_list = get_biz_ip_from_frontend(
        original_ip_list, executor, biz_cc_id, data, self.logger,
        is_across=False, ip_is_exist=ip_is_exist)
    if not clean_result:
        return False
    job_timeout = data.get_one_of_inputs("job_timeout")
    job_kwargs = {
        "bk_scope_type": JobBizScopeType.BIZ.value,
        "bk_scope_id": str(biz_cc_id),
        "bk_biz_id": biz_cc_id,
        "file_source": file_source,
        "ip_list": ip_list,
        "account": data.get_one_of_inputs("job_account"),
        "file_target_path": data.get_one_of_inputs("job_target_path"),
        "bk_callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")),
    }
    if job_timeout:
        job_kwargs["timeout"] = int(job_timeout)
    job_result = client.job.fast_push_file(job_kwargs)
    self.logger.info("job_result: {result}, job_kwargs: {kwargs}".format(
        result=job_result, kwargs=job_kwargs))
    if job_result["result"]:
        job_instance_id = job_result["data"]["job_instance_id"]
        data.outputs.job_inst_id = job_instance_id
        data.outputs.job_inst_name = job_result["data"][
            "job_instance_name"]
        data.outputs.job_inst_url = get_job_instance_url(
            biz_cc_id, job_instance_id)
        # expose the client for the schedule/callback phase
        data.outputs.client = client
        return True
    else:
        message = job_handle_api_error("job.fast_push_file", job_kwargs,
                                       job_result)
        self.logger.error(message)
        data.outputs.ex_data = message
        return False