def format_template_list_data(templates, project=None, return_id_list=False):
    """Serialize template objects into plain dicts for list responses.

    Args:
        templates: iterable of template objects exposing ``pipeline_template``.
        project: optional project whose info is merged into every item.
        return_id_list: when True, also return the list of serialized ids.

    Returns:
        list of dicts, or ``(list, ids)`` when ``return_id_list`` is True.
    """
    data = []
    ids = []
    for template in templates:
        pipeline = template.pipeline_template
        item = {
            "id": template.id,
            "name": pipeline.name,
            "creator": pipeline.creator,
            "create_time": format_datetime(pipeline.create_time),
            "editor": pipeline.editor,
            "edit_time": format_datetime(pipeline.edit_time),
            "category": template.category,
        }
        if project:
            item["project_id"] = project.id
            item["project_name"] = project.name
            item["bk_biz_id"] = project.bk_biz_id
            # bk_biz_name only carries a value for projects synced from CMDB.
            item["bk_biz_name"] = project.name if project.from_cmdb else None
        if return_id_list:
            ids.append(item["id"])
        data.append(item)
    if return_id_list:
        return data, ids
    return data
def test_get_common_template_info(self):
    """GET on a common template detail returns the serialized template."""
    pipeline = MockPipelineTemplate(id=1, name="pt1")
    template = MockCommonTemplate(id=TEST_TEMPLATE_ID, pipeline_template=pipeline)
    with mock.patch(
        COMMONTEMPLATE_SELECT_RELATE,
        MagicMock(return_value=MockQuerySet(get_result=template)),
    ):
        # The API strips front-end layout keys before returning the tree.
        expected_tree = copy.deepcopy(template.pipeline_tree)
        expected_tree.pop("line")
        expected_tree.pop("location")
        expected = {
            "id": template.id,
            "name": template.pipeline_template.name,
            "creator": template.pipeline_template.creator,
            "create_time": format_datetime(template.pipeline_template.create_time),
            "editor": template.pipeline_template.editor,
            "edit_time": format_datetime(template.pipeline_template.edit_time),
            "category": template.category,
            "pipeline_tree": expected_tree,
        }
        response = self.client.get(path=self.url().format(
            template_id=TEST_TEMPLATE_ID))
        self.assertEqual(response.status_code, 200)
        payload = json.loads(response.content)
        self.assertTrue(payload["result"], msg=payload)
        self.assertEqual(expected, payload["data"])
def format_template_data(template, project=None):
    """Serialize a single template, including its pipeline tree, into a dict.

    Front-end layout keys ("line"/"location") are stripped from the tree and
    the input-variable JSON schema is injected before returning.

    Args:
        template: template object exposing ``pipeline_template`` and
            ``pipeline_tree``.
        project: optional project whose info is merged into the result.

    Returns:
        dict with the template's metadata and cleaned ``pipeline_tree``.
    """
    pipeline_tree = template.pipeline_tree
    # Pop with a default so trees saved without front-end layout keys do not
    # raise KeyError (the previous unconditional pop crashed on such trees;
    # _format_status_time elsewhere in this file already pops defensively).
    pipeline_tree.pop("line", None)
    pipeline_tree.pop("location", None)
    varschema.add_schema_for_input_vars(pipeline_tree)
    data = {
        "id": template.id,
        "name": template.pipeline_template.name,
        "creator": template.pipeline_template.creator,
        "create_time": format_datetime(template.pipeline_template.create_time),
        "editor": template.pipeline_template.editor,
        "edit_time": format_datetime(template.pipeline_template.edit_time),
        "category": template.category,
        "pipeline_tree": pipeline_tree,
    }
    if project:
        data.update({
            "project_id": project.id,
            "project_name": project.name,
            "bk_biz_id": project.bk_biz_id,
            # bk_biz_name only carries a value for projects synced from CMDB.
            "bk_biz_name": project.name if project.from_cmdb else None,
        })
    return data
def test_get_template_info__for_project_template(self):
    """Project template detail includes project info and the cleaned tree."""
    pipeline = MockPipelineTemplate(id=1, name="pt1")
    template = MockTaskTemplate(id=1, pipeline_template=pipeline)
    with mock.patch(
        TASKTEMPLATE_SELECT_RELATE,
        MagicMock(return_value=MockQuerySet(get_result=template)),
    ):
        # Layout-only keys are removed by the API before returning the tree.
        expected_tree = copy.deepcopy(template.pipeline_tree)
        expected_tree.pop("line")
        expected_tree.pop("location")
        expected = {
            "id": template.id,
            "name": template.pipeline_template.name,
            "creator": template.pipeline_template.creator,
            "create_time": format_datetime(template.pipeline_template.create_time),
            "editor": template.pipeline_template.editor,
            "edit_time": format_datetime(template.pipeline_template.edit_time),
            "category": template.category,
            "project_id": TEST_PROJECT_ID,
            "project_name": TEST_PROJECT_NAME,
            "bk_biz_id": TEST_BIZ_CC_ID,
            "bk_biz_name": TEST_PROJECT_NAME,
            "pipeline_tree": expected_tree,
        }
        response = self.client.get(path=self.url().format(
            template_id=TEST_TEMPLATE_ID, project_id=TEST_PROJECT_ID))
        payload = json.loads(response.content)
        self.assertTrue(payload["result"], msg=payload)
        self.assertEqual(expected, payload["data"])
def _format_status_time(status_tree):
    """Normalize raw engine timestamps on *status_tree* in place.

    Pops the created/started/archived timestamps, ensures ``elapsed_time``
    is present, and writes human-readable ``start_time``/``finish_time``
    (``None`` when the corresponding timestamp is missing).
    """
    status_tree.setdefault("children", {})
    status_tree.pop("created_time", "")
    started = status_tree.pop("started_time", None)
    archived = status_tree.pop("archived_time", None)
    # Only compute elapsed time when the engine did not already supply it.
    if "elapsed_time" not in status_tree:
        status_tree["elapsed_time"] = calculate_elapsed_time(started, archived)
    status_tree["start_time"] = format_datetime(started) if started else None
    status_tree["finish_time"] = format_datetime(archived) if archived else None
def test_get_common_template_list(self):
    """Listing common templates returns serialized data with auth actions."""
    # Two mocked common templates backed by distinct pipeline templates.
    pt1 = MockPipelineTemplate(id=1, name="pt1")
    pt2 = MockPipelineTemplate(id=2, name="pt2")
    task_tmpl1 = MockCommonTemplate(id=1, pipeline_template=pt1)
    task_tmpl2 = MockCommonTemplate(id=2, pipeline_template=pt2)
    task_templates = [task_tmpl1, task_tmpl2]
    with mock.patch(
        COMMONTEMPLATE_SELECT_RELATE,
        MagicMock(return_value=MockQuerySet(filter_result=task_templates)),
    ):
        with mock.patch(
            MOCK_GET_COMMON_FLOW_ALLOWED_ACTIONS,
            MagicMock(return_value=TEST_COMMON_TEMPLATES_ALLOWED_ACTIONS)
        ) as mock_get_actions:
            # Expected payload: one dict per template, plus IAM auth actions.
            assert_data = [{
                "id": tmpl.id,
                "name": tmpl.pipeline_template.name,
                "creator": tmpl.pipeline_template.creator,
                "create_time": format_datetime(tmpl.pipeline_template.create_time),
                "editor": tmpl.pipeline_template.editor,
                "edit_time": format_datetime(tmpl.pipeline_template.edit_time),
                "category": tmpl.category,
                "auth_actions": ["TEST_ACTION"],
            } for tmpl in task_templates]
            response = self.client.get(
                path=self.url(),
                HTTP_BK_APP_CODE=TEST_APP_CODE,
                HTTP_BK_USERNAME=TEST_USERNAME,
            )
            # The allowed-actions helper must be queried exactly once for
            # the whole id list, not per template.
            mock_get_actions.assert_called_once_with(
                "",
                COMMON_FLOW_ACTIONS,
                TEST_ID_LIST,
            )
            self.assertEqual(response.status_code, 200)
            data = json.loads(response.content)
            self.assertTrue(data["result"], msg=data)
            self.assertEqual(data["data"], assert_data)
def test_get_tasks_status__success_with_children_status(self):
    """Requesting children status keeps the 'children' key in each status."""
    task = MockTaskFlowInstance(get_status_return={"children": {}})
    task.name = "task_name"
    # The dispatcher is mocked so the view receives a canned status payload.
    dispatcher = MagicMock()
    dispatcher.get_task_status = MagicMock(return_value=DISPATCHER_RETURN)
    with patch(GET_TASK_STATUS_TASK_COMMAND_DISPATCHER, MagicMock(return_value=dispatcher)):
        with patch(TASKFLOW_OBJECTS_FILTER, MagicMock(return_value=[task])):
            response = self.client.post(
                path=self.url().format(project_id=TEST_PROJECT_ID),
                data=json.dumps({
                    "task_id_list": [1, 2, 3],
                    "include_children_status": True
                }),
                content_type="application/json",
            )
            data = json.loads(response.content)
            self.assertTrue(data["result"])
            self.assertEqual(data["code"], err_code.SUCCESS.code)
            # One entry per filtered task; 'children' is retained because
            # include_children_status was True.
            self.assertEqual(
                data["data"],
                [{
                    "id": task.id,
                    "name": task.name,
                    "status": {
                        "children": {},
                        "name": task.name
                    },
                    "flow_type": task.flow_type,
                    "current_flow": task.current_flow,
                    "is_deleted": task.is_deleted,
                    "create_time": format_datetime(task.create_time),
                    "start_time": format_datetime(task.start_time),
                    "finish_time": format_datetime(task.finish_time),
                    "url": task.url,
                }],
            )
def group_by_atom_execute(self, tasktmpl, filters, page, limit):
    """Group template statistics by the component (atom) they execute.

    Returns ``(total, groups)`` where *groups* is the requested page of
    per-template aggregation rows for ``filters["component_code"]``.
    """
    # Mapping of category code -> human readable name.
    category_names = dict(TASK_CATEGORY)
    component_code = filters.get("component_code")
    # Statistics rows for every template node that uses this component.
    node_statistics = TemplateNodeStatistics.objects.filter(
        component_code=component_code)
    total = node_statistics.count()
    atom_template_data = node_statistics.values(
        "template_id",
        "project_id",
        "category",
        "template_edit_time",
        "template_creator",
    )[(page - 1) * limit:page * limit]
    # Resolve display names for the referenced projects and templates.
    project_ids = node_statistics.values_list("project_id", flat=True)
    template_ids = node_statistics.values_list("template_id", flat=True)
    project_names = dict(
        Project.objects.filter(id__in=project_ids).values_list("id", "name"))
    template_names = dict(
        self.filter(id__in=template_ids).values_list(
            "id", "pipeline_template__name"))
    # Assemble one aggregated row per statistics record on this page.
    groups = [
        {
            "template_id": row["template_id"],
            "project_id": row["project_id"],
            "project_name": project_names.get(row["project_id"], ""),
            "template_name": template_names.get(int(row["template_id"]), ""),
            "category": category_names[row["category"]],
            "edit_time": format_datetime(row["template_edit_time"]),
            "editor": row["template_creator"],
        }
        for row in atom_template_data
    ]
    return total, groups
def info_data_from_period_task(task, detail=True):
    """Serialize a periodic task into a plain dict.

    Args:
        task: periodic task object.
        detail: when True, also include the (potentially large) ``form``
            and ``pipeline_tree`` fields.

    Returns:
        dict with the task's metadata.
    """
    info = {
        "id": task.id,
        "name": task.name,
        "template_id": task.template_id,
        "template_source": task.template_source,
        "creator": task.creator,
        "cron": task.cron,
        "enabled": task.enabled,
        "last_run_at": format_datetime(task.last_run_at),
        "total_run_count": task.total_run_count,
    }
    if detail:
        info.update({
            "form": task.form,
            "pipeline_tree": task.pipeline_tree,
        })
    return info
def test_get_periodic_task_list(self):
    """Listing periodic tasks serializes every task plus its auth actions."""
    periodic_tasks = [
        MockPeriodicTask(id="1"),
        MockPeriodicTask(id="2"),
        MockPeriodicTask(id="3"),
    ]
    expected = [{
        "id": task.id,
        "name": task.name,
        "template_id": task.template_id,
        "template_source": task.template_source,
        "creator": task.creator,
        "cron": task.cron,
        "enabled": task.enabled,
        "last_run_at": format_datetime(task.last_run_at),
        "total_run_count": task.total_run_count,
        "auth_actions": ["TEST_ACTION"],
    } for task in periodic_tasks]
    with mock.patch(
        MOCK_GET_PERIODIC_TASK_ALLOWED_ACTIONS,
        MagicMock(return_value=TEST_PERIODIC_TASK_ALLOWED_ACTIONS)
    ) as mock_get_actions:
        with mock.patch(PERIODIC_TASK_FILTER, MagicMock(return_value=periodic_tasks)):
            response = self.client.get(
                path=self.url().format(project_id=TEST_PROJECT_ID),
                HTTP_BK_APP_CODE=TEST_APP_CODE,
                HTTP_BK_USERNAME=TEST_USERNAME,
            )
            # IAM actions must be fetched once for the whole id list.
            mock_get_actions.assert_called_once_with(
                "", PERIODIC_TASK_ACTIONS, TEST_ID_LIST)
            payload = json.loads(response.content)
            self.assertTrue(payload["result"], msg=payload)
            self.assertEqual(payload["data"], expected)
def test_get_periodic_task_info__common_template(self):
    """Detail view works for periodic tasks built from common templates."""
    task = MockPeriodicTask(template_source="common")
    expected = {
        "id": task.id,
        "name": task.name,
        "template_id": task.template_id,
        "template_source": "common",
        "creator": task.creator,
        "cron": task.cron,
        "enabled": task.enabled,
        "last_run_at": format_datetime(task.last_run_at),
        "total_run_count": task.total_run_count,
        "form": task.form,
        "pipeline_tree": task.pipeline_tree,
    }
    with mock.patch(PERIODIC_TASK_GET, MagicMock(return_value=task)):
        response = self.client.get(path=self.url().format(
            task_id=TEST_PERIODIC_TASK_ID, project_id=TEST_PROJECT_ID))
        payload = json.loads(response.content)
        self.assertTrue(payload["result"], msg=payload)
        self.assertEqual(payload["data"], expected)
def group_by_atom_template(self, tasktmpl, filters, page, limit):
    """List templates that reference a component, for the statistics table.

    Filters the node-statistics rows by the templates in *tasktmpl* and,
    when ``filters["component_code"]`` is given, by component/version/
    remote-ness. Returns ``(total, groups)`` where *groups* is the requested
    page of aggregated rows sorted by ``filters["order_by"]``.
    """
    # Mapping of category code -> human readable name.
    category_names = dict(TASK_CATEGORY)
    scoped_template_ids = tasktmpl.values_list("id", flat=True)
    component_code = filters.get("component_code")
    version = filters.get("version")
    is_remote = filters.get("is_remote", False)
    # Narrow to a specific component only when one was requested.
    if component_code:
        node_rows = TemplateNodeStatistics.objects.filter(
            task_template_id__in=scoped_template_ids,
            component_code=component_code,
            version=version,
            is_remote=is_remote,
        )
    else:
        node_rows = TemplateNodeStatistics.objects.filter(
            task_template_id__in=scoped_template_ids,
        )
    total = node_rows.count()
    atom_template_data = node_rows.values(
        "template_id",
        "task_template_id",
        "project_id",
        "category",
        "template_create_time",
        "template_creator",
    )[(page - 1) * limit:page * limit]
    # Resolve display names for the referenced projects and templates.
    project_ids = node_rows.values_list("project_id", flat=True)
    template_ids = node_rows.values_list("template_id", flat=True)
    project_names = dict(
        Project.objects.filter(id__in=project_ids).values_list("id", "name"))
    template_names = dict(
        PipelineTemplate.objects.filter(
            id__in=template_ids).values_list("id", "name"))
    # Assemble one aggregated row per statistics record on this page.
    groups = [
        {
            "template_id": row["task_template_id"],
            "project_id": row["project_id"],
            "project_name": project_names.get(row["project_id"], ""),
            "template_name": template_names.get(int(row["template_id"]), ""),
            "category": category_names[row["category"]],
            "create_time": format_datetime(row["template_create_time"]),
            "creator": row["template_creator"],
        }
        for row in atom_template_data
    ]
    # Default ordering is newest template first; a leading "-" means
    # descending, mirroring Django's order_by convention.
    order_by = filters.get("order_by", "-template_id")
    descending = order_by.startswith("-")
    if descending:
        order_by = order_by[1:]
    groups.sort(key=lambda group: group.get(order_by), reverse=descending)
    return total, groups
def test_create_periodic_task__success(self):
    """Creating a periodic task wires template, project and cron correctly."""
    task = MockPeriodicTask()
    # Expected response payload: task metadata plus form and pipeline tree.
    assert_data = {
        "id": task.id,
        "name": task.name,
        "template_id": task.template_id,
        "template_source": "project",
        "creator": task.creator,
        "cron": task.cron,
        "enabled": task.enabled,
        "last_run_at": format_datetime(task.last_run_at),
        "total_run_count": task.total_run_count,
        "form": task.form,
        "pipeline_tree": task.pipeline_tree,
    }
    proj = MockProject(
        project_id=TEST_PROJECT_ID,
        name=TEST_PROJECT_NAME,
        bk_biz_id=TEST_BIZ_CC_ID,
        from_cmdb=True,
    )
    template = MockTaskTemplate()
    replace_template_id_mock = MagicMock()
    # Patch template/project lookup, task creation and template-id rewriting
    # so only the view's own orchestration logic is exercised.
    with mock.patch(TASKTEMPLATE_GET, MagicMock(return_value=template)):
        with mock.patch(PROJECT_GET, MagicMock(return_value=proj)):
            with mock.patch(PERIODIC_TASK_CREATE, MagicMock(return_value=task)):
                with mock.patch(
                    APIGW_CREATE_PERIODIC_TASK_REPLACE_TEMPLATE_ID,
                    replace_template_id_mock,
                ):
                    response = self.client.post(
                        path=self.url().format(
                            template_id=TEST_TEMPLATE_ID,
                            project_id=TEST_PROJECT_ID),
                        data=json.dumps({
                            "name": task.name,
                            "cron": task.cron,
                            "template_source": "project",
                            "exclude_task_nodes_id": "exclude_task_nodes_id",
                        }),
                        content_type="application/json",
                    )
                    # Excluded nodes must be stripped via the tree previewer.
                    PipelineTemplateWebPreviewer.preview_pipeline_tree_exclude_task_nodes.assert_called_with(
                        template.pipeline_tree, "exclude_task_nodes_id")
                    # The task is persisted with the previewed tree and the
                    # requester's username ("" under the test client).
                    PeriodicTask.objects.create.assert_called_once_with(
                        project=proj,
                        template=template,
                        template_source="project",
                        name=task.name,
                        cron=task.cron,
                        pipeline_tree=template.pipeline_tree,
                        creator="",
                    )
                    data = json.loads(response.content)
                    # Subprocess template ids must be rewritten for the tree.
                    replace_template_id_mock.assert_called_once_with(
                        TaskTemplate, template.pipeline_tree)
                    self.assertTrue(data["result"], msg=data)
                    self.assertEqual(data["data"], assert_data)
def test_get_template_list__for_project_template(self):
    """Template list returns serialized templates, or [] for an empty set."""
    pipeline1 = MockPipelineTemplate(id=1, name="pt1")
    pipeline2 = MockPipelineTemplate(id=2, name="pt2")
    templates = [
        MockTaskTemplate(id=1, pipeline_template=pipeline1),
        MockTaskTemplate(id=2, pipeline_template=pipeline2),
    ]
    with patch(
        TASKTEMPLATE_SELECT_RELATE,
        MagicMock(return_value=MockQuerySet(filter_result=templates)),
    ):
        with patch(MOCK_GET_FLOW_ALLOWED_ACTIONS, MagicMock(return_value=TEST_ALLOWED_ACTIONS)):
            expected = [
                {
                    "id": tmpl.id,
                    "name": tmpl.pipeline_template.name,
                    "creator": tmpl.pipeline_template.creator,
                    "create_time": format_datetime(tmpl.pipeline_template.create_time),
                    "editor": tmpl.pipeline_template.editor,
                    "edit_time": format_datetime(tmpl.pipeline_template.edit_time),
                    "category": tmpl.category,
                    "project_id": TEST_PROJECT_ID,
                    "project_name": TEST_PROJECT_NAME,
                    "bk_biz_id": TEST_PROJECT_ID,
                    "bk_biz_name": TEST_PROJECT_NAME,
                    "auth_actions": ["TEST_ACTION"],
                }
                for tmpl in templates
            ]
            response = self.client.get(
                path=self.url().format(project_id=TEST_PROJECT_ID),
                HTTP_BK_APP_CODE=TEST_APP_CODE,
                HTTP_BK_USERNAME=TEST_USERNAME,
            )
            self.assertEqual(response.status_code, 200)
            payload = json.loads(response.content)
            self.assertTrue(payload["result"], msg=payload)
            self.assertEqual(payload["data"], expected)
    # An empty queryset must yield an empty data list, not an error.
    with patch(
        TASKTEMPLATE_SELECT_RELATE,
        MagicMock(return_value=MockQuerySet(filter_result=[])),
    ):
        response = self.client.get(
            path=self.url().format(project_id=TEST_PROJECT_ID),
            HTTP_BK_APP_CODE=TEST_APP_CODE,
            HTTP_BK_USERNAME=TEST_USERNAME,
        )
        payload = json.loads(response.content)
        self.assertTrue(payload["result"], msg=payload)
        self.assertEqual(payload["data"], [])
def get_tasks_status(request, project_id):
    """Return status info for up to 50 tasks in the requester's project.

    Expects a JSON body with ``task_id_list`` (list of task ids) and an
    optional ``include_children_status`` flag. Responds with the standard
    ``{"result", "data"/"message", "code"}`` envelope.
    """
    try:
        params = json.loads(request.body)
    except Exception:
        return {
            "result": False,
            "message": "request body is not a valid json",
            "code": err_code.REQUEST_PARAM_INVALID.code,
        }
    task_ids = params.get("task_id_list", [])
    # Validate the id list before touching the database.
    if not isinstance(task_ids, list):
        return {
            "result": False,
            "message": "task_id_list must be a list",
            "code": err_code.REQUEST_PARAM_INVALID.code,
        }
    if len(task_ids) > 50:
        return {
            "result": False,
            "message": "task_id_list is too long, maximum length is 50",
            "code": err_code.REQUEST_PARAM_INVALID.code,
        }
    include_children_status = params.get("include_children_status", False)
    # Scope the query to the requester's project so ids from other projects
    # are silently ignored.
    tasks = TaskFlowInstance.objects.filter(id__in=task_ids, project__id=request.project.id)
    data = []
    for task in tasks:
        dispatcher = TaskCommandDispatcher(
            engine_ver=task.engine_ver,
            taskflow_id=task.id,
            pipeline_instance=task.pipeline_instance,
            project_id=project_id,
        )
        status_result = dispatcher.get_task_status()
        if not status_result["result"]:
            # Propagate the dispatcher error envelope as-is.
            return status_result
        status = status_result["data"]
        if not include_children_status and "children" in status:
            status.pop("children")
        # Older engine payloads lack node names; backfill them from the
        # execution data before returning.
        if "name" not in status:
            add_node_name_to_status_tree(task.pipeline_instance.execution_data, status.get("children", {}))
            status["name"] = task.name
        data.append({
            "id": task.id,
            "name": task.name,
            "status": status,
            "flow_type": task.flow_type,
            "current_flow": task.current_flow,
            "is_deleted": task.is_deleted,
            "create_time": format_datetime(task.create_time),
            "start_time": format_datetime(task.start_time),
            "finish_time": format_datetime(task.finish_time),
            "url": task.url,
        })
    return {"result": True, "data": data, "code": err_code.SUCCESS.code}
"subprocess_total": i, "gateways_total": i, "create_method": "test_create_method", } for i in range(1, TEST_LIMIT + 1)] TEST_TASKFLOW_STATISTICS = MagicMock( return_value=TEST_TASKFLOW_STATISTICS_DATA) TEST_INSTANCE_NAMEDATA = [(i, "test_instance") for i in range(1, TEST_LIMIT + 1)] TEST_PROJECT_NAMEDATA = [(TEST_PROJ_ID, "test_proj")] TEST_GROUPS = [{ "instance_id": i, "instance_name": dict(TEST_INSTANCE_NAMEDATA)[i], "project_id": TEST_PROJ_ID, "project_name": dict(TEST_PROJECT_NAMEDATA)[TEST_PROJ_ID], "category": "test_category", "create_time": format_datetime(TEST_CREATE_TIME), "creator": "test_creator", "elapsed_time": "elapsed_time", "atom_total": i, "subprocess_total": i, "gateways_total": i, "create_method": "test_create_method", } for i in range(1, TEST_LIMIT + 1)] class MockTaskflowStatistics(MagicMock): def values(self, *args, **kwargs): return TEST_TASKFLOW_STATISTICS_DATA class MockInstanceDict(MagicMock):