def setUp(self):
    """Create a user and an executing SQL workflow, plus a mocked task result."""
    self.now = datetime.now()
    self.u1 = User(username='******', display='用户1')
    self.u1.save()
    self.wf1 = SqlWorkflow(
        workflow_name='some_name2',
        group_id=1,
        group_name='g1',
        engineer=self.u1.username,
        engineer_display=self.u1.display,
        audit_auth_groups='some_group',
        create_time=self.now - timedelta(days=1),
        status='workflow_executing',
        is_backup='是',
        instance_name='some_instance',
        db_name='some_db',
        sql_content='some_sql',
        sql_syntax=1,
        execute_result='',
    )
    self.wf1.save()
    # Mocked execution-result object as handed back by the task queue.
    self.task_result = MagicMock()
    self.task_result.args = [self.wf1.id]
    self.task_result.success = True
    self.task_result.stopped = self.now
    self.task_result.result.json.return_value = json.dumps(
        [{'id': 1, 'sql': 'some_content'}])
    self.task_result.result.warning = ''
    self.task_result.result.error = ''
class AsyncTest(TestCase):
    """Tests for the async execution callback."""

    def setUp(self):
        """Create a user and an executing workflow, plus a mocked task result."""
        self.now = datetime.now()
        self.u1 = User(username='******', display='用户1')
        self.u1.save()
        self.wf1 = SqlWorkflow(
            workflow_name='some_name2',
            group_id=1,
            group_name='g1',
            engineer=self.u1.username,
            engineer_display=self.u1.display,
            audit_auth_groups='some_group',
            create_time=self.now - timedelta(days=1),
            status='workflow_executing',
            is_backup='是',
            instance_name='some_instance',
            db_name='some_db',
            sql_content='some_sql',
            sql_syntax=1,
            execute_result='',
        )
        self.wf1.save()
        # Mocked result object handed to execute_callback by the task queue.
        self.task_result = MagicMock()
        self.task_result.args = [self.wf1.id]
        self.task_result.success = True
        self.task_result.stopped = self.now
        self.task_result.result.json.return_value = json.dumps(
            [{'id': 1, 'sql': 'some_content'}])
        self.task_result.result.warning = ''
        self.task_result.result.error = ''

    def tearDown(self):
        """Remove fixture rows and drop the mocked result."""
        self.wf1.delete()
        self.u1.delete()
        self.task_result = None

    @patch('sql.utils.execute_sql.notify_for_execute')
    @patch('sql.utils.execute_sql.Audit')
    def test_call_back(self, mock_audit, mock_notify):
        """execute_callback should write an audit log entry and notify once."""
        mock_audit.detail_by_workflow_id.return_value.audit_id = 123
        mock_audit.add_log.return_value = 'any thing'
        execute_callback(self.task_result)
        mock_audit.detail_by_workflow_id.assert_called_with(
            workflow_id=self.wf1.id, workflow_type=ANY)
        mock_audit.add_log.assert_called_with(
            audit_id=123,
            operation_type=ANY,
            operation_type_desc=ANY,
            operation_info="执行结果:已正常结束",
            operator=ANY,
            operator_display=ANY,
        )
        mock_notify.assert_called_once()
def setUpClass(cls):
    """Create users, a slave instance, and finished DDL/DML workflows with content rows."""
    cls.u1 = User(username='******', display='用户1')
    cls.u1.save()
    cls.u2 = User(username='******', display='用户2')
    cls.u2.save()
    cls.superuser1 = User(username='******', is_superuser=True)
    cls.superuser1.save()
    cls.now = datetime.datetime.now()
    cls.slave1 = Instance(instance_name='test_slave_instance',
                          type='slave',
                          db_type='mysql',
                          host='testhost',
                          port=3306,
                          user='******',
                          password='******')
    cls.slave1.save()
    # Two DDL workflows: engineer u1, group g1, created yesterday.
    ddl_workflow = [
        SqlWorkflow(workflow_name='ddl %s' % i,
                    group_id=1,
                    group_name='g1',
                    engineer=cls.u1.username,
                    engineer_display=cls.u1.display,
                    audit_auth_groups='some_group',
                    create_time=cls.now - datetime.timedelta(days=1),
                    status='workflow_finish',
                    is_backup=True,
                    instance=cls.slave1,
                    db_name='some_db',
                    syntax_type=1) for i in range(2)
    ]
    # Three DML workflows: engineer u2, group g2, created the day before yesterday.
    dml_workflow = [
        SqlWorkflow(workflow_name='Test %s' % i,
                    group_id=2,
                    group_name='g2',
                    engineer=cls.u2.username,
                    engineer_display=cls.u2.display,
                    audit_auth_groups='some_group',
                    create_time=cls.now - datetime.timedelta(days=2),
                    status='workflow_finish',
                    is_backup=True,
                    instance=cls.slave1,
                    db_name='some_db',
                    syntax_type=2) for i in range(3)
    ]
    SqlWorkflow.objects.bulk_create(ddl_workflow + dml_workflow)
    # Attach a SQL content row to every workflow created above.
    ddl_workflow_content = [
        SqlWorkflowContent(
            workflow=SqlWorkflow.objects.get(workflow_name='ddl %s' % i),
            sql_content='some_sql',
        ) for i in range(2)
    ]
    dml_workflow_content = [
        SqlWorkflowContent(
            workflow=SqlWorkflow.objects.get(workflow_name='Test %s' % i),
            sql_content='some_sql',
        ) for i in range(3)
    ]
    SqlWorkflowContent.objects.bulk_create(ddl_workflow_content + dml_workflow_content)
def execute(workflow_id, user=None):
    """Execute a SQL workflow from a delayed/async task.

    Takes the workflow id and an optional operator; returns the result of
    the engine's execute_workflow call.
    """
    # Row lock (current read) guards against double execution.
    with transaction.atomic():
        workflow = SqlWorkflow.objects.select_for_update().get(id=workflow_id)
        # Only review-passed or timed workflows are allowed to run.
        if workflow.status not in ['workflow_review_pass', 'workflow_timingtask']:
            raise Exception('工单状态不正确,禁止执行!')
        # Mark the ticket as executing.
        SqlWorkflow(id=workflow_id,
                    status='workflow_executing').save(update_fields=['status'])
    # Append an execution entry to the audit log.
    audit_id = Audit.detail_by_workflow_id(
        workflow_id=workflow_id,
        workflow_type=WorkflowDict.workflow_type['sqlreview']).audit_id
    Audit.add_log(audit_id=audit_id,
                  operation_type=5,
                  operation_type_desc='执行工单',
                  operation_info='人工操作执行' if user else '系统定时执行',
                  operator=user.username if user else '',
                  operator_display=user.display if user else '系统')
    engine = get_engine(instance=workflow.instance)
    return engine.execute_workflow(workflow=workflow)
def query(workflow_id):
    """Run the query of a delayed/async query workflow, given its workflow id."""
    workflow = SqlWorkflow.objects.get(id=workflow_id)
    # Timed workflows get a status flip plus an execution log entry.
    if workflow.status == 'workflow_timingtask':
        SqlWorkflow(id=workflow_id,
                    status='workflow_executing').save(update_fields=['status'])
        audit_id = Audit.detail_by_workflow_id(
            workflow_id=workflow_id,
            workflow_type=WorkflowDict.workflow_type['sqlreview']).audit_id
        Audit.add_log(audit_id=audit_id,
                      operation_type=5,
                      operation_type_desc='执行工单',
                      operation_info='系统定时执行',
                      operator='',
                      operator_display='系统')
    query_engine = get_engine(instance=workflow.instance)
    with FuncTimer() as t:
        query_result = query_engine.query(
            workflow.db_name, workflow.sqlworkflowcontent.sql_content)
    # TODO: once the call signature supports it, pass schema_name for pgsql
    # instances instead of branching on db_type here.
    query_result.query_time = t.cost
    return query_result
def setUp(self):
    """Create a user, a superuser, one finished workflow and one awaiting review."""
    self.now = datetime.now()
    self.u1 = User(username='******', display='用户1')
    self.u1.save()
    self.superuser1 = User(username='******', is_superuser=True)
    self.superuser1.save()
    # Both workflows share the same canned execution result.
    execute_result = json.dumps([{'id': 1, 'sql': 'some_content'}])
    self.wf1 = SqlWorkflow(workflow_name='some_name',
                           group_id=1,
                           group_name='g1',
                           engineer=self.u1.username,
                           engineer_display=self.u1.display,
                           audit_auth_groups='some_group',
                           create_time=self.now - timedelta(days=1),
                           status='workflow_finish',
                           is_backup='是',
                           instance_name='some_instance',
                           db_name='some_db',
                           sql_content='some_sql',
                           sql_syntax=1,
                           execute_result=execute_result)
    self.wf1.save()
    self.wf2 = SqlWorkflow(workflow_name='some_name2',
                           group_id=1,
                           group_name='g1',
                           engineer=self.u1.username,
                           engineer_display=self.u1.display,
                           audit_auth_groups='some_group',
                           create_time=self.now - timedelta(days=1),
                           status='workflow_manreviewing',
                           is_backup='是',
                           instance_name='some_instance',
                           db_name='some_db',
                           sql_content='some_sql',
                           sql_syntax=1,
                           execute_result=execute_result)
    self.wf2.save()
def setUpClass(cls):
    """Create users and batches of finished DDL/DML workflows."""
    cls.u1 = User(username='******', display='用户1')
    cls.u1.save()
    cls.u2 = User(username='******', display='用户2')
    cls.u2.save()
    cls.superuser1 = User(username='******', is_superuser=True)
    cls.superuser1.save()
    cls.now = datetime.datetime.now()
    # Two DDL workflows: engineer u1, group g1, created yesterday.
    ddl_workflow = [
        SqlWorkflow(workflow_name='ddl %s' % i,
                    group_id=1,
                    group_name='g1',
                    engineer=cls.u1.username,
                    engineer_display=cls.u1.display,
                    audit_auth_groups='some_group',
                    create_time=cls.now - datetime.timedelta(days=1),
                    status='已正常结束',
                    is_backup='是',
                    instance_name='some_instance',
                    db_name='some_db',
                    sql_content='some_sql',
                    sql_syntax=1) for i in range(2)
    ]
    # Three DML workflows: engineer u2, group g2, created the day before yesterday.
    dml_workflow = [
        SqlWorkflow(workflow_name='Test %s' % i,
                    group_id=2,
                    group_name='g2',
                    engineer=cls.u2.username,
                    engineer_display=cls.u2.display,
                    audit_auth_groups='some_group',
                    create_time=cls.now - datetime.timedelta(days=2),
                    status='已正常结束',
                    is_backup='是',
                    instance_name='some_instance',
                    db_name='some_db',
                    sql_content='some_sql',
                    sql_syntax=2) for i in range(3)
    ]
    SqlWorkflow.objects.bulk_create(ddl_workflow + dml_workflow)
def execute(workflow_id):
    """Execute a SQL workflow from a delayed/async task, given its workflow id."""
    workflow = SqlWorkflow.objects.get(id=workflow_id)
    # Timed workflows get a status flip plus an execution log entry.
    if workflow.status == 'workflow_timingtask':
        SqlWorkflow(id=workflow_id,
                    status='workflow_executing').save(update_fields=['status'])
        audit_id = Audit.detail_by_workflow_id(
            workflow_id=workflow_id,
            workflow_type=WorkflowDict.workflow_type['sqlreview']).audit_id
        Audit.add_log(audit_id=audit_id,
                      operation_type=5,
                      operation_type_desc='执行工单',
                      operation_info='系统定时执行',
                      operator='',
                      operator_display='系统')
    engine = get_engine(instance=workflow.instance)
    return engine.execute_workflow(workflow=workflow)
class WorkflowViewTest(TestCase):
    """View tests covering workflow status, detail, list, pass and cancel."""

    def setUp(self):
        """Create a user, a superuser, one finished workflow and one awaiting review."""
        self.now = datetime.now()
        self.u1 = User(username='******', display='用户1')
        self.u1.save()
        self.superuser1 = User(username='******', is_superuser=True)
        self.superuser1.save()
        self.wf1 = SqlWorkflow(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer=self.u1.username,
            engineer_display=self.u1.display,
            audit_auth_groups='some_group',
            create_time=self.now - timedelta(days=1),
            status='workflow_finish',
            is_backup='是',
            instance_name='some_instance',
            db_name='some_db',
            sql_content='some_sql',
            sql_syntax=1,
            execute_result=json.dumps([{'id': 1, 'sql': 'some_content'}]))
        self.wf1.save()
        self.wf2 = SqlWorkflow(
            workflow_name='some_name2',
            group_id=1,
            group_name='g1',
            engineer=self.u1.username,
            engineer_display=self.u1.display,
            audit_auth_groups='some_group',
            create_time=self.now - timedelta(days=1),
            status='workflow_manreviewing',
            is_backup='是',
            instance_name='some_instance',
            db_name='some_db',
            sql_content='some_sql',
            sql_syntax=1,
            execute_result=json.dumps([{'id': 1, 'sql': 'some_content'}]))
        self.wf2.save()

    def tearDown(self):
        """Remove all fixture rows."""
        self.u1.delete()
        self.superuser1.delete()
        self.wf1.delete()
        self.wf2.delete()

    def testWorkflowStatus(self):
        """/getWorkflowStatus/ returns the workflow's current status."""
        # Fix: dropped the stray `header={}` kwarg — the test client simply
        # forwarded it as a per-request default and it served no purpose.
        c = Client()
        c.force_login(self.u1)
        r = c.post('/getWorkflowStatus/', {'workflow_id': self.wf1.id})
        r_json = r.json()
        self.assertEqual(r_json['status'], 'workflow_finish')

    @patch('sql.utils.workflow_audit.Audit.review_info')
    @patch('sql.utils.workflow_audit.Audit.can_review')
    def testWorkflowDetailView(self, _can_review, _review_info):
        """Detail page renders both the display text and the raw status."""
        _review_info.return_value = ('some_auth_group', 'current_auth_group')
        _can_review.return_value = False
        c = Client()
        c.force_login(self.u1)
        r = c.get('/detail/{}/'.format(self.wf1.id))
        # NOTE(review): 'disaply' mirrors the element id in the template —
        # do not "fix" the spelling here without changing the template too.
        expected_status_display = r"""id="workflow_detail_disaply">已正常结束"""
        self.assertContains(r, expected_status_display)
        expected_status = r"""id="workflow_detail_status">workflow_finish"""
        self.assertContains(r, expected_status)

    def testWorkflowListView(self):
        """List endpoint returns both workflows, newest first."""
        c = Client()
        c.force_login(self.superuser1)
        r = c.post('/sqlworkflow_list/',
                   {'limit': 10, 'offset': 0, 'navStatus': 'all'})
        r_json = r.json()
        self.assertEqual(r_json['total'], 2)
        # Ordered by create time descending, so wf1 (finished) is second.
        self.assertEqual(r_json['rows'][1]['status'], 'workflow_finish')

    @patch('sql.utils.workflow_audit.Audit.detail_by_workflow_id')
    @patch('sql.utils.workflow_audit.Audit.audit')
    @patch('sql.utils.workflow_audit.Audit.can_review')
    def testWorkflowPassedView(self, _can_review, _audit, _detail_by_id):
        """/passed/ validates input/permission then marks the workflow review-passed."""
        c = Client()
        c.force_login(self.superuser1)
        r = c.post('/passed/')
        self.assertContains(r, 'workflow_id参数为空.')
        _can_review.return_value = False
        r = c.post('/passed/', {'workflow_id': self.wf1.id})
        self.assertContains(r, '你无权操作当前工单!')
        _can_review.return_value = True
        _detail_by_id.return_value.audit_id = 123
        _audit.return_value = {
            "data": {
                "workflow_status": 1  # TODO use the audit_success constant
            }
        }
        r = c.post('/passed/',
                   data={'workflow_id': self.wf1.id,
                         'audit_remark': 'some_audit'},
                   follow=False)
        self.assertRedirects(r, '/detail/{}/'.format(self.wf1.id),
                             fetch_redirect_response=False)
        self.wf1.refresh_from_db()
        self.assertEqual(self.wf1.status, 'workflow_review_pass')
        self.assertEqual(self.wf1.audit_remark, 'some_audit')

    @patch('sql.sql_workflow.Audit.add_log')
    @patch('sql.sql_workflow.Audit.detail_by_workflow_id')
    @patch('sql.sql_workflow.Audit.audit')
    # Patch can_cancel where the view imports it, not at its source module:
    # the view holds a direct reference from import time, so a mock at the
    # original location would never be seen. See
    # https://docs.python.org/3/library/unittest.mock.html#where-to-patch
    @patch('sql.sql_workflow.can_cancel')
    def testWorkflowCancelView(self, _can_cancel, _audit, _detail_by_id, _add_log):
        """/cancel/ validates input/permission then aborts the workflow."""
        c = Client()
        c.force_login(self.u1)
        r = c.post('/cancel/')
        self.assertContains(r, 'workflow_id参数为空.')
        r = c.post('/cancel/', data={'workflow_id': self.wf2.id})
        self.assertContains(r, '终止原因不能为空')
        _can_cancel.return_value = False
        r = c.post('/cancel/', data={'workflow_id': self.wf2.id,
                                     'cancel_remark': 'some_reason'})
        self.assertContains(r, '你无权操作当前工单!')
        _can_cancel.return_value = True
        # Fix: the original line `_detail_by_id = 123` merely rebound the local
        # name of the mock argument (a dead assignment); the intent was to
        # configure the mocked audit detail's audit_id.
        _detail_by_id.return_value.audit_id = 123
        r = c.post('/cancel/', data={'workflow_id': self.wf2.id,
                                     'cancel_remark': 'some_reason'})
        self.wf2.refresh_from_db()
        self.assertEqual('workflow_abort', self.wf2.status)
def post(self, request):
    """Execute a workflow.

    Dispatches on workflow_type: 2 = SQL review ticket (auto-queued or
    manually confirmed), 3 = data-archive ticket (always queued async).
    Raises serializers.ValidationError on permission/time-window failures.
    """
    # Validate the request payload.
    serializer = ExecuteWorkflowSerializer(data=request.data)
    if not serializer.is_valid():
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    workflow_type = request.data['workflow_type']
    workflow_id = request.data['workflow_id']
    # SQL review ticket.
    if workflow_type == 2:
        mode = request.data['mode']
        engineer = request.data['engineer']
        user = Users.objects.get(username=engineer)
        # Either of the two execute permissions is sufficient.
        if not (user.has_perm('sql.sql_execute')
                or user.has_perm('sql.sql_execute_for_resource_group')):
            raise serializers.ValidationError({"errors": "你无权执行当前工单!"})
        if can_execute(user, workflow_id) is False:
            raise serializers.ValidationError({"errors": "你无权执行当前工单!"})
        if on_correct_time_period(workflow_id) is False:
            raise serializers.ValidationError(
                {"errors": "不在可执行时间范围内,如果需要修改执行时间请重新提交工单!"})
        # Fetch the audit record for later logging.
        audit_id = Audit.detail_by_workflow_id(
            workflow_id=workflow_id,
            workflow_type=WorkflowDict.workflow_type['sqlreview']).audit_id
        # Hand the ticket to the system for execution.
        if mode == "auto":
            # Mark the ticket as queued.
            SqlWorkflow(
                id=workflow_id,
                status="workflow_queuing").save(update_fields=['status'])
            # Drop any pending timed-execution schedule for this ticket.
            schedule_name = f"sqlreview-timing-{workflow_id}"
            del_schedule(schedule_name)
            # Enqueue the execution task.
            async_task('sql.utils.execute_sql.execute',
                       workflow_id,
                       user,
                       hook='sql.utils.execute_sql.execute_callback',
                       timeout=-1,
                       task_name=f'sqlreview-execute-{workflow_id}')
            # Write a ticket log entry.
            Audit.add_log(audit_id=audit_id,
                          operation_type=5,
                          operation_type_desc='执行工单',
                          operation_info='工单执行排队中',
                          operator=user.username,
                          operator_display=user.display)
        # Executed offline by hand.
        elif mode == "manual":
            # Mark the ticket finished.
            SqlWorkflow(id=workflow_id,
                        status="workflow_finish",
                        finish_time=datetime.datetime.now()).save(
                            update_fields=['status', 'finish_time'])
            # Write a ticket log entry.
            Audit.add_log(audit_id=audit_id,
                          operation_type=6,
                          operation_type_desc='手工工单',
                          operation_info='确认手工执行结束',
                          operator=user.username,
                          operator_display=user.display)
        # Notify only when the 'Execute' phase is enabled in config
        # (an empty/unset notify_phase_control means notify always).
        sys_config = SysConfig()
        is_notified = 'Execute' in sys_config.get('notify_phase_control').split(',') \
            if sys_config.get('notify_phase_control') else True
        if is_notified:
            notify_for_execute(SqlWorkflow.objects.get(id=workflow_id))
    # Data-archive ticket.
    elif workflow_type == 3:
        async_task('sql.archiver.archive',
                   workflow_id,
                   timeout=-1,
                   task_name=f'archive-{workflow_id}')
    return Response({'msg': '开始执行,执行结果请到工单详情页查看'})
def post(self, request):
    """Audit (approve or cancel) a workflow.

    Dispatches on workflow_type: 1 = query-privilege application,
    2 = SQL review ticket, 3 = data-archive application. Each branch runs
    the workflow-engine audit inside a transaction, syncs the business
    table, then sends an async notification on success.
    Raises serializers.ValidationError on validation/permission failures.
    """
    # Validate the request payload.
    serializer = AuditWorkflowSerializer(data=request.data)
    if not serializer.is_valid():
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    audit_type = request.data['audit_type']
    workflow_type = request.data['workflow_type']
    workflow_id = request.data['workflow_id']
    audit_remark = request.data['audit_remark']
    engineer = request.data['engineer']
    user = Users.objects.get(username=engineer)
    # Audit a query-privilege application.
    if workflow_type == 1:
        audit_status = 1 if audit_type == 'pass' else 2
        if audit_remark is None:
            audit_remark = ''
        if Audit.can_review(user, workflow_id, workflow_type) is False:
            raise serializers.ValidationError({"errors": "你无权操作当前工单!"})
        # Transaction keeps audit record and business table consistent.
        try:
            with transaction.atomic():
                audit_id = Audit.detail_by_workflow_id(
                    workflow_id=workflow_id,
                    workflow_type=WorkflowDict.workflow_type['query']
                ).audit_id
                # Run the workflow-engine audit.
                audit_result = Audit.audit(audit_id, audit_status,
                                           user.username, audit_remark)
                # Propagate the audit result to the business table.
                audit_detail = Audit.detail(audit_id)
                if audit_detail.workflow_type == WorkflowDict.workflow_type[
                        'query']:
                    # Update business audit status and insert privileges.
                    _query_apply_audit_call_back(
                        audit_detail.workflow_id,
                        audit_result['data']['workflow_status'])
        except Exception as msg:
            logger.error(traceback.format_exc())
            raise serializers.ValidationError({'errors': msg})
        else:
            # Async notification on success.
            async_task(notify_for_audit,
                       audit_id=audit_id,
                       audit_remark=audit_remark,
                       timeout=60,
                       task_name=f'query-priv-audit-{workflow_id}')
        return Response({
            'msg': 'passed'
        }) if audit_type == 'pass' else Response({'msg': 'canceled'})
    # Audit a SQL review ticket.
    elif workflow_type == 2:
        # Approve the ticket.
        if audit_type == 'pass':
            # Permission check.
            if Audit.can_review(user, workflow_id, workflow_type) is False:
                raise serializers.ValidationError({"errors": "你无权操作当前工单!"})
            # Transaction keeps audit record and business table consistent.
            try:
                with transaction.atomic():
                    # Run the workflow-engine audit.
                    audit_id = Audit.detail_by_workflow_id(
                        workflow_id=workflow_id,
                        workflow_type=WorkflowDict.workflow_type['sqlreview']
                    ).audit_id
                    audit_result = Audit.audit(
                        audit_id,
                        WorkflowDict.workflow_status['audit_success'],
                        user.username, audit_remark)
                    # Propagate the audit result to the business table.
                    if audit_result['data'][
                            'workflow_status'] == WorkflowDict.workflow_status[
                                'audit_success']:
                        # Flip the ticket to review-passed.
                        SqlWorkflow(id=workflow_id,
                                    status='workflow_review_pass').save(
                                        update_fields=['status'])
            except Exception as msg:
                logger.error(traceback.format_exc())
                raise serializers.ValidationError({'errors': msg})
            else:
                # Notify only when the 'Pass' phase is enabled in config
                # (an empty/unset notify_phase_control means notify always).
                sys_config = SysConfig()
                is_notified = 'Pass' in sys_config.get('notify_phase_control').split(',') \
                    if sys_config.get('notify_phase_control') else True
                if is_notified:
                    async_task(notify_for_audit,
                               audit_id=audit_id,
                               audit_remark=audit_remark,
                               timeout=60,
                               task_name=f'sqlreview-pass-{workflow_id}')
            return Response({'msg': 'passed'})
        # Reject or cancel the ticket.
        elif audit_type == 'cancel':
            workflow_detail = SqlWorkflow.objects.get(id=workflow_id)
            if audit_remark is None:
                raise serializers.ValidationError({"errors": "终止原因不能为空"})
            if can_cancel(user, workflow_id) is False:
                raise serializers.ValidationError({"errors": "你无权操作当前工单!"})
            # Transaction keeps audit record and business table consistent.
            try:
                with transaction.atomic():
                    # Fetch the audit record so it can be canceled/rejected.
                    audit_id = Audit.detail_by_workflow_id(
                        workflow_id=workflow_id,
                        workflow_type=WorkflowDict.workflow_type['sqlreview']
                    ).audit_id
                    # Only tickets still under manual review go through the
                    # workflow engine; others just get a log entry.
                    if workflow_detail.status != 'workflow_manreviewing':
                        # Write a ticket log entry.
                        if user.username == workflow_detail.engineer:
                            Audit.add_log(audit_id=audit_id,
                                          operation_type=3,
                                          operation_type_desc='取消执行',
                                          operation_info="取消原因:{}".format(
                                              audit_remark),
                                          operator=user.username,
                                          operator_display=user.display)
                        else:
                            Audit.add_log(audit_id=audit_id,
                                          operation_type=2,
                                          operation_type_desc='审批不通过',
                                          operation_info="审批备注:{}".format(
                                              audit_remark),
                                          operator=user.username,
                                          operator_display=user.display)
                    else:
                        # Submitter aborts; other users need review permission
                        # and reject instead.
                        if user.username == workflow_detail.engineer:
                            Audit.audit(
                                audit_id,
                                WorkflowDict.workflow_status['audit_abort'],
                                user.username, audit_remark)
                        elif user.has_perm('sql.sql_review'):
                            Audit.audit(
                                audit_id,
                                WorkflowDict.workflow_status['audit_reject'],
                                user.username, audit_remark)
                        else:
                            raise serializers.ValidationError(
                                {"errors": "Permission Denied"})
                    # Drop any pending timed-execution schedule.
                    if workflow_detail.status == 'workflow_timingtask':
                        schedule_name = f"sqlreview-timing-{workflow_id}"
                        del_schedule(schedule_name)
                    # Flip the ticket to manually aborted.
                    workflow_detail.status = 'workflow_abort'
                    workflow_detail.save()
            except Exception as msg:
                logger.error(f"取消工单报错,错误信息:{traceback.format_exc()}")
                raise serializers.ValidationError({'errors': msg})
            else:
                # Send the cancel/reject notification only when the 'Cancel'
                # phase is enabled in config (empty/unset means always).
                sys_config = SysConfig()
                is_notified = 'Cancel' in sys_config.get('notify_phase_control').split(',') \
                    if sys_config.get('notify_phase_control') else True
                if is_notified:
                    audit_detail = Audit.detail_by_workflow_id(
                        workflow_id=workflow_id,
                        workflow_type=WorkflowDict.workflow_type['sqlreview'])
                    if audit_detail.current_status in (
                            WorkflowDict.workflow_status['audit_abort'],
                            WorkflowDict.workflow_status['audit_reject']):
                        async_task(
                            notify_for_audit,
                            audit_id=audit_detail.audit_id,
                            audit_remark=audit_remark,
                            timeout=60,
                            task_name=f'sqlreview-cancel-{workflow_id}')
            return Response({'msg': 'canceled'})
    # Audit a data-archive application.
    elif workflow_type == 3:
        audit_status = 1 if audit_type == 'pass' else 2
        if audit_remark is None:
            audit_remark = ''
        if Audit.can_review(user, workflow_id, workflow_type) is False:
            raise serializers.ValidationError({"errors": "你无权操作当前工单!"})
        # Transaction keeps audit record and business table consistent.
        try:
            with transaction.atomic():
                audit_id = Audit.detail_by_workflow_id(
                    workflow_id=workflow_id,
                    workflow_type=WorkflowDict.workflow_type['archive']
                ).audit_id
                # Run the workflow-engine audit and sync the business table.
                audit_status = Audit.audit(
                    audit_id, audit_status, user.username,
                    audit_remark)['data']['workflow_status']
                ArchiveConfig(
                    id=workflow_id,
                    status=audit_status,
                    state=True if audit_status ==
                    WorkflowDict.workflow_status['audit_success'] else
                    False).save(update_fields=['status', 'state'])
        except Exception as msg:
            logger.error(traceback.format_exc())
            raise serializers.ValidationError({'errors': msg})
        else:
            # Async notification on success.
            async_task(notify_for_audit,
                       audit_id=audit_id,
                       audit_remark=audit_remark,
                       timeout=60,
                       task_name=f'archive-audit-{workflow_id}')
        return Response({
            'msg': 'passed'
        }) if audit_type == 'pass' else Response({'msg': 'canceled'})