def create_data_project(username, project_id, cc_app_id, english_name):
    """Create the project's record on the data platform.

    Invoked at project-creation time; only projects bound to a BlueKing
    (CC) business need this provisioning. Returns True on success or when
    nothing needs to be done.
    """
    # Data platform feature is switched off — nothing to provision.
    if not IS_DATA_OPEN:
        return True

    record, _created = ProjectDataInfo.objects.get_or_create(
        project_id=project_id, defaults={'cc_biz_id': cc_app_id})

    # Skip projects that already have all three ids provisioned.
    if record.data_project_id and record.standard_data_id and record.non_standard_data_id:
        return True

    # Apply for the standard-log (SLOG) collection data id.
    slog_name = f'{DataType.SLOG.value}_{english_name}'
    slog_ok, slog_data_id = deploy_plan(username, cc_app_id, slog_name, DataType.SLOG.value)
    if not slog_ok:
        notify_manager(u"申请标准日志采集dataid[业务ID:%s,项目名:%s]失败,原因:%s,请关注"
                       % (cc_app_id, english_name, slog_data_id))
        raise error_codes.APIError.f("申请标准日志采集dataid失败:%s" % slog_data_id)

    # Apply for the non-standard-log (CLOG) collection data id.
    clog_name = f'{DataType.CLOG.value}_{english_name}'
    clog_ok, clog_data_id = deploy_plan(username, cc_app_id, clog_name, DataType.CLOG.value)
    if not clog_ok:
        notify_manager(u"申请非标准日志采集dataid[业务ID:%s,项目名:%s]失败,原因:%s,请关注"
                       % (cc_app_id, english_name, clog_data_id))
        raise error_codes.APIError.f("申请非标准日志采集dataid失败:%s" % clog_data_id)

    # The data platform V3 API has no notion of project_id; store a default.
    record.data_project_id = 1
    record.standard_data_id = slog_data_id
    record.standard_data_name = slog_name
    record.non_standard_data_id = clog_data_id
    record.non_standard_data_name = clog_name
    record.save()
    return True
def create_prometheus_data_flow(username, project_id, cc_app_id, english_name, dataset):
    """Apply for a data id for a Prometheus-type metric and set up the
    default clean + storage pipeline on the data platform.

    Returns ``(ok, payload)``: on success ``payload`` is the data id,
    otherwise an error message.
    """
    # Data platform feature is switched off — succeed with a note.
    if not IS_DATA_OPEN:
        return True, u"数据平台功能暂未开启"

    # Step 1: submit the access/deploy plan to obtain a data id.
    deployed, data_id = deploy_plan(username, cc_app_id, dataset, DataType.METRIC.value)
    if not deployed:
        notify_manager(
            u"申请Prometheus Metric dataid[业务ID:%s,项目名:%s]失败,原因:%s,请关注"
            % (cc_app_id, english_name, data_id))
        return False, "申请Prometheus Metric dataid:%s" % data_id

    # Step 2: create the cleaning config and start the cleaning task.
    cleaned, result_table_id = setup_clean(username, cc_app_id, data_id, DataType.METRIC.value)
    if not cleaned:
        notify_manager(u"创建Prometheus Metric清洗任务失败[%s],原因:%s,请关注"
                       % (english_name, result_table_id))
        return False, u"创建Prometheus Metric清洗任务失败:%s" % result_table_id

    # Step 3: create the storage shipper and start the distribution task.
    shipped, msg = setup_shipper(data_id, result_table_id, DataType.METRIC.value)
    if not shipped:
        notify_manager(u"启动非标准日志采集分发任务失败[%s],原因:%s,请关注" % (project_id, msg))
        return False, u"启动非标准日志采集分发任务失败:%s" % msg

    return True, data_id
def apply_dataid_by_metric(biz_id, dataset, operator):
    """Request a data id from the data platform for a metric dataset.

    Returns ``(ok, data_id)``; when the data platform feature is disabled
    the request is skipped and ``(True, 0)`` is returned.
    """
    if not IS_DATA_OPEN:
        # Feature switched off — nothing to apply for.
        return True, 0
    ok, data_id = deploy_plan(operator, biz_id, dataset, DataType.METRIC.value)
    return ok, data_id
def create_prometheus_data_flow(username, project_id, cc_app_id, english_name, dataset):
    """Apply for a data id for a Prometheus-type metric and set up the
    default clean + storage pipeline on the data platform.

    Returns ``(ok, payload)``: on success ``payload`` is the data id,
    otherwise an error message.
    """
    # Data platform feature is switched off — succeed with a note.
    if not IS_DATA_OPEN:
        return True, _("数据平台功能暂未开启")

    # 1. Submit the access/deploy plan to obtain a data id.
    is_ok, data_id = deploy_plan(username, cc_app_id, dataset, DataType.METRIC.value)
    if not is_ok:
        # FIX: use one translatable string instead of concatenated fragments;
        # the fragments produced a wrong prefix ("申请标准日志采集"), a wrong
        # suffix ("失败" instead of "请关注") and leaked the triple-quoted
        # string's newline/indentation into the notification text.
        message = _("申请Prometheus Metric dataid[业务ID:{cc_app_id},项目名:{english_name}]失败,原因:{data_id},请关注").format(
            cc_app_id=cc_app_id, english_name=english_name, data_id=data_id)
        notify_manager(message)
        return False, _("申请Prometheus Metric dataid:{}").format(data_id)

    # 2. Create the cleaning config and start the cleaning task.
    res, result_table_id = setup_clean(username, cc_app_id, data_id, DataType.METRIC.value)
    if not res:
        notify_manager(
            _("创建Prometheus Metric清洗任务失败[{english_name}],原因:{table_id},请关注").format(
                english_name=english_name, table_id=result_table_id))
        return False, _("创建Prometheus Metric清洗任务失败:{}").format(result_table_id)

    # 3. Create the storage shipper and start the distribution task.
    res2, msg = setup_shipper(data_id, result_table_id, DataType.METRIC.value)
    if not res2:
        message = _("启动非标准日志采集分发任务失败[{project_id}],原因:{msg},请关注").format(
            project_id=project_id, msg=msg)
        notify_manager(message)
        return False, _("启动非标准日志采集分发任务失败:{}").format(msg)

    return True, data_id
def deploy_log_plan(username, cc_biz_id, project_code, data_log_type):
    """Ensure a log-collection data id exists for the given business/project.

    Reuses an existing data id when one is already registered under the
    derived name; otherwise submits a deploy plan. Returns
    ``(data_id, data_log_name)``. Raises ``error_codes.APIError`` when the
    deploy plan is rejected (after notifying the managers).
    """
    log_name = f"{data_log_type}_{project_code}_{cc_biz_id}"

    # Fast path: a data id was already registered under this name.
    existing_id = get_data_id_by_name(log_name)
    if existing_id:
        return existing_id, log_name

    ok, payload = deploy_plan(username, cc_biz_id, log_name, data_log_type)
    if ok:
        # On success the second element of the deploy result is the data id.
        return payload, log_name

    # On failure ``payload`` carries the error reason.
    notify_manager(
        _("申请{data_log_type}采集dataid[业务ID:{cc_biz_id},项目名:{project_code}]失败,原因:{data},请关注").format(
            data_log_type=data_log_type,
            cc_biz_id=cc_biz_id,
            project_code=project_code,
            data=payload,
        )
    )
    raise error_codes.APIError(_("申请{}采集dataid失败:{}").format(data_log_type, payload))
def create_prometheus_data_flow(username, project_id, cc_app_id, english_name, dataset):
    """Apply for a data id for a Prometheus-type metric and set up the
    default clean + storage pipeline on the data platform.

    Returns ``(ok, payload)``: on success ``payload`` is the data id,
    otherwise an error message.
    """
    # Data platform feature is switched off — succeed with a note.
    if not IS_DATA_OPEN:
        return True, _("数据平台功能暂未开启")

    # 1. Submit the access/deploy plan to obtain a data id.
    is_ok, data_id = deploy_plan(username, cc_app_id, dataset, DataType.METRIC.value)
    if not is_ok:
        # FIX: prefix said "申请标准日志采集" and the suffix said "失败" —
        # this is a Prometheus Metric request and the sibling messages end
        # with "请关注".
        message = _(
            '申请Prometheus Metric dataid[业务ID:{cc_app_id},项目名:{english_name}]失败,原因:{data_id},请关注'
        ).format(
            cc_app_id=cc_app_id,
            english_name=english_name,
            data_id=data_id,
        )
        notify_manager(message)
        return False, _('申请Prometheus Metric dataid:{}').format(data_id)

    # 2. Create the cleaning config and start the cleaning task.
    res, result_table_id = setup_clean(username, cc_app_id, data_id, DataType.METRIC.value)
    if not res:
        # FIX: missing comma before "请关注".
        notify_manager(
            _('创建Prometheus Metric清洗任务失败[{english_name}],原因:{table_id},请关注').format(
                english_name=english_name,
                table_id=result_table_id,
            ))
        return False, _('创建Prometheus Metric清洗任务失败:{}').format(result_table_id)

    # 3. Create the storage shipper and start the distribution task.
    res2, msg = setup_shipper(data_id, result_table_id, DataType.METRIC.value)
    if not res2:
        # FIX: the original mixed one positional arg with ``msg=msg`` against
        # two positional ``{}`` placeholders, which raises IndexError at
        # runtime; pass both values positionally.
        message = _('启动非标准日志采集分发任务失败[{}],原因:{},请关注').format(project_id, msg)
        notify_manager(message)
        return False, _('启动非标准日志采集分发任务失败:{}').format(msg)

    return True, data_id
def create_data_project(username, project_id, cc_app_id, english_name):
    """Create the project's record on the data platform.

    Invoked at project-creation time; only projects bound to a BlueKing
    (CC) business need this provisioning. Returns True on success or when
    nothing needs to be done. Raises ``error_codes.APIError`` when a
    data-id application is rejected.
    """
    # Data platform feature is switched off — nothing to provision.
    if not IS_DATA_OPEN:
        return True

    project, _created = ProjectDataInfo.objects.get_or_create(
        project_id=project_id, defaults={'cc_biz_id': cc_app_id})

    # Skip projects that already have all three ids provisioned.
    if all([project.data_project_id, project.standard_data_id, project.non_standard_data_id]):
        return True

    # Apply for the standard-log (SLOG) collection data id.
    standard_data_name = f'{DataType.SLOG.value}_{english_name}'
    res1, standard_data_id = deploy_plan(username, cc_app_id, standard_data_name, DataType.SLOG.value)
    if not res1:
        # FIX: single translatable string (fragments dropped the brackets and
        # ended with "失败" instead of "请关注").
        notify_manager(
            _("申请标准日志采集dataid[业务ID:{cc_app_id},项目名:{english_name}]失败,原因:{data},请关注").format(
                cc_app_id=cc_app_id, english_name=english_name, data=standard_data_id))
        raise error_codes.APIError(_("申请标准日志采集dataid失败:{}").format(standard_data_id))

    # Apply for the non-standard-log (CLOG) collection data id.
    non_standard_data_name = f'{DataType.CLOG.value}_{english_name}'
    res2, non_standard_data_id = deploy_plan(username, cc_app_id, non_standard_data_name, DataType.CLOG.value)
    if not res2:
        # FIX: this branch reported and raised with ``standard_data_id``
        # (copy-paste) — the failure reason here is ``non_standard_data_id``.
        notify_manager(
            _("申请非标准日志采集dataid[业务ID:{cc_app_id},项目名:{english_name}]失败,原因:{data},请关注").format(
                cc_app_id=cc_app_id, english_name=english_name, data=non_standard_data_id))
        raise error_codes.APIError(_("申请非标准日志采集dataid失败:{}").format(non_standard_data_id))

    # The data platform V3 API has no notion of project_id; store a default.
    project.data_project_id = 1
    project.standard_data_id = standard_data_id
    project.standard_data_name = standard_data_name
    project.non_standard_data_id = non_standard_data_id
    project.non_standard_data_name = non_standard_data_name
    project.save()
    return True