Example 1
def _create_and_start_data_flow(username, project_id, cc_biz_id, data_type):
    # The data platform feature is not enabled; return directly
    if not IS_DATA_OPEN:
        return True, _("数据平台功能暂未开启")

    try:
        project_data = ProjectDataInfo.objects.get(project_id=project_id, cc_biz_id=cc_biz_id)
    except ProjectDataInfo.DoesNotExist:
        return False, _("请先在数据平台创建项目信息")

    if not project_data.standard_data_id and data_type == DataType.SLOG.value:
        logger.error("no standard_data_id")
        return True, ""

    if not project_data.non_standard_data_id and data_type == DataType.CLOG.value:
        logger.error("no non_standard_data_id")
        return True, ""

    # Dispatch to the data bus matching this log type, then create and start
    # the clean & storage tasks
    data_bus_cls = create_data_bus(data_type)
    data_bus = data_bus_cls(project_data)
    result, message = data_bus.clean_and_storage_data(username)
    if not result:
        message = _("启动{}日志采集清洗和入库任务失败[{}],原因:{},请关注").format(data_type, project_id, message)
        notify_manager(message)
        return False, message

    return True, message
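
Example 1 delegates the actual work to a create_data_bus(data_type) factory that is not shown in these snippets. A minimal sketch of what such a factory could look like, assuming hypothetical SLogDataBus and CLogDataBus classes that expose the clean_and_storage_data(username) method used above:

class BaseDataBus:
    """Hypothetical base class: holds the project record and defines the interface used above."""

    def __init__(self, project_data):
        self.project_data = project_data

    def clean_and_storage_data(self, username):
        raise NotImplementedError


class SLogDataBus(BaseDataBus):
    data_id_field = "standard_data_id"


class CLogDataBus(BaseDataBus):
    data_id_field = "non_standard_data_id"


def create_data_bus(data_type):
    # Map a DataType value to the class handling that log type
    registry = {
        DataType.SLOG.value: SLogDataBus,
        DataType.CLOG.value: CLogDataBus,
    }
    return registry[data_type]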
Example 2
def create_and_start_standard_data_flow(username, project_id, cc_app_id):
    """
    @summary: 标准日志:创建清洗配置,并启动清洗任务;创建分发存储,并启动对应的分发任务
    @note: 初始化集群时调用
    @return: True/data_project_id, False/error_msg
    """
    # The data platform feature is not enabled; return directly
    if not IS_DATA_OPEN:
        return True, _("数据平台功能暂未开启")

    try:
        project = ProjectDataInfo.objects.get(project_id=project_id)
    except ProjectDataInfo.DoesNotExist:
        return False, _("请先在数据平台创建项目信息")
    # The task info is already in the DB, which means the tasks have been
    # created/started; no need to start them again
    if project.standard_flow_id and project.standard_flow_task_id:
        return True, project.standard_flow_id

    # The cleaning config already exists; don't recreate it, only run the shipping task
    if not project.standard_flow_id:
        # Create the cleaning config
        res, flow_id = setup_clean(username, cc_app_id, project.standard_data_id, DataType.SLOG.value)
        if not res:
            message = '{prefix_msg}[{project_id}],{reason}:{flow_id},{suffix_msg}'.format(
                prefix_msg=_("启动标准日志采集清洗任务失败"),
                project_id=project_id,
                reason=_("原因"),
                flow_id=flow_id,
                suffix_msg=_("请关注")
            )
            notify_manager(message)
            return False, '{}:{}'.format(_("启动标准日志采集清洗任务失败"), flow_id)
    else:
        flow_id = project.standard_flow_id

    # Start the shipping task
    res2, flow_task_id = setup_shipper(project.standard_data_id, flow_id, DataType.SLOG.value)
    if not res2:
        message = '{prefix_msg}[{project_id}],{reason}:{flow_task_id},{suffix_msg}'.format(
            prefix_msg=_("启动标准日志采集分发任务失败"),
            project_id=project_id,
            reason=_("原因"),
            flow_task_id=flow_task_id,
            suffix_msg=_("请关注")
        )
        notify_manager(message)
        return False, '{}:{}'.format(_("启动标准日志采集分发任务失败"), flow_task_id)
    # Save the task ids to the DB so the next cluster initialization can query the status directly
    project.standard_flow_id = flow_id
    project.standard_flow_task_id = flow_task_id
    project.save()
    return True, _("启动标准日志采集任务成功")
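
The early-return branch above is what makes the call idempotent across cluster initializations. A minimal sketch, assuming the functions live in a module importable as data_flow, of how that branch could be unit-tested with unittest.mock:

from unittest import mock

import data_flow  # hypothetical module holding the functions shown here


def test_existing_flow_is_not_recreated():
    # The record already carries both ids, so neither setup step should run
    project = mock.Mock(standard_flow_id=42, standard_flow_task_id=7)
    with mock.patch.object(data_flow, "IS_DATA_OPEN", True), \
            mock.patch.object(data_flow.ProjectDataInfo.objects, "get", return_value=project), \
            mock.patch.object(data_flow, "setup_clean") as clean, \
            mock.patch.object(data_flow, "setup_shipper") as shipper:
        ok, flow_id = data_flow.create_and_start_standard_data_flow("admin", "p1", 100)
    assert (ok, flow_id) == (True, 42)
    clean.assert_not_called()
    shipper.assert_not_called()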
Example 3
def create_data_project(username, project_id, cc_app_id, english_name):
    """
    @summary: 在数据平台上创建项目信息
    @note: 创建项目时调用,关联了蓝鲸业务的项目才需要创建
    """
    # The data platform feature is not enabled; return directly
    if not IS_DATA_OPEN:
        return True

    project, _c = ProjectDataInfo.objects.get_or_create(
        project_id=project_id, defaults={'cc_biz_id': cc_app_id})
    # The project has already been created; don't create it again
    if all([
            project.data_project_id, project.standard_data_id,
            project.non_standard_data_id
    ]):
        return True

    # Apply for the standard log collection dataid; standard_data_name is written back to the DB field
    standard_data_name = f'{DataType.SLOG.value}_{english_name}'
    res1, standard_data_id = deploy_plan(username, cc_app_id,
                                         standard_data_name,
                                         DataType.SLOG.value)
    if not res1:
        notify_manager(u"申请标准日志采集dataid[业务ID:%s,项目名:%s]失败,原因:%s,请关注" %
                       (cc_app_id, english_name, standard_data_id))
        raise error_codes.APIError.f("申请标准日志采集dataid失败:%s" % standard_data_id)

    # Apply for the non-standard log collection dataid
    non_standard_data_name = f'{DataType.CLOG.value}_{english_name}'
    res2, non_standard_data_id = deploy_plan(username, cc_app_id,
                                             non_standard_data_name,
                                             DataType.CLOG.value)
    if not res2:
        notify_manager(u"申请非标准日志采集dataid[业务ID:%s,项目名:%s]失败,原因:%s,请关注" %
                       (cc_app_id, english_name, non_standard_data_id))
        raise error_codes.APIError.f("申请非标准日志采集dataid失败:%s" %
                                     non_standard_data_id)

    # The data platform V3 API has no concept of project_id, so use a default value
    project.data_project_id = 1

    project.standard_data_id = standard_data_id
    project.standard_data_name = standard_data_name
    project.non_standard_data_id = non_standard_data_id
    project.non_standard_data_name = non_standard_data_name
    project.save()
    return True
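
The ProjectDataInfo model is not included in these snippets. A sketch reconstructed from the field accesses in the examples above; field types and lengths are assumptions:

from django.db import models


class ProjectDataInfo(models.Model):
    project_id = models.CharField(max_length=64, unique=True)
    cc_biz_id = models.IntegerField()
    # Filled in by create_data_project
    data_project_id = models.IntegerField(null=True)
    standard_data_id = models.IntegerField(null=True)
    standard_data_name = models.CharField(max_length=128, default="")
    non_standard_data_id = models.IntegerField(null=True)
    non_standard_data_name = models.CharField(max_length=128, default="")
    # Filled in by create_and_start_standard_data_flow
    standard_flow_id = models.IntegerField(null=True)
    standard_flow_task_id = models.IntegerField(null=True)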
Example 4
def deploy_log_plan(username, cc_biz_id, project_code, data_log_type):
    """Apply for (or reuse) the data platform dataid for the given log type; returns (data_id, data_log_name)"""
    data_log_name = f"{data_log_type}_{project_code}_{cc_biz_id}"
    data_id = get_data_id_by_name(data_log_name)
    # Reuse the dataid if this name has already been registered
    if data_id:
        return data_id, data_log_name

    result, data = deploy_plan(username, cc_biz_id, data_log_name, data_log_type)
    if not result:
        notify_manager(
            _("申请{data_log_type}采集dataid[业务ID:{cc_biz_id},项目名:{project_code}]失败,原因:{data},请关注").format(
                data_log_type=data_log_type, cc_biz_id=cc_biz_id, project_code=project_code, data=data
            )
        )
        raise error_codes.APIError(_("申请{}采集dataid失败:{}").format(data_log_type, data))

    return data, data_log_name
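
A hypothetical call site for deploy_log_plan (all argument values are placeholders). Unlike the tuple-returning helpers above, it raises error_codes.APIError on failure, so the caller only handles the success path:

data_id, data_name = deploy_log_plan(
    username="admin",
    cc_biz_id=100148,
    project_code="demo",
    data_log_type=DataType.SLOG.value,
)
logger.info("standard log dataid %s registered under name %s", data_id, data_name)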
Example 5
def create_and_start_standard_data_flow(username, project_id, cc_app_id):
    """
    @summary: 标准日志:创建清洗配置,并启动清洗任务;创建分发存储,并启动对应的分发任务
    @note: 初始化集群时调用
    @return: True/data_project_id, False/error_msg
    """
    # The data platform feature is not enabled; return directly
    if not IS_DATA_OPEN:
        return True, u"数据平台功能暂未开启"

    try:
        project = ProjectDataInfo.objects.get(project_id=project_id)
    except ProjectDataInfo.DoesNotExist:
        return False, u"请先在数据平台创建项目信息"
    # The task info is already in the DB, which means the tasks have been
    # created/started; no need to start them again
    if project.standard_flow_id and project.standard_flow_task_id:
        return True, project.standard_flow_id

    # The cleaning config already exists; don't recreate it, only run the shipping task
    if not project.standard_flow_id:
        # Create the cleaning config
        res, flow_id = setup_clean(username, cc_app_id,
                                   project.standard_data_id,
                                   DataType.SLOG.value)
        if not res:
            notify_manager(u"启动标准日志采集清洗任务失败[%s],原因:%s,请关注" %
                           (project_id, flow_id))
            return False, u"启动标准日志采集清洗任务失败:%s" % flow_id
    else:
        flow_id = project.standard_flow_id

    # Start the shipping task
    res2, flow_task_id = setup_shipper(project.standard_data_id, flow_id,
                                       DataType.SLOG.value)
    if not res2:
        notify_manager(u"启动标准日志采集分发任务失败[%s],原因:%s,请关注" %
                       (project_id, flow_task_id))
        return False, u"启动标准日志采集分发任务失败:%s" % flow_task_id
    # Save the task ids to the DB so the next cluster initialization can query the status directly
    project.standard_flow_id = flow_id
    project.standard_flow_task_id = flow_task_id
    project.save()
    return True, u"启动标准日志采集任务成功"
Example 6
def create_prometheus_data_flow(username, project_id, cc_app_id, english_name,
                                dataset):
    """prometheus 类型的Metric申请数据平台的dataid,并配置默认的清洗入库规则
    """
    # 数据平台功能没有开启,则直接返回
    if not IS_DATA_OPEN:
        return True, u"数据平台功能暂未开启"

    # 1. Submit the access deployment plan and obtain the dataid
    is_ok, data_id = deploy_plan(username, cc_app_id, dataset,
                                 DataType.METRIC.value)
    if not is_ok:
        notify_manager(
            u"申请Prometheus Metric dataid[业务ID:%s,项目名:%s]失败,原因:%s,请关注" %
            (cc_app_id, english_name, data_id))
        return False, "申请Prometheus Metric dataid:%s" % data_id

    # 2. Create the cleaning config and start the cleaning task
    res, result_table_id = setup_clean(username, cc_app_id, data_id,
                                       DataType.METRIC.value)
    if not res:
        notify_manager(u"创建Prometheus Metric清洗任务失败[%s],原因:%s,请关注" %
                       (english_name, result_table_id))
        return False, u"创建Prometheus Metric清洗任务失败:%s" % result_table_id

    # 3. Create the shipping storage and start the corresponding shipping task
    res2, msg = setup_shipper(data_id, result_table_id, DataType.METRIC.value)
    if not res2:
        notify_manager(u"启动非标准日志采集分发任务失败[%s],原因:%s,请关注" % (project_id, msg))
        return False, u"启动非标准日志采集分发任务失败:%s" % msg
    return True, data_id
Example 7
def create_prometheus_data_flow(username, project_id, cc_app_id, english_name, dataset):
    """prometheus 类型的Metric申请数据平台的dataid,并配置默认的清洗入库规则
    """
    # 数据平台功能没有开启,则直接返回
    if not IS_DATA_OPEN:
        return True, _("数据平台功能暂未开启")

    # 1. Submit the access deployment plan and obtain the dataid
    is_ok, data_id = deploy_plan(username, cc_app_id, dataset, DataType.METRIC.value)
    if not is_ok:
        message = '''{prefix_msg}Prometheus Metric dataid[{biz}ID:{cc_app_id},{project}:{english_name}]{fail},
        {reason}:{data_id},{suffix_msg}'''.format(
            prefix_msg=_("申请"),
            biz=_("业务"),
            cc_app_id=cc_app_id,
            project=_("项目名"),
            english_name=english_name,
            fail=_("失败"),
            reason=_("原因"),
            data_id=data_id,
            suffix_msg=_("请关注")
        )
        notify_manager(message)
        return False, '{}Prometheus Metric dataid:{}'.format(_("申请"), data_id)

    # 2. Create the cleaning config and start the cleaning task
    res, result_table_id = setup_clean(username, cc_app_id, data_id, DataType.METRIC.value)
    if not res:
        notify_manager('''{prefix_msg}Prometheus Metric{clean_task}[{english_name}],
        {reason}:{table_id},{suffix_msg}'''.format(
            prefix_msg=_("创建"),
            clean_task=_("清洗任务失败"),
            english_name=english_name,
            reason=_("原因"),
            table_id=result_table_id,
            suffix_msg=_("请关注")
        ))

        return False, '{}Prometheus Metric{}:{}'.format(_("创建"), _("清洗任务失败"), result_table_id)

    # 3. Create the shipping storage and start the corresponding shipping task
    res2, msg = setup_shipper(data_id, result_table_id, DataType.METRIC.value)
    if not res2:
        message = '{prefix_msg}[{project_id}],{reason}:{msg},{suffix_msg}'.format(
            prefix_msg=_("启动非标准日志采集分发任务失败"),
            project_id=project_id,
            reason=_("原因"),
            msg=msg,
            suffix_msg=_("请关注")
        )
        notify_manager(message)
        return False, '{}:{}'.format(_("启动非标准日志采集分发任务失败"), msg)
    return True, data_id
Example 8
def create_prometheus_data_flow(username, project_id, cc_app_id, english_name,
                                dataset):
    """prometheus 类型的Metric申请数据平台的dataid,并配置默认的清洗入库规则
    """
    # 数据平台功能没有开启,则直接返回
    if not IS_DATA_OPEN:
        return True, _("数据平台功能暂未开启")

    # 1. Submit the access deployment plan and obtain the dataid
    is_ok, data_id = deploy_plan(username, cc_app_id, dataset,
                                 DataType.METRIC.value)
    if not is_ok:
        message = _(
            '申请Prometheus Metric dataid[业务ID:{cc_app_id},项目名:{english_name}]失败,原因:{data_id},请关注'
        ).format(
            cc_app_id=cc_app_id,
            english_name=english_name,
            data_id=data_id,
        )
        notify_manager(message)
        return False, _('申请Prometheus Metric dataid:{}').format(data_id)

    # 2. Create the cleaning config and start the cleaning task
    res, result_table_id = setup_clean(username, cc_app_id, data_id,
                                       DataType.METRIC.value)
    if not res:
        notify_manager(
            _('创建Prometheus Metric清洗任务失败[{english_name}],原因:{table_id},请关注').
            format(
                english_name=english_name,
                table_id=result_table_id,
            ))

        return False, _('创建Prometheus Metric清洗任务失败:{}').format(result_table_id)

    # 3. Create the shipping storage and start the corresponding shipping task
    res2, msg = setup_shipper(data_id, result_table_id, DataType.METRIC.value)
    if not res2:
        message = _('启动非标准日志采集分发任务失败[{}],原因:{},请关注').format(project_id, msg)
        notify_manager(message)
        return False, _('启动非标准日志采集分发任务失败:{}').format(msg)
    return True, data_id
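
Examples 6 to 8 repeat the same notify-then-return shape in every failure branch. A refactor sketch (not the project's code), using the single-sentence message style of Example 8; the helper name and parameters are hypothetical:

def _notify_and_fail(action, identity, reason):
    # action / identity / reason are hypothetical parameter names
    message = _("{action}失败[{identity}],原因:{reason},请关注").format(
        action=action, identity=identity, reason=reason
    )
    notify_manager(message)
    return False, _("{action}失败:{reason}").format(action=action, reason=reason)

Each failure branch then reduces to a single return _notify_and_fail(...) call.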
Example 9
def create_data_project(username, project_id, cc_app_id, english_name):
    """
    @summary: 在数据平台上创建项目信息
    @note: 创建项目时调用,关联了蓝鲸业务的项目才需要创建
    """
    # 数据平台功能没有开启,则直接返回
    if not IS_DATA_OPEN:
        return True

    project, _c = ProjectDataInfo.objects.get_or_create(project_id=project_id, defaults={'cc_biz_id': cc_app_id})
    # The project has already been created; don't create it again
    if all([project.data_project_id, project.standard_data_id, project.non_standard_data_id]):
        return True

    # Apply for the standard log collection dataid; standard_data_name is written back to the DB field
    standard_data_name = f'{DataType.SLOG.value}_{english_name}'
    res1, standard_data_id = deploy_plan(username, cc_app_id, standard_data_name, DataType.SLOG.value)
    if not res1:
        message = '''{prefix_msg}dataid[{biz}ID:{cc_app_id},{project}:{english_name}]{fail},
        {reason}:{standard_data_id},{suffix_msg}'''.format(
            prefix_msg=_("申请标准日志采集"),
            biz=_("业务"),
            cc_app_id=cc_app_id,
            project=_("项目名"),
            english_name=english_name,
            fail=_("失败"),
            reason=_("原因"),
            standard_data_id=standard_data_id,
            suffix_msg=_("请关注")
        )
        notify_manager(message)
        raise error_codes.APIError('{}dataid{}:{}'.format(_("申请标准日志采集"), _("失败"), standard_data_id))

    # Apply for the non-standard log collection dataid
    non_standard_data_name = f'{DataType.CLOG.value}_{english_name}'
    res2, non_standard_data_id = deploy_plan(username, cc_app_id, non_standard_data_name, DataType.CLOG.value)
    if not res2:
        message = '''{prefix_msg}dataid[{biz}ID:{cc_app_id},{project}:{english_name}]{fail},
        {reason}:{non_standard_data_id},{suffix_msg}'''.format(
            prefix_msg=_("申请非标准日志采集"),
            biz=_("业务"),
            cc_app_id=cc_app_id,
            project=_("项目名"),
            english_name=english_name,
            fail=_("失败"),
            reason=_("原因"),
            non_standard_data_id=non_standard_data_id,
            suffix_msg=_("请关注")
        )
        notify_manager(message)
        raise error_codes.APIError('{}dataid{}:{}'.format(_("申请非标准日志采集"), _("失败"), non_standard_data_id))

    # The data platform V3 API has no concept of project_id, so use a default value
    project.data_project_id = 1

    project.standard_data_id = standard_data_id
    project.standard_data_name = standard_data_name
    project.non_standard_data_id = non_standard_data_id
    project.non_standard_data_name = non_standard_data_name
    project.save()
    return True
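
The standard and non-standard branches in Example 9 duplicate the same apply-notify-raise sequence. A sketch (an assumption, not the project's code), reusing the per-type message wording already present in Example 4, of driving both applications from one loop:

def _apply_log_dataids(username, cc_app_id, english_name):
    # Apply for one dataid per log type and return {data_type: (data_id, data_name)}
    dataids = {}
    for data_type in (DataType.SLOG.value, DataType.CLOG.value):
        data_name = f'{data_type}_{english_name}'
        ok, data_id = deploy_plan(username, cc_app_id, data_name, data_type)
        if not ok:
            notify_manager(
                _("申请{}采集dataid[业务ID:{},项目名:{}]失败,原因:{},请关注").format(
                    data_type, cc_app_id, english_name, data_id
                )
            )
            raise error_codes.APIError(_("申请{}采集dataid失败:{}").format(data_type, data_id))
        dataids[data_type] = (data_id, data_name)
    return dataids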