Example #1
0
def connectors_status_check():
    """Poll every Kafka Connect server in ALI_SERVER_LIST and, for each
    connector task that is not in the RUNNING state, send a DingDing alert
    and POST a restart request for that task.
    """
    SERVER_LIST = ALI_SERVER_LIST

    # FIX: build the alert client once instead of re-creating it for every
    # failed task inside the innermost loop.
    dingding_alert = DingdingAlert(
        'https://oapi.dingtalk.com/robot/send?access_token=928e66bef8d88edc89fe0f0ddd52bfa4dd28bd4b1d24ab4626c804df8878bb48'
    )

    for server_ip, server_port in SERVER_LIST:
        url = CONNECTORS_URL % (server_ip, server_port)
        r = requests.get(url)
        connectors = r.json()
        for connector in connectors:
            c_url = STATUS_URL % (server_ip, server_port, connector)
            logging.info('connector url %s', c_url)
            c_r = requests.get(c_url)
            content = c_r.json()
            tasks = content['tasks']
            for task in tasks:
                if task['state'] != 'RUNNING':
                    # DingDing alert
                    # NOTE(review): assumes every non-RUNNING task carries a
                    # 'trace' field — PAUSED tasks may not; verify upstream.
                    msg = """
                        DW kafka connectors状态异常,请检查。
                        {connector}, task_len:{task_len}, error_task_id:{task_id}, error_msg:{error_msg}
                    """.format(connector=connector,
                               task_len=len(tasks),
                               task_id=task['id'],
                               error_msg=task['trace'])
                    dingding_alert.send(msg)
                    # restart the failed task via the Connect REST API
                    restart_url = TASK_RESTART_URL % (server_ip, server_port,
                                                      connector, task['id'])
                    requests.post(restart_url)
                    logging.info(
                        'ip %s, port %s,  connector %s, task id %d restart. url %s',
                        server_ip, server_port, connector, task['id'],
                        restart_url)
Example #2
0
def on_success_callback(context):
    """Airflow success callback: when a run completes 12 hours or more past
    its scheduled ``next_execution_date`` (a backfill that overran), send a
    DingDing "output timed out" alert.
    """
    # maximum tolerated scheduling delay: 12 hours, in seconds
    max_delayed_time = 43200

    # scheduled time of the run
    next_execution_dt = pendulum.parse(str(context['next_execution_date']))
    # wall-clock time at callback invocation
    now_dt = pendulum.parse('now')

    # guard clause: on-time runs need no alert
    delay = now_dt.int_timestamp - next_execution_dt.int_timestamp
    if delay < max_delayed_time:
        return

    dingding_alert = DingdingAlert(
        'https://oapi.dingtalk.com/robot/send?access_token=928e66bef8d88edc89fe0f0ddd52bfa4dd28bd4b1d24ab4626c804df8878bb48'
    )
    ti = context['task_instance']
    task = "{dag}.{task}".format(dag=ti.dag_id, task=ti.task_id)
    msg = "任务回溯操作{task},计划执行时间:{ne},当前执行时间:{nt}".format(
        task=task, ne=next_execution_dt, nt=now_dt)
    dingding_alert.send('DW {msg} 产出超时'.format(msg=msg))
Example #3
0
class TaskTouchzSuccess(object):
    """Create and validate HDFS ``_SUCCESS`` marker files for DW partitions.

    Every method shells out to the hadoop CLI via ``os.popen``; any
    unrecoverable problem is reported with ``sys.exit(1)`` so the calling
    scheduler task fails visibly.  Empty-data conditions additionally raise
    a DingDing alert.
    """

    def __init__(self):
        # DingDing webhook client used for "no data produced" alerts.
        # NOTE(review): the attribute keeps its historical misspelling
        # ("alet") in case external code references it.
        self.dingding_alet = DingdingAlert(
            'https://oapi.dingtalk.com/robot/send?access_token=928e66bef8d88edc89fe0f0ddd52bfa4dd28bd4b1d24ab4626c804df8878bb48'
        )
        self.table_name = ""          # hive table currently being processed
        self.hdfs_data_dir_str = ""   # HDFS partition dir currently targeted
        self.db_name = ""
        self.ds = ""                  # partition date (dt=...)

    # ------------------------------------------------------------------
    # internal helpers
    # ------------------------------------------------------------------
    def _run_first_line(self, command):
        """Run *command* via ``os.popen`` and return its first output line
        lower-cased and stripped, or ``'0'`` when there is no output at all
        (the old code indexed ``readlines()[0]`` unconditionally and could
        crash with IndexError)."""
        out = os.popen(command, 'r')
        res = out.readlines()
        out.close()
        return res[0].lower().strip() if res else '0'

    def _hdfs_dir_is_empty(self):
        """Return True when ``hadoop fs -du -s`` reports a zero-byte (or
        unreadable) directory for ``self.hdfs_data_dir_str``."""
        line_str = "$HADOOP_HOME/bin/hadoop fs -du -s {hdfs_data_dir} | tail -1 | awk \'{{print $1}}\'".format(
            hdfs_data_dir=self.hdfs_data_dir_str)
        logging.info(line_str)
        with os.popen(line_str) as p:
            line_num = p.read()
        # `du` prints no leading zeros, so a first character of '0' means a
        # total size of exactly 0; no output at all is also treated as empty.
        return (not line_num) or line_num[0] == '0'

    def _partition_dir(self, data_hdfs_path, country_code, hour):
        """Build the HDFS partition path with optional country/hour levels."""
        path = data_hdfs_path
        if country_code is not None:
            path = path + "/country_code=" + country_code
        path = path + "/dt=" + self.ds
        if hour is not None:
            path = path + "/hour=" + hour
        return path

    # ------------------------------------------------------------------
    # public API
    # ------------------------------------------------------------------
    def set_touchz_success(self, tables):
        """Validate that each table's HDFS dir is non-empty, then touch a
        ``_SUCCESS`` marker inside it.

        :param tables: iterable of ``{'table': ..., 'hdfs_path': ...}`` dicts.
        Exits the process (after a DingDing alert) when a directory is empty.
        """
        try:
            for item in tables:
                self.table_name = item.get('table', None)
                self.hdfs_data_dir_str = item.get('hdfs_path', None)

                # BUG FIX: the emptiness check and touchz used to sit outside
                # this loop, so only the LAST table was ever validated.
                if self._hdfs_dir_is_empty():
                    # no data produced -> alert and abort
                    self.dingding_alet.send('DW调度系统任务 {jobname} 数据产出异常'.format(
                        jobname=self.table_name))
                    logging.info("Error : {hdfs_data_dir} is empty".format(
                        hdfs_data_dir=self.hdfs_data_dir_str))
                    sys.exit(1)

                succ_str = "$HADOOP_HOME/bin/hadoop fs -touchz {hdfs_data_dir}/_SUCCESS".format(
                    hdfs_data_dir=self.hdfs_data_dir_str)
                logging.info(succ_str)
                os.popen(succ_str)
                # give the async popen a moment to complete
                time.sleep(10)
                logging.info("DATA EXPORT Successed ......")

        except Exception as e:
            logging.error(e)
            sys.exit(1)

    def get_country_code(self):
        """Return a comma-separated string of two-letter country codes found
        in the current partition, or ``"nal"`` when none are present."""
        cursor = get_hive_cursor()

        get_sql = '''

        select concat_ws(',',collect_set(country_code)) as country_code from {db}.{table} WHERE dt='{pt}'
    
        '''.format(pt=self.ds, table=self.table_name, db=self.db_name)

        cursor.execute(get_sql)
        res = cursor.fetchone()

        if len(res[0]) > 1:
            country_code_list = res[0]
            logging.info('Executing 二位国家码: %s', country_code_list)
        else:
            # no codes in the partition -> fall back to the default bucket
            country_code_list = "nal"
            logging.info('Executing 二位国家码为空,赋予默认值 %s', country_code_list)

        return country_code_list

    def check_success_exist(self):
        """Verify the ``_SUCCESS`` marker exists; ``sys.exit(1)`` otherwise."""
        time.sleep(15)
        print("debug-> check_success_exist")

        command = "hadoop dfs -ls {hdfs_data_dir}/_SUCCESS>/dev/null 2>/dev/null && echo 1 || echo 0".format(
            hdfs_data_dir=self.hdfs_data_dir_str)
        logging.info(command)

        # the shell echoes '1' when the marker exists, '0' otherwise
        res = self._run_first_line(command)

        if res == '' or res == 'None' or res[0] == '0':
            logging.info("_SUCCESS 验证失败")
            sys.exit(1)
        else:
            logging.info("_SUCCESS 验证成功")

    def delete_exist_partition(self):
        """Delete the current partition directory (for idempotent re-runs)
        and verify it is gone; ``sys.exit(1)`` when it still exists."""
        time.sleep(10)
        print("debug-> delete_exist_partition")

        del_command = "hadoop dfs -rm -r {hdfs_data_dir}".format(
            hdfs_data_dir=self.hdfs_data_dir_str)
        logging.info(del_command)
        os.popen(del_command, 'r')
        time.sleep(10)

        # '1' -> directory still listed, '0' -> gone
        check_command = "hadoop dfs -ls {hdfs_data_dir}>/dev/null 2>/dev/null && echo 1 || echo 0".format(
            hdfs_data_dir=self.hdfs_data_dir_str)
        logging.info(check_command)
        res = self._run_first_line(check_command)
        print(res)

        if res == '' or res == 'None' or res[0] == '0':
            logging.info("目录删除成功")
        else:
            logging.info("目录删除失败:" + " " + "{hdfs_data_dir}".format(
                hdfs_data_dir=self.hdfs_data_dir_str))
            sys.exit(1)

    def data_not_file_type_touchz(self):
        """Touch ``_SUCCESS`` even when the partition holds no data
        (creates the directory first)."""
        try:
            print("debug-> data_not_file_type_touchz")

            mkdir_str = "$HADOOP_HOME/bin/hadoop fs -mkdir -p {hdfs_data_dir}".format(
                hdfs_data_dir=self.hdfs_data_dir_str)
            logging.info(mkdir_str)
            os.popen(mkdir_str)
            time.sleep(10)

            succ_str = "$HADOOP_HOME/bin/hadoop fs -touchz {hdfs_data_dir}/_SUCCESS".format(
                hdfs_data_dir=self.hdfs_data_dir_str)
            logging.info(succ_str)
            os.popen(succ_str)

            logging.info("DATA EXPORT Successed ......")
            self.check_success_exist()

        except Exception as e:
            logging.error(e)
            sys.exit(1)

    def data_file_type_touchz(self):
        """Touch ``_SUCCESS`` only when the partition actually contains
        data; alert and ``sys.exit(1)`` when it is empty."""
        try:
            print("debug-> data_file_type_touchz")

            if self._hdfs_dir_is_empty():
                # no data produced -> alert and abort
                self.dingding_alet.send('DW调度系统任务 {jobname} 数据产出异常'.format(
                    jobname=self.table_name))
                logging.info("Error : {hdfs_data_dir} is empty".format(
                    hdfs_data_dir=self.hdfs_data_dir_str))
                sys.exit(1)
            else:
                time.sleep(5)
                succ_str = "hadoop fs -touchz {hdfs_data_dir}/_SUCCESS".format(
                    hdfs_data_dir=self.hdfs_data_dir_str)
                logging.info(succ_str)
                os.popen(succ_str)
                logging.info("DATA EXPORT Successed ......")

            self.check_success_exist()

        except Exception as e:
            logging.error(e)
            sys.exit(1)

    def del_path(self,
                 ds,
                 db_name,
                 table_name,
                 data_hdfs_path,
                 country_partition="true",
                 file_type="true",
                 hour=None):
        """Delete every partition directory covered by the arguments.

        country_partition: "true" when the table has a country_code level.
        file_type: accepted for signature compatibility; deletion behaves
        identically for "true" and "false" (the four duplicated branches of
        the original collapsed to this).
        """
        try:
            self.db_name = db_name
            self.ds = ds
            self.table_name = table_name

            # original code only acted on the literal values true/false
            if file_type.lower() not in ("true", "false"):
                return

            if country_partition.lower() == "false":
                self.hdfs_data_dir_str = self._partition_dir(
                    data_hdfs_path, None, hour)
                self.delete_exist_partition()
                return

            if country_partition.lower() != "true":
                return

            # one directory per country code
            for country_code_word in self.get_country_code().split(","):
                self.hdfs_data_dir_str = self._partition_dir(
                    data_hdfs_path, country_code_word, hour)
                self.delete_exist_partition()

        except Exception as e:
            logging.error(e)
            sys.exit(1)

    def countries_touchz_success(self,
                                 ds,
                                 db_name,
                                 table_name,
                                 data_hdfs_path,
                                 country_partition="true",
                                 file_type="true",
                                 hour=None):
        """Touch ``_SUCCESS`` for every partition covered by the arguments.

        country_partition: "true" when the table has a country_code level.
        file_type: "true"  -> the partition must contain data,
                   "false" -> an empty partition still gets a marker.
        """
        try:
            self.db_name = db_name
            self.ds = ds
            self.table_name = table_name

            if file_type.lower() == "true":
                touchz = self.data_file_type_touchz
            elif file_type.lower() == "false":
                touchz = self.data_not_file_type_touchz
            else:
                return

            if country_partition.lower() == "false":
                self.hdfs_data_dir_str = self._partition_dir(
                    data_hdfs_path, None, hour)
                touchz()
                return

            if country_partition.lower() != "true":
                return

            # one directory per country code
            for country_code_word in self.get_country_code().split(","):
                self.hdfs_data_dir_str = self._partition_dir(
                    data_hdfs_path, country_code_word, hour)
                touchz()

        except Exception as e:
            logging.error(e)
            sys.exit(1)
class TaskHourSuccessCountMonitor(object):
    """Check that a dependency produced a full day (24) of hourly
    ``_SUCCESS`` markers inside a start/end hour window; send a DingDing
    alert and exit when any hour is missing."""

    def __init__(self, ds, v_info):
        # DingDing webhook used for "dependency incomplete" alerts.
        self.dingding_alert = DingdingAlert(
            'https://oapi.dingtalk.com/robot/send?access_token=928e66bef8d88edc89fe0f0ddd52bfa4dd28bd4b1d24ab4626c804df8878bb48'
        )

        # list of dicts: start_timeThour / end_dateThour / depend_dir / table
        self.v_info = v_info

        self.v_data_dir = ""      # HDFS dir currently being scanned

        # NOTE(review): ds and the three attributes below are never read by
        # any method here; kept for interface compatibility.
        self.start_timeThour = ""
        self.end_dateThour = ""
        self.partition = ""

        self.less_res = []        # hours at-or-before the window boundary
        self.greater_res = []     # hours at-or-after the window boundary

        self.log_unite_dist = {}  # {date: [matched "HH/_SUCCESS" entries]}

        self.start_time = ""
        self.end_time = ""

    def get_partition_list(self):
        """Return a comma-separated list of ``HH/_SUCCESS`` entries found
        under the current data dir, or ``sys.exit(1)`` when none exist."""
        command = "hadoop dfs -ls {data_dir}/hour=*/_SUCCESS|awk -F\"hour=\" \'{{print $2}}\'|tr \"\\n\" \",\"|sed -e 's/,$/\\n/'".format(
            data_dir=self.v_data_dir)

        logging.info(command)

        out = os.popen(command, 'r')
        res = out.readlines()
        out.close()

        # BUG FIX: guard against completely empty output — the old code
        # indexed res[0] unconditionally and crashed with IndexError.
        # Also: the old comparison against 'None' was dead after .lower().
        first = res[0].lower().strip() if res else ''

        if first == '' or first == 'none' or first == '0':
            logging.info("_SUCCESS list 获取失败")
            sys.exit(1)

        return first

    def number_rebuild(self, s):
        """Zero-pad an hour value to two characters (1 -> "01")."""
        return str(s).zfill(2)

    def nm_less_diff(self, s):
        """Record *s* (a '1HH'-encoded hour) when it falls at or before the
        window boundary ``'1' + self.syl``."""
        sylstr = str(s) + "/_SUCCESS"

        # boundary with the same leading-'1' encoding (e.g. "05" -> 105)
        v_in_number = "1" + self.syl

        if int(s) <= int(v_in_number) and sylstr not in self.less_res:
            self.less_res.append(sylstr)

    def nm_greater_diff(self, s):
        """Record *s* (a '1HH'-encoded hour) when it falls at or after the
        window boundary ``'1' + self.syl``."""
        sylstr = str(s) + "/_SUCCESS"

        v_in_number = "1" + self.syl

        if int(s) >= int(v_in_number) and sylstr not in self.greater_res:
            self.greater_res.append(sylstr)

    def summary_results(self, depend_data_dir, symbol, start_hour):
        """Count the hourly ``_SUCCESS`` markers in *depend_data_dir* lying
        on the *symbol* ('<' or '>') side of *start_hour*, inclusive."""
        symbol = symbol.strip()
        self.v_data_dir = depend_data_dir.strip()

        # boundary hour, zero-padded ("5" -> "05")
        self.syl = self.number_rebuild(start_hour)

        res_list = []

        for entry in self.get_partition_list().split(","):
            # prefix every hour with '1' so string hours compare numerically
            # without octal/leading-zero issues ("01" -> 101)
            source_nm = int("1" + entry.split("/")[0])

            if symbol == "<":
                self.nm_less_diff(source_nm)
            if symbol == ">":
                self.nm_greater_diff(source_nm)

        if symbol == "<":
            res_list = self.less_res
            self.log_unite_dist[self.end_time] = res_list

        if symbol == ">":
            res_list = self.greater_res
            self.log_unite_dist[self.start_time] = res_list

        return len(res_list)

    def HourSuccessCountMonitor(self):
        """Entry point: validate the 24-hour window described by v_info.

        NOTE(review): when v_info holds several items only the last one is
        actually checked — the comparison below uses the loop's final
        values; confirm whether multi-item support was intended.
        """
        for item in self.v_info:
            start_timeThour = item.get('start_timeThour', None)
            end_dateThour = item.get('end_dateThour', None)
            depend_dir = item.get('depend_dir', None)
            table_name = item.get('table', None)

            # start date / hour, e.g. "2020-01-01T05"
            self.start_time = start_timeThour.split("T")[0]
            start_time_hour = start_timeThour.split("T")[1]
            depend_start_dir = depend_dir + "/" + table_name + "/dt=" + self.start_time

            # end date / hour
            self.end_time = end_dateThour.split("T")[0]
            end_time_hour = end_dateThour.split("T")[1]
            depend_end_dir = depend_dir + "/" + table_name + "/dt=" + self.end_time

        if self.start_time != self.end_time:
            # window spans two dates: count >= start hour on day one plus
            # <= end hour on day two
            hour_res_nm = self.summary_results(
                depend_start_dir, ">", start_time_hour) + self.summary_results(
                    depend_end_dir, "<", end_time_hour)

        if self.start_time == self.end_time:
            # single-day window
            hour_res_nm = self.summary_results(depend_start_dir, ">",
                                               start_time_hour)

        logging.info(self.log_unite_dist)

        # anything other than a full 24 hours means the dependency failed
        if hour_res_nm != 24:
            logging.info(
                "小时级分区文件SUCCESS 个数 {hour_res_nm} 不完整,异常退出.....".format(
                    hour_res_nm=hour_res_nm))

            self.dingding_alert.send(
                "DW 依赖数据源 {table_name} 小时级分区文件SUCCESS 个数 {hour_res_nm} 缺失,异常退出....."
                .format(hour_res_nm=hour_res_nm, table_name=table_name))

            sys.exit(1)
        else:
            logging.info("小时级分区文件SUCCESS 个数 {hour_res_nm} 完整,依赖成功".format(
                hour_res_nm=hour_res_nm))
            self.log_unite_dist = {}
class CountriesPublicFrame(object):
    """Multi-country partition framework: deletes partition directories,
    creates ``_SUCCESS`` markers per country/hour, and generates hive
    drop/add-partition DDL for a single table run.

    Flags (all strings "true"/"false" for historical reasons):
      v_is_open            -- multi-country mode enabled
      v_country_partition  -- table has a country_code partition level
      v_file_type          -- "true": data must exist before _SUCCESS;
                              "false": empty partitions also get a marker
    """

    def __init__(self,
                 v_is_open,
                 v_ds,
                 v_db_name,
                 v_table_name,
                 v_data_hdfs_path,
                 v_country_partition="true",
                 v_file_type="true",
                 v_hour=None):

        # DingDing webhook used for "no data produced" alerts.
        self.dingding_alert = DingdingAlert(
            'https://oapi.dingtalk.com/robot/send?access_token=928e66bef8d88edc89fe0f0ddd52bfa4dd28bd4b1d24ab4626c804df8878bb48'
        )

        self.table_name = v_table_name
        self.hdfs_data_dir_str = ""     # partition dir currently targeted
        self.data_hdfs_path = v_data_hdfs_path
        self.db_name = v_db_name
        self.ds = v_ds                  # partition date (dt=...)
        self.country_partition = v_country_partition
        self.file_type = v_file_type
        self.hour = v_hour
        self.is_open = v_is_open
        self.v_del_flag = 0             # 1 while delete_partition() runs

        self.v_country_code_map = None  # {code: status} from Airflow Variable

        self.country_code_list = ""     # comma-separated codes, or "nal"

        self.get_country_code()

    def get_country_code(self):
        """Populate ``country_code_list`` with every two-letter country code
        the table covers ("nal" when multi-country mode is off)."""
        if self.is_open.lower() == "false":
            self.country_code_list = "nal"

        if self.is_open.lower() == "true":
            # NOTE(review): eval() of an Airflow Variable — trusted config,
            # but a malformed or malicious value would execute here;
            # consider ast.literal_eval.
            self.v_country_code_map = eval(Variable.get("country_code_dim"))
            s = list(self.v_country_code_map.keys())
            self.country_code_list = ",".join(s)

    # ------------------------------------------------------------------
    # internal helpers
    # ------------------------------------------------------------------
    def _run_first_line(self, command):
        """Run *command* via ``os.popen`` and return its first output line
        lower-cased and stripped, or ``'0'`` when there is no output."""
        out = os.popen(command, 'r')
        res = out.readlines()
        out.close()
        return res[0].lower().strip() if res else '0'

    def _hdfs_dir_is_empty(self):
        """Return True when ``hadoop fs -du -s`` reports a zero-byte (or
        unreadable) directory for ``self.hdfs_data_dir_str``."""
        line_str = "$HADOOP_HOME/bin/hadoop fs -du -s {hdfs_data_dir} | tail -1 | awk \'{{print $1}}\'".format(
            hdfs_data_dir=self.hdfs_data_dir_str)
        logging.info(line_str)
        with os.popen(line_str) as p:
            line_num = p.read()
        # `du` prints no leading zeros, so '0' first means exactly 0 bytes
        return (not line_num) or line_num[0] == '0'

    def _partition_dir(self, country_code):
        """Build the HDFS partition dir for an optional country code, using
        the instance's base path / ds / hour."""
        path = self.data_hdfs_path
        if country_code is not None:
            path = path + "/country_code=" + country_code
        path = path + "/dt=" + self.ds
        if self.hour is not None:
            path = path + "/hour=" + self.hour
        return path

    # ------------------------------------------------------------------
    # hadoop primitives
    # ------------------------------------------------------------------
    def check_success_exist(self):
        """Verify the ``_SUCCESS`` marker exists; ``sys.exit(1)`` otherwise."""
        time.sleep(15)
        print("debug-> check_success_exist")

        command = "hadoop fs -ls {hdfs_data_dir}/_SUCCESS>/dev/null 2>/dev/null && echo 1 || echo 0".format(
            hdfs_data_dir=self.hdfs_data_dir_str)

        # '1' when the marker exists, '0' otherwise
        res = self._run_first_line(command)

        if res == '' or res == 'None' or res[0] == '0':
            logging.info("_SUCCESS 验证失败")
            sys.exit(1)
        else:
            logging.info("_SUCCESS 验证成功")

    def delete_exist_partition(self):
        """Delete the current partition dir for idempotent re-runs.

        NOTE(review): unlike TaskTouchzSuccess, a failed delete here only
        logs — it does NOT exit; confirm that is intentional."""
        time.sleep(10)
        print("debug-> delete_exist_partition")

        del_command = "hadoop fs -rm -r {hdfs_data_dir}".format(
            hdfs_data_dir=self.hdfs_data_dir_str)
        logging.info(del_command)
        os.popen(del_command, 'r')
        time.sleep(10)

        # '1' -> directory still listed, '0' -> gone
        check_command = "hadoop fs -ls {hdfs_data_dir}>/dev/null 2>/dev/null && echo 1 || echo 0".format(
            hdfs_data_dir=self.hdfs_data_dir_str)
        res = self._run_first_line(check_command)
        print(res)

        if res == '' or res == 'None' or res[0] == '0':
            logging.info("目录删除成功")
        else:
            logging.info("目录删除失败:" + " " + "{hdfs_data_dir}".format(
                hdfs_data_dir=self.hdfs_data_dir_str))

    def data_not_file_type_touchz(self):
        """Touch ``_SUCCESS`` even when the partition holds no data
        (creates the directory first)."""
        try:
            print("debug-> data_not_file_type_touchz")

            mkdir_str = "$HADOOP_HOME/bin/hadoop fs -mkdir -p {hdfs_data_dir}".format(
                hdfs_data_dir=self.hdfs_data_dir_str)
            logging.info(mkdir_str)
            os.popen(mkdir_str)
            time.sleep(10)

            succ_str = "$HADOOP_HOME/bin/hadoop fs -touchz {hdfs_data_dir}/_SUCCESS".format(
                hdfs_data_dir=self.hdfs_data_dir_str)
            logging.info(succ_str)
            os.popen(succ_str)

            logging.info("DATA EXPORT Successed ......")
            self.check_success_exist()

        except Exception as e:
            logging.error(e)
            sys.exit(1)

    def data_file_type_touchz(self):
        """Touch ``_SUCCESS`` only when the partition actually contains
        data; alert and ``sys.exit(1)`` when it is empty."""
        try:
            print("debug-> data_file_type_touchz")

            if self._hdfs_dir_is_empty():
                # no data produced -> alert and abort
                self.dingding_alert.send('DW调度系统任务 {jobname} 数据产出异常'.format(
                    jobname=self.table_name))
                logging.info("Error : {hdfs_data_dir} is empty".format(
                    hdfs_data_dir=self.hdfs_data_dir_str))
                sys.exit(1)
            else:
                time.sleep(5)
                succ_str = "hadoop fs -touchz {hdfs_data_dir}/_SUCCESS".format(
                    hdfs_data_dir=self.hdfs_data_dir_str)
                logging.info(succ_str)
                os.popen(succ_str)
                logging.info("DATA EXPORT Successed ......")

            self.check_success_exist()

        except Exception as e:
            logging.error(e)
            sys.exit(1)

    # ------------------------------------------------------------------
    # public entry points
    # ------------------------------------------------------------------
    def delete_partition(self):
        """Delete every covered partition dir.  Sets ``v_del_flag`` so the
        country iterators run the delete action unconditionally."""
        self.v_del_flag = 1

        if self.country_partition.lower() == "false":
            self.not_exist_country_code_data_dir(self.delete_exist_partition)

        if self.country_partition.lower() == "true":
            self.exist_country_code_data_dir(self.delete_exist_partition)

        self.v_del_flag = 0

    def touchz_success(self):
        """Create ``_SUCCESS`` markers according to the instance flags."""
        if self.file_type.lower() == "true":
            target = self.data_file_type_touchz
        elif self.file_type.lower() == "false":
            target = self.data_not_file_type_touchz
        else:
            return

        if self.country_partition.lower() == "false":
            self.not_exist_country_code_data_dir(target)

        if self.country_partition.lower() == "true":
            self.exist_country_code_data_dir(target)

    def not_exist_country_code_data_dir(self, object_task):
        """Run *object_task* against the single (no-country) partition dir."""
        try:
            self.hdfs_data_dir_str = self._partition_dir(None)

            # original code only acted on the literal values true/false
            if self.country_partition.lower() == "false" and \
                    self.file_type.lower() in ("true", "false"):
                object_task()

        except Exception as e:
            logging.error(e)
            sys.exit(1)

    def exist_country_code_data_dir_dev(self, object_task):
        """Older variant of :meth:`exist_country_code_data_dir` without the
        special handling for newly-opened ("new") countries."""
        try:
            for country_code_word in self.country_code_list.split(","):

                # "nal" stays lower-case; real ISO codes are upper-cased
                country_code_word = (country_code_word.lower()
                                     if country_code_word.lower() == 'nal'
                                     else country_code_word.upper())

                self.hdfs_data_dir_str = self._partition_dir(country_code_word)

                if self.country_partition.lower() != "true":
                    continue

                if self.is_open.lower() == "false":
                    # single-country mode: run for either file_type value
                    if self.file_type.lower() in ("true", "false"):
                        object_task()

                if self.is_open.lower() == "true":
                    if self.file_type.lower() == "true":
                        if self.v_del_flag == 1:
                            # partition-deletion pass: always run the task
                            object_task()
                            continue
                        if country_code_word == "nal":
                            # "nal" may legitimately be empty
                            self.data_not_file_type_touchz()
                        else:
                            object_task()
                    if self.file_type.lower() == "false":
                        object_task()

        except Exception as e:
            logging.error(e)
            sys.exit(1)

    def exist_country_code_data_dir(self, object_task):
        """Run *object_task* for every country partition dir, honouring the
        is_open / file_type / "new country" rules."""
        try:
            for country_code_word in self.country_code_list.split(","):

                # "nal" stays lower-case; real ISO codes are upper-cased
                country_code_word = (country_code_word.lower()
                                     if country_code_word.lower() == 'nal'
                                     else country_code_word.upper())

                self.hdfs_data_dir_str = self._partition_dir(country_code_word)

                if self.country_partition.lower() != "true":
                    continue

                if self.is_open.lower() == "false":
                    # single-country mode: run for either file_type value
                    if self.file_type.lower() in ("true", "false"):
                        object_task()

                if self.is_open.lower() == "true":
                    # newly-opened country: treated as "empty allowed"
                    if self.v_country_code_map[country_code_word].lower(
                    ) == "new":
                        if self.v_del_flag == 1:
                            object_task()
                        else:
                            self.data_not_file_type_touchz()
                        continue

                    if self.file_type.lower() == "true":
                        if self.v_del_flag == 1:
                            # partition-deletion pass: always run the task
                            object_task()
                            continue
                        if country_code_word == "nal":
                            # "nal" may legitimately be empty
                            self.data_not_file_type_touchz()
                        else:
                            object_task()
                    if self.file_type.lower() == "false":
                        object_task()

        except Exception as e:
            logging.error(e)
            sys.exit(1)

    def alter_partition(self):
        """Return hive DDL that drops and re-adds every covered partition."""

        def _drop_add(v_par_str):
            # one drop+add statement pair for a single partition spec
            return "alter table {db}.{table_name} drop partition({v_par});\n alter table {db}.{table_name} add partition({v_par});".format(
                v_par=v_par_str, table_name=self.table_name, db=self.db_name)

        # no country level: a single statement pair
        # (FIX: the old comment mislabelled the hour branch as the
        # "has country partition" case)
        if self.country_partition.lower() == "false":
            if self.hour is None:
                return _drop_add("dt='{ds}'".format(ds=self.ds))
            return _drop_add("dt='{ds}',hour='{hour}'".format(ds=self.ds,
                                                              hour=self.hour))

        alter_str = ""
        if self.country_partition.lower() != "true":
            return alter_str

        # one statement pair per country, each prefixed with a newline
        for country_code_word in self.country_code_list.split(","):
            if self.hour is None:
                v_par_str = "country_code='{country_code}',dt='{ds}'".format(
                    ds=self.ds, country_code=country_code_word)
            else:
                v_par_str = "country_code='{country_code}',dt='{ds}',hour='{hour}'".format(
                    ds=self.ds, hour=self.hour, country_code=country_code_word)

            alter_str = alter_str + "\n" + _drop_add(v_par_str)

        return alter_str