Example #1
    def __init__(self):
        """load monitor agent config"""
        self.monitor_agent_json_doc = Config.load_monitor_conf()
        """load register config: record ku work information"""
        """initialize ku register config"""
        self.register_json_doc = Config.load_register_conf()
        self.register_json_kutype_mon_thread_list = list()
Example #2
    def validate(self, database_kutype_name=None):
        """verify the configuration"""
        if database_kutype_name is None:
            logging.error('database_kutype_name value is empty!')
            return 1

        ku_global_conf_list = dict()
        """load global kutype mysql configure"""
        """ku_global_conf: monitotr.json/ mysql/ kutype_global_conf_table"""
        ku_global_conf = Config.load_mysql_global_kutype_conf()

        for raw in range(0, len(ku_global_conf)):
            """kutype config"""
            if ku_global_conf[raw]['data_name'] == 'ygg_ku_global_conf':
                sql_sentence = 'select * from ' + ku_global_conf[raw]['table'] + \
                                ' where kuType=\'' + database_kutype_name + '\';'
                if Config.load_agent_debug_level() >= 2:
                    modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [Load ygg_ku_global_conf] sql sentence: ' + sql_sentence + '\n')
                ku_global_conf_list[ku_global_conf[raw]['data_name']] = \
                    AccessMysqlData.operating_mysql_data_by_conf(ku_global_conf[raw], sql_sentence)
            """job sla config"""
            if ku_global_conf[raw]['data_name'] == 'ygg_job_conf':
                sql_sentence = 'select * from ' + ku_global_conf[raw]['table'] + \
                                ' where kuType regexp \'(^|[,])' + database_kutype_name + ',\';'
                if Config.load_agent_debug_level() >= 2:
                    modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [Load ygg_job_conf] sql sentence: ' + sql_sentence + '\n')
                ku_global_conf_list[ku_global_conf[raw]['data_name']] = \
                    AccessMysqlData.operating_mysql_data_by_conf(ku_global_conf[raw], sql_sentence)
        return ku_global_conf_list
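A note on Example #2: validate() splices database_kutype_name straight into the SQL text, so a value containing a quote character would break (or subvert) the statement. A minimal sketch of the same per-kuType lookup with a parameterized DB-API query is shown below; the pymysql driver, the conn object and the connection parameters are assumptions, and only the table name still has to come from trusted config.

import pymysql

def load_kutype_rows(conn, table, kutype_name):
    """Fetch all rows of one table for a single kuType, letting the driver escape the value."""
    with conn.cursor() as cursor:
        cursor.execute('SELECT * FROM ' + table + ' WHERE kuType = %s', (kutype_name,))
        return cursor.fetchall()

# usage sketch (connection parameters are placeholders):
# conn = pymysql.connect(host='127.0.0.1', user='monitor', password='...', db='ygg')
# rows = load_kutype_rows(conn, 'ygg_ku_global_conf', 'swift')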
Example #3
    def run(self):
        """monitor agent main function"""
        log_content = ' - [' + self.thread_name + '] is starting.\n'
        modules.Util.append_log_content_to_file(
            self.kutype_monitor_agent_log_file, log_content)
        log_content = ' - [' + self.thread_name + '] - [START]\n'
        modules.Util.append_log_content_to_file(self.kutype_status_log_file,
                                                log_content)

        self.validate()
        while True:
            print "=================locak check======================"
            if threadLock.acquire(2):
                print self.kutype_name
                print datetime.datetime.now()
                self.load_meta_data(self.use_mysql_tablename)
                status_content = str(self.ku_total_cycle_info) + '\n'
                if Config.load_agent_debug_level() >= 4:
                    modules.Util.append_log_content_to_file(\
                            self.kutype_monitor_bs_file, status_content)
                self.load_public_strategy()
                self.load_private_strategy()
                GeneralOperation.clear_finished_build_info(
                    self.kutype_global_info)
                threadLock.release()
                time.sleep(self.load_mysql_interval)
            else:
                logging.error('lock! lock! lock!')
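A note on the locking in Example #3: under Python 2, threading.Lock.acquire() takes a wait flag, so threadLock.acquire(2) just means "block until acquired" rather than "wait two seconds". A timed acquire needs Python 3. Below is a minimal Python 3 sketch of the same poll-under-lock loop shape; the names and the 60-second default are assumptions.

import threading
import time

thread_lock = threading.Lock()

def poll_forever(load_once, interval_s=60):
    """Run load_once() under the lock every interval_s seconds; skip a round if the lock stays busy."""
    while True:
        if thread_lock.acquire(timeout=2):   # Python 3 only: waits at most 2 seconds
            try:
                load_once()
            finally:
                thread_lock.release()        # released even if load_once() raises
            time.sleep(interval_s)
        else:
            print('could not take the lock, retrying')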
Example #4
    def insert_mysql_data_by_table(table_name=None, sql_sentence=None):
        """load mysql data by table and sql sentence"""
        if sql_sentence is None:
            logging.error('sql_sentence value is empty!')
            return 1

        metadata_conf = Config.load_mysql_kutype_metadata_conf()

        for raw in range(0, len(metadata_conf)):
            if table_name == metadata_conf[raw]['table']:
                if Config.load_agent_debug_level() >= 2:
                    modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [insert_mysql_table] sql sentence: ' + sql_sentence + '\n')
                return AccessMysqlData.operating_mysql_data_by_conf(\
                                    metadata_conf[raw], sql_sentence)
        return 1
Example #5
    def analysis(self):
        """Start KutypeFailedJobNum analysis"""
        single_job_info = list()
        single_job_status = 0
        single_job_name = ""
        failed_job_name_list = ""
        total_failed_job_count = 0

        """
        +----------+------------+
        | statusId | statusName |
        +----------+------------+
        |        0 | notstarted |
        |        1 | success    |
        |        2 | running    |
        |        3 | failed     |
        |        4 | skipped    |
        +----------+------------+
        """
        job_failed_status = 3
        job_name_regex = re.compile(self.kutype_name)

        """start analysis"""
        for single_job_info in self.metadata:
            single_job_status = single_job_info[2]
            single_job_name = single_job_info[0]
            '''
            if (job_name_regex.findall(single_job_info[0])) and\
                    single_job_status == job_failed_status:
            '''
            """Record failed job information"""
            if single_job_status == job_failed_status:
                job_error_time = single_job_info[8]
                if not job_error_time:
                    job_error_time = datetime.datetime.now()
                failed_job_name_list = single_job_name + '; ' + failed_job_name_list
                check_exists_ret = GeneralOperation.check_and_insert_already_exists_job(\
                    single_job_info, self.monitor_iterm, 'job_failed', str(job_error_time))
                if check_exists_ret == 1:
                    logging.error('insert new job info to ygg_monitor failed!')
                total_failed_job_count += 1

        """save key:value to status file"""
        if total_failed_job_count == 0:
            failed_job_name_list = 'None'
        else:
            modules.Util.append_log_content_to_file(self.kutype_status_dir +\
            '/' + str(self.single_public_strategy) + '.status', 'kutype_failed_job_list:' +\
                str(failed_job_name_list) + '\n')


        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                        ' [' + self.kutype_name + '_kutype_failed_job_num] temp data: ' +\
                        failed_job_name_list + '\n')

        return total_failed_job_count
Example #6
    def check_and_update_already_exists_kutype(single_kutype_info):
        """check and update exist kutype info"""
        """exist conditions:"""
        """         project_name, iteration_id, cycle_id, exec_id, error_name,  kutype_error_analysis """
        """Initialization check kutype information"""
        project_name = single_kutype_info[0]
        iteration_id = str(single_kutype_info[1])
        cycle_id = str(single_kutype_info[2])
        exec_id = str(single_kutype_info[3])
        kutype_error_analysis = single_kutype_info[6]
        error_name = 'kutype_monitor'

        """update information:"""
        """         kutype_current_status, kutype_errorstatus, kutype_errortime_appeartime"""
        kutype_error_status = str(single_kutype_info[7])
        kutype_current_status = str(single_kutype_info[8])
        kutype_error_appeartime = str(single_kutype_info[9])

        """Initialization abnormal mysql table name"""
        ygg_monitor_table_name = 'ygg_monitor'

        sql_sentence = 'select * from ' + ygg_monitor_table_name + ' where projectName=\'' +\
                        project_name + '\' and iterId=\'' + iteration_id + '\' and cycleId=\'' +\
                        cycle_id + '\' and execId=\'' + exec_id + '\' and errorName=\'' +\
                        error_name + '\' and errorAnalysis=\'' + kutype_error_analysis + '\';'

        if Config.load_agent_debug_level() >= 2:
            modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [check_exists_kutype_check] sql sentence: ' + sql_sentence + '\n')
        mysql_return_data = AccessMysqlData.load_mysql_data_by_table(\
                                    ygg_monitor_table_name, sql_sentence)

        if mysql_return_data:
            sql_sentence = 'update ygg_monitor set errorStatus=\'' + kutype_error_status +\
                            '\', currStatus=\'' + kutype_current_status +\
                            '\' where projectName=\'' + project_name +\
                            '\' and execId=\'' + exec_id + '\' and errorName=\'' + error_name +\
                            '\' and errorAnalysis=\'' + kutype_error_analysis +\
                            '\' and iterId=\'' + iteration_id + '\' and cycleId=\'' + cycle_id + '\';'
            if Config.load_agent_debug_level() >= 2:
                modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                            ' [check_exists_kutype_update] sql sentence: ' + sql_sentence + '\n')
            return AccessMysqlData.insert_mysql_data_by_table(\
                                    ygg_monitor_table_name, sql_sentence)
        else:
            return 255
Example #7
    def analysis(self):
        """Start KutypeCoverCountDown analysis"""

        if int(self.kutype_coverd_time_s) == 0:
            return 0

        """single_job_info : save singel job information"""
        """job_name: save singel job name"""
        """prefix_job_name: save prefix job name"""
        """unixformat_time: temp use, save unix format time"""
        """download_job_dict: save all download job name - starttime"""
        single_job_info = list()
        job_name = ""
        prefix_job_name = ""
        unixformat_time = 0
        regex_download_name = re.compile(r'download')
        regex_kutype_name = re.compile('^' + self.kutype_name)
        download_job_dict = dict()

        """get all download job name and start time"""
        for single_job_info in self.metadata:
            job_name = single_job_info[0]
            prefix_job_name = re.split(r'-\d+', job_name)[0]

            """filter out with download field job"""
            if (regex_download_name.findall(job_name)):
                dateformat_time = single_job_info[7]
                unixformat_time = int(re.split(r'\.',\
                        str(time.mktime(dateformat_time.timetuple())))[0])

                """If there is no numeric suffix, the download job number is 1"""
                """If with a digital suffix, then take the earliest download job"""
                if (not download_job_dict.has_key(prefix_job_name)) or\
                        (unixformat_time < download_job_dict[prefix_job_name]):
                    download_job_dict[prefix_job_name] = unixformat_time

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_kutype_cover_countdown] temp data: ' +\
                str(download_job_dict) + '\n')

        """Get the latest download time as start time"""
        for find_kutype_download in download_job_dict.keys():
            """get the kutype downlaod start time"""
            """  1. Find the dwonload newest time with kutype job name"""
            """  2. initialized the bs newest time"""
            """  3. Find the dwonload newest time without kutype job name"""
            if (regex_kutype_name.findall(find_kutype_download)):
                self.bs_newest_page_unix_time = download_job_dict[find_kutype_download]
            elif (self.bs_newest_page_unix_time == 0):
                self.bs_newest_page_unix_time = download_job_dict[find_kutype_download]
            elif (self.bs_newest_page_unix_time != 0) and\
                        (download_job_dict[find_kutype_download] < self.bs_newest_page_unix_time):
                self.bs_newest_page_unix_time = download_job_dict[find_kutype_download]

        return self.calculate_cover_countdown()
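The time handling in Example #7 converts a datetime to whole unix seconds via time.mktime() plus a regex split on the decimal point. The same conversion can be written directly; a small self-contained sketch (it assumes naive local-time datetimes, which is also what mktime expects):

import datetime
import time

def to_unix_seconds(dt):
    """Whole seconds since the epoch for a naive, local-time datetime."""
    return int(time.mktime(dt.timetuple()))

print(to_unix_seconds(datetime.datetime(2017, 5, 10, 17, 24, 13)))  # epoch seconds for that local time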
Example #8
    def analysis(self):
        """Start KutypeFailedJobNum analysis"""
        single_job_info = list()
        single_job_status = 0
        single_job_name = ""
        failed_job_name_list = ""
        total_failed_job_count = 0
        """
        +----------+------------+
        | statusId | statusName |
        +----------+------------+
        |        0 | notstarted |
        |        1 | success    |
        |        2 | running    |
        |        3 | failed     |
        |        4 | skipped    |
        +----------+------------+
        """
        job_failed_status = 3
        job_name_regex = re.compile(self.kutype_name)
        """start analysis"""
        for single_job_info in self.metadata:
            single_job_status = single_job_info[2]
            single_job_name = single_job_info[0]
            '''
            if (job_name_regex.findall(single_job_info[0])) and\
                    single_job_status == job_failed_status:
            '''
            """Record failed job information"""
            if single_job_status == job_failed_status:
                job_error_time = single_job_info[8]
                if not job_error_time:
                    job_error_time = datetime.datetime.now()
                failed_job_name_list = single_job_name + '; ' + failed_job_name_list
                check_exists_ret = GeneralOperation.check_and_insert_already_exists_job(\
                    single_job_info, self.monitor_iterm, 'job_failed', str(job_error_time))
                if check_exists_ret == 1:
                    logging.error('insert new job info to ygg_monitor failed!')
                total_failed_job_count += 1
        """save key:value to status file"""
        if total_failed_job_count == 0:
            failed_job_name_list = 'None'
        else:
            modules.Util.append_log_content_to_file(self.kutype_status_dir +\
            '/' + str(self.single_public_strategy) + '.status', 'kutype_failed_job_list:' +\
                str(failed_job_name_list) + '\n')

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                        ' [' + self.kutype_name + '_kutype_failed_job_num] temp data: ' +\
                        failed_job_name_list + '\n')

        return total_failed_job_count
Example #9
    def analysis(self):
        """Start KutypeCoverCountDown analysis"""

        if int(self.kutype_coverd_time_s) == 0:
            return 0
        """single_job_info : save singel job information"""
        """job_name: save singel job name"""
        """prefix_job_name: save prefix job name"""
        """unixformat_time: temp use, save unix format time"""
        """download_job_dict: save all download job name - starttime"""
        single_job_info = list()
        job_name = ""
        prefix_job_name = ""
        unixformat_time = 0
        regex_download_name = re.compile(r'download')
        regex_kutype_name = re.compile('^' + self.kutype_name)
        download_job_dict = dict()
        """get all download job name and start time"""
        for single_job_info in self.metadata:
            job_name = single_job_info[0]
            prefix_job_name = re.split(r'-\d+', job_name)[0]
            """filter out with download field job"""
            if (regex_download_name.findall(job_name)):
                dateformat_time = single_job_info[7]
                unixformat_time = int(re.split(r'\.',\
                        str(time.mktime(dateformat_time.timetuple())))[0])
                """If there is no numeric suffix, the download job number is 1"""
                """If with a digital suffix, then take the earliest download job"""
                if (not download_job_dict.has_key(prefix_job_name)) or\
                        (unixformat_time < download_job_dict[prefix_job_name]):
                    download_job_dict[prefix_job_name] = unixformat_time

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_kutype_cover_countdown] temp data: ' +\
                str(download_job_dict) + '\n')
        """Get the latest download time as start time"""
        for find_kutype_download in download_job_dict.keys():
            """get the kutype downlaod start time"""
            """  1. Find the dwonload newest time with kutype job name"""
            """  2. initialized the bs newest time"""
            """  3. Find the dwonload newest time without kutype job name"""
            if (regex_kutype_name.findall(find_kutype_download)):
                self.bs_newest_page_unix_time = download_job_dict[
                    find_kutype_download]
            elif (self.bs_newest_page_unix_time == 0):
                self.bs_newest_page_unix_time = download_job_dict[
                    find_kutype_download]
            elif (self.bs_newest_page_unix_time != 0) and\
                        (download_job_dict[find_kutype_download] < self.bs_newest_page_unix_time):
                self.bs_newest_page_unix_time = download_job_dict[
                    find_kutype_download]

        return self.calculate_cover_countdown()
Example #10
    def load_mysql_data_by_table(table_name=None, sql_sentence=None):
        """load mysql data by table and sql sentence"""
        if sql_sentence is None:
            logging.error('sql_sentence value is empty!')
            return 1

        global_conf = Config.load_mysql_global_kutype_conf()
        metadata_conf = Config.load_mysql_kutype_metadata_conf()

        """load from global conf"""
        for raw in range(0, len(global_conf)):
            if table_name == global_conf[raw]['table']:
                return AccessMysqlData.operating_mysql_data_by_conf(\
                                    global_conf[raw], sql_sentence)

        """load from metadata conf"""
        for raw in range(0, len(metadata_conf)):
            if table_name == metadata_conf[raw]['table']:
                return AccessMysqlData.operating_mysql_data_by_conf(\
                                    metadata_conf[raw], sql_sentence)
        return 1
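Both loops in Example #10 scan the conf list by index looking for a matching 'table' entry. The same lookup reads more directly with next() over a generator; a sketch that assumes each conf entry is a dict with a 'table' key, as above.

def find_conf_by_table(conf_entries, table_name):
    """Return the first conf entry whose 'table' field matches, or None."""
    return next((entry for entry in conf_entries if entry.get('table') == table_name), None)

# usage sketch mirroring load_mysql_data_by_table:
# entry = find_conf_by_table(global_conf, table_name) or find_conf_by_table(metadata_conf, table_name)
# if entry is None:
#     return 1
# return AccessMysqlData.operating_mysql_data_by_conf(entry, sql_sentence)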
Example #11
    def check_and_insert_already_exists_job(single_job_info, job_error_analysis,\
                                            job_errorstatus, job_error_appear_time):
        """check_and_update_already_exists_job"""
        """check and update exist job info"""
        """exist conditions:"""
        """         project_name, iteration_id, cycle_id, exec_id, job_name, job_error_analysis"""
        """Initialization check job information"""
        project_name = single_job_info[6]
        iteration_id = str(single_job_info[4])
        cycle_id = str(single_job_info[5])
        if str(single_job_info[8]) == 'None':
            """some abnormal: start time = none, end time = none"""
            single_job_info[8] = str(datetime.datetime.now())
        if str(single_job_info[7]) == 'None':
            """job status: ready -> kill"""
            single_job_info[7] = str(single_job_info[8])
        job_start_time = str(single_job_info[7])
        exec_id = str(single_job_info[9])
        job_name = single_job_info[0]

        """update information:"""
        """         job_current_status, job_errorstatus, job_error_appear_time"""
        job_current_status = str(single_job_info[2])

        """Initialization abnormal mysql table name"""
        ygg_monitor_table_name = 'ygg_monitor'

        sql_sentence = 'select * from ' + ygg_monitor_table_name + ' where projectName=\'' +\
                        project_name + '\' and iterId=\'' + iteration_id + '\' and cycleId=\'' +\
                        cycle_id + '\' and execId=\'' + exec_id + '\' and errorName=\'' + job_name +\
                        '\' and errorAnalysis=\'' + job_error_analysis + '\' and errorStarttime=\''\
                        + job_start_time +'\';'

        if Config.load_agent_debug_level() >= 2:
            modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [check_exists_job] sql sentence: ' + sql_sentence + '\n')
        mysql_return_data = AccessMysqlData.load_mysql_data_by_table(\
                                    ygg_monitor_table_name, sql_sentence)

        if mysql_return_data:
            '''
            sql_sentence = 'update ygg_monitor set errorStatus=\'' + job_errorstatus +\
                            '\', currStatus=\'' + job_current_status +\
                            '\' where projectName=\'' + project_name +\
                            '\' and execId=\'' + exec_id + '\' and errorName=\'' + job_name +\
                            '\' and errorAnalysis=\'' + job_error_analysis + '\';'
            return AccessMysqlData.insert_mysql_data_by_table(\
                                    ygg_monitor_table_name, sql_sentence)
            '''
            return 0
        else:
            return GeneralOperation.insert_ygg_monitor_job(single_job_info, job_error_analysis,\
                                                            job_errorstatus, job_error_appear_time)
Example #12
    def calculate_cover_countdown(self):
        """Initialization record abnormal kutype information"""
        project_link = self.compass_domain + '/manager?project='+\
                                self.project_name + '&flow=end#executions'
        flow_link = self.compass_domain + '/executor?execid='
        """Calculate the coverage countdown"""
        self.current_unix_time = int(re.split(r'\.', str(time.time()))[0])
        countdown_time_s = self.bs_newest_page_unix_time + self.kutype_coverd_time_s\
                            - self.current_unix_time
        countdown_time_h = countdown_time_s / 3600.0
        countdown_time_h = ("%.2f" % countdown_time_h)
        countdown_minus_buffer_s = countdown_time_s - self.kutype_coverd_buffer_s
        """record abnormal kutype information"""
        get_kutype_info_instance = GetFlowinfoByCursor(self.metadata,\
                             self.iteration_cycle_cursor, self.bs_newest_page_unix_time)
        kutype_information_list = get_kutype_info_instance.get_flow_key_information()
        exec_id = kutype_information_list[3]
        flow_link = flow_link + str(exec_id) + '#jobslist'

        kutype_information_list.append('kutype_monitor')
        kutype_information_list.append(self.monitor_iterm)
        kutype_information_list.append('kutype-cover-countdown=' +
                                       str(countdown_time_h) + 'h')
        kutype_information_list.append('2')
        kutype_information_list.append(str(datetime.datetime.now()))
        kutype_information_list.append(project_link)
        kutype_information_list.append(flow_link)

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_kutype_cover_countdown] temp data: ' +\
                str(kutype_information_list) + '\n')
        """if there is no time buffer,  insert / update monitor mysql table"""
        if (int(self.kutype_coverd_buffer_s) !=
                0) and (countdown_minus_buffer_s <= 0):
            check_exists_ret = GeneralOperation.check_and_update_already_exists_kutype(\
                                                                kutype_information_list)
            if check_exists_ret == 255:
                GeneralOperation.insert_ygg_monitor_flow(
                    kutype_information_list)
        else:
            """If returned to normal, update abnormal information"""
            kutype_information_list[8] = '1'
            GeneralOperation.check_and_update_already_exists_kutype(
                kutype_information_list)
        """0: means that there is no time limit to cover"""
        if int(self.kutype_coverd_time_s) == 0:
            return 0
        else:
            return countdown_time_h
Example #13
    def __init__(self):
        """self.kutype_list: the kutype list that needs to be printed out"""
        if (len(sys.argv) == 1):
            logging.error('Argv is null, at least one kutype is required')
            print('Argv is null, at least one kutype is required')
            self.kutype_list = list()
        else:
            self.kutype_list = sys.argv[1:]
        """load register config: record ku work information"""
        """initialize ku register config"""
        self.register_json_doc = Config.load_register_conf()
        self.register_kutype_list = dict()
        """monitor agent conf"""
        self.status_dir = Config.status_dir
Example #14
    def calculate_cover_countdown(self):
        """Initialization record abnormal kutype information"""
        project_link = self.compass_domain + '/manager?project='+\
                                self.project_name + '&flow=end#executions'
        flow_link = self.compass_domain + '/executor?execid='

        """Calculate the coverage countdown"""
        self.current_unix_time = int(re.split(r'\.', str(time.time()))[0])
        countdown_time_s = self.bs_newest_page_unix_time + self.kutype_coverd_time_s\
                            - self.current_unix_time
        countdown_time_h = countdown_time_s / 3600.0
        countdown_time_h = ("%.2f" % countdown_time_h)
        countdown_minus_buffer_s = countdown_time_s - self.kutype_coverd_buffer_s

        """record abnormal kutype information"""
        get_kutype_info_instance = GetFlowinfoByCursor(self.metadata,\
                             self.iteration_cycle_cursor, self.bs_newest_page_unix_time)
        kutype_information_list = get_kutype_info_instance.get_flow_key_information()
        exec_id = kutype_information_list[3]
        flow_link = flow_link + str(exec_id) + '#jobslist'

        kutype_information_list.append('kutype_monitor')
        kutype_information_list.append(self.monitor_iterm)
        kutype_information_list.append('kutype-cover-countdown=' + str(countdown_time_h) + 'h')
        kutype_information_list.append('2')
        kutype_information_list.append(str(datetime.datetime.now()))
        kutype_information_list.append(project_link)
        kutype_information_list.append(flow_link)

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_kutype_cover_countdown] temp data: ' +\
                str(kutype_information_list) + '\n')

        """if there is no time buffer,  insert / update monitor mysql table"""
        if (int(self.kutype_coverd_buffer_s) != 0) and (countdown_minus_buffer_s <= 0):
            check_exists_ret = GeneralOperation.check_and_update_already_exists_kutype(\
                                                                kutype_information_list)
            if check_exists_ret == 255:
                GeneralOperation.insert_ygg_monitor_flow(kutype_information_list)
        else:
            """If returned to normal, update abnormal information"""
            kutype_information_list[8] = '1'
            GeneralOperation.check_and_update_already_exists_kutype(kutype_information_list)

        """0: means that there is no time limit to cover"""
        if int(self.kutype_coverd_time_s) == 0:
            return 0
        else:
            return countdown_time_h
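The arithmetic in calculate_cover_countdown reduces to: remaining seconds = newest download start + cover window - now, reported in hours with two decimals; a negative value means the cover window has already been missed. A tiny standalone illustration with made-up numbers:

import time

bs_newest_page_unix_time = 1494400000     # unix start time of the newest download cycle (made up)
kutype_coverd_time_s = 96 * 3600          # e.g. a 96-hour cover window
current_unix_time = int(time.time())

countdown_time_s = bs_newest_page_unix_time + kutype_coverd_time_s - current_unix_time
countdown_time_h = '%.2f' % (countdown_time_s / 3600.0)
print(countdown_time_h)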
Example #15
    def __init__(self, monitor_iterm, metadata, global_conf):
        """Initialization data"""
        super(KutypeCoverCountDown, self).__init__(monitor_iterm, metadata, global_conf)

        """Thinking: bs latest cycle page time - now time < coverd_time"""
        """  |------start download page--------------------------end--------------------------------start download page============now|"""
        """  |---------------|----------------------------------------------cover time----------------------------------------------|"""
        """global_conf: type: dict()"""
        """             data from monitor.json/mysql/kutype_global_conf_table/ key:data_name, value:ygg_ku_global_conf"""
        """             data example: {u'ygg_ku_global_conf': [[7L, 'wdna', 'swift', 32L, 12L, 28L, 'yq01-global', 'wdna=96', '0', '0', 30L, 0L]]}"""

        self.kutype_name = global_conf['ygg_ku_global_conf'][0][1]
        self.database_kutype_name = global_conf['ygg_ku_global_conf'][0][2]
        self.kutype_coverd_time = global_conf['ygg_ku_global_conf'][0][7]
        self.kutype_coverd_time_s = modules.Util.time_formate_conversion_to_s(\
                                    self.kutype_coverd_time)
        self.kutype_coverd_buffer = global_conf['ygg_ku_global_conf'][0][8]
        self.kutype_coverd_buffer_s = modules.Util.time_formate_conversion_to_s(\
                                    self.kutype_coverd_buffer)
        self.compass_domain = global_conf['ygg_ku_global_conf'][0][10]
        self.project_name = global_conf['ygg_ku_global_conf'][0][2]
        self.monitor_iterm = monitor_iterm

        """get bs newest iteration and cycle id"""
        """self.iteration_cycle_cursor: record bs newest iteration - cycle, for get kutype information by GetFlowinfoByCursor method"""
        self.kutype_status_dir = Config.status_dir + '/' + self.kutype_name
        self.kutype_bs_newest_iteration_file = self.kutype_status_dir + '/bs_newest_iteration.status'
        self.kutype_bs_newest_cycle_file = self.kutype_status_dir + '/bs_newest_cycle.status'
        self.bs_newest_iteration = modules.Util.get_newest_value_by_key_file(\
                self.kutype_bs_newest_iteration_file, 'BS_NEWEST_ITERATION')
        self.bs_newest_cycle = modules.Util.get_newest_value_by_key_file(\
                self.kutype_bs_newest_cycle_file, 'BS_NEWEST_CYCLE')
        self.iteration_cycle_cursor = [self.bs_newest_iteration, self.bs_newest_cycle]

        """get bs newest build env info"""
        sql_sentence = 'select * from ygg_job' +\
            ' where kuType=\'' + str(self.database_kutype_name) +\
            '\' and iterId=\'' + str(self.bs_newest_iteration) + \
            '\' and cycleId=\'' + str(self.bs_newest_cycle) + \
            '\' and jobName regexp \'download\''
        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                ' [' + self.kutype_name + '_' + self.kutype_name + \
                '_cover_countdown] sql sentence: ' + sql_sentence + '\n')
        self.metadata = AccessMysqlData.load_mysql_data_by_table('ygg_job', sql_sentence)

        """bs newest page time"""
        self.bs_newest_page_unix_time = 0
        self.current_unix_time = 0
Example #16
    def __init__(self, monitor_iterm, metadata, global_conf):
        """Initialization data"""
        super(KutypeCoverCountDown, self).__init__(monitor_iterm, metadata,
                                                   global_conf)
        """Thinking: bs latest cycle page time - now time < coverd_time"""
        """  |------start download page--------------------------end--------------------------------start download page============now|"""
        """  |---------------|----------------------------------------------cover time----------------------------------------------|"""
        """global_conf: type: dict()"""
        """             data from monitor.json/mysql/kutype_global_conf_table/ key:data_name, value:ygg_ku_global_conf"""
        """             data example: {u'ygg_ku_global_conf': [[7L, 'wdna', 'swift', 32L, 12L, 28L, 'yq01-global', 'wdna=96', '0', '0', 30L, 0L]]}"""

        self.kutype_name = global_conf['ygg_ku_global_conf'][0][1]
        self.database_kutype_name = global_conf['ygg_ku_global_conf'][0][2]
        self.kutype_coverd_time = global_conf['ygg_ku_global_conf'][0][7]
        self.kutype_coverd_time_s = modules.Util.time_formate_conversion_to_s(\
                                    self.kutype_coverd_time)
        self.kutype_coverd_buffer = global_conf['ygg_ku_global_conf'][0][8]
        self.kutype_coverd_buffer_s = modules.Util.time_formate_conversion_to_s(\
                                    self.kutype_coverd_buffer)
        self.compass_domain = global_conf['ygg_ku_global_conf'][0][10]
        self.project_name = global_conf['ygg_ku_global_conf'][0][2]
        self.monitor_iterm = monitor_iterm
        """get bs newest iteration and cycle id"""
        """self.iteration_cycle_cursor: record bs newest iteration - cycle, for get kutype information by GetFlowinfoByCursor method"""
        self.kutype_status_dir = Config.status_dir + '/' + self.kutype_name
        self.kutype_bs_newest_iteration_file = self.kutype_status_dir + '/bs_newest_iteration.status'
        self.kutype_bs_newest_cycle_file = self.kutype_status_dir + '/bs_newest_cycle.status'
        self.bs_newest_iteration = modules.Util.get_newest_value_by_key_file(\
                self.kutype_bs_newest_iteration_file, 'BS_NEWEST_ITERATION')
        self.bs_newest_cycle = modules.Util.get_newest_value_by_key_file(\
                self.kutype_bs_newest_cycle_file, 'BS_NEWEST_CYCLE')
        self.iteration_cycle_cursor = [
            self.bs_newest_iteration, self.bs_newest_cycle
        ]
        """get bs newest build env info"""
        sql_sentence = 'select * from ygg_job' +\
            ' where kuType=\'' + str(self.database_kutype_name) +\
            '\' and iterId=\'' + str(self.bs_newest_iteration) + \
            '\' and cycleId=\'' + str(self.bs_newest_cycle) + \
            '\' and jobName regexp \'download\''
        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                ' [' + self.kutype_name + '_' + self.kutype_name + \
                '_cover_countdown] sql sentence: ' + sql_sentence + '\n')
        self.metadata = AccessMysqlData.load_mysql_data_by_table(
            'ygg_job', sql_sentence)
        """bs newest page time"""
        self.bs_newest_page_unix_time = 0
        self.current_unix_time = 0
Example #17
    def validate_configuration():
        """validate public configuration"""
        """count : in order to check strategy num"""
        count = 0
        config_single_public_strategy = ""
        public_strategy_json_doc = Config.load_strategy_conf()

        for config_single_public_strategy in re.split(r';\s*',\
                     public_strategy_json_doc['public_strategy']):
            count += 1

        if int(count) != int(public_strategy_json_doc['public_strategy_num']):
            logging.error("register public strategy count error!")
            return 1
        return 0
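validate_configuration in Example #17 counts the ';'-separated strategies one by one in a loop; len() of the split list gives the same number. A sketch using the same public_strategy / public_strategy_num keys:

import re

def strategy_count_matches(strategy_conf):
    """True when public_strategy_num agrees with the actual number of ';'-separated entries."""
    entries = re.split(r';\s*', strategy_conf['public_strategy'])
    return len(entries) == int(strategy_conf['public_strategy_num'])

# usage sketch:
# if not strategy_count_matches(public_strategy_json_doc):
#     logging.error('register public strategy count error!')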
Example #18
    def __init__(self):
        """self.kutype_list: the kutype list that needed to print out"""
        if (len(sys.argv) == 1):
            logging.error('Argv is null, at least one kutype is required')
            print('Argv is null, at least one kutype is required')
            self.kutype_list = list()
        else:
            self.kutype_list = sys.argv[1:]

        """    load register config: record ku work information"""
        """    initialization ku register config"""
        self.register_json_doc = Config.load_register_conf()
        self.register_kutype_list = dict()

        """monitor agent conf"""
        self.status_dir = Config.status_dir
Example #19
    def validate_configuration():
        """validate public configuration"""

        """count : in order to check strategy num"""
        count = 0
        config_single_public_strategy = ""
        public_strategy_json_doc = Config.load_strategy_conf()

        for config_single_public_strategy in re.split(r';\s*',\
                     public_strategy_json_doc['public_strategy']):
            count += 1

        if int(count) != int(public_strategy_json_doc['public_strategy_num']):
            logging.error("register public strategy count error!")
            return 1
        return 0
Example #20
    def public_strategy_analysis(\
            public_strategy_name, mon_iterm, metadata=None, kutype_global_conf=None):
        """schedule public strategy for analysis"""
        PublicStrategySchedule.validate_configuration()
        PublicStrategySchedule.register_strategy_and_features(\
                mon_iterm, metadata, kutype_global_conf)

        if public_strategy_name not in PublicStrategySchedule.support_register_public_strategys:
            logging.error('Unknown strategy: %s.' % public_strategy_name)
            return 1

        if Config.load_agent_debug_level() >= 5:
            modules.Util.append_log_content_to_file(Config.debug_sql_data_file,\
                ' strategy_name : [' + public_strategy_name + '] mon_iterm : [' + mon_iterm +\
                ' ] metadata : [' + str(metadata) + '] kutype_global_conf : [' +\
                 str(kutype_global_conf) + ']\n')
        excute_analysis_features = \
            PublicStrategySchedule.bind_strategy_and_feature[public_strategy_name]

        return excute_analysis_features.start()
Example #21
    def public_strategy_analysis(\
            public_strategy_name, mon_iterm, metadata=None, kutype_global_conf=None):
        """schedule public strategy for analysis"""
        PublicStrategySchedule.validate_configuration()
        PublicStrategySchedule.register_strategy_and_features(\
                mon_iterm, metadata, kutype_global_conf)

        if public_strategy_name not in PublicStrategySchedule.support_register_public_strategys:
            logging.error('Unknown strategy: %s.' % public_strategy_name)
            return 1

        if Config.load_agent_debug_level() >= 5:
            modules.Util.append_log_content_to_file(Config.debug_sql_data_file,\
                ' strategy_name : [' + public_strategy_name + '] mon_iterm : [' + mon_iterm +\
                ' ] metadata : [' + str(metadata) + '] kutype_global_conf : [' +\
                 str(kutype_global_conf) + ']\n')
        excute_analysis_features = \
            PublicStrategySchedule.bind_strategy_and_feature[public_strategy_name]

        return excute_analysis_features.start()
Example #22
    def load_meta_data(self, load_mysql_all_table_name=None):
        """load meta config"""
        """get mysql data range"""
        """mysql_data_iteration_min: means bs data version"""
        """mysql_data_cycle_min: means bs db number"""
        range_iteration_min = 0
        range_iteration_max = 0
        range_cycle_min = 0
        range_cycle_max = 0
        """initialization self.ku_total_cycle_info value"""
        self.ku_total_cycle_info = list()

        if load_mysql_all_table_name is None:
            logging.error('load_mysql_all_table_name value is empty!')
            return 1
        """load mysql data from register table name"""
        for load_mysql_single_table_name in load_mysql_all_table_name.split(';'):
            """get current iteration - cycle"""
            if load_mysql_single_table_name == 'ygg_job':
                """get current total cycle range"""
                bs_current_newest_cursor = self.get_behind_bs_iteration_cycle(\
                                            load_mysql_single_table_name)
                """load mysql data for strategy analysis"""
                sql_sentence = 'select * from ' + load_mysql_single_table_name +\
                ' where (kuType=\'' + str(self.ku_monitor_trick_type) +\
                '\') and ((iterId=\'' + str(bs_current_newest_cursor['iteration']) +\
                '\' and cycleId>=\'' + str(bs_current_newest_cursor['cycle']) +\
                '\') or ( iterId>\'' + str(bs_current_newest_cursor['iteration']) +\
                '\'));'
                if Config.load_agent_debug_level() >= 2:
                    modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [' + self.thread_name + '] sql sentence : [' + sql_sentence + ']\n')
                self.ku_total_cycle_info = AccessMysqlData.load_mysql_data_by_table(\
                    load_mysql_single_table_name, sql_sentence)

                if self.ku_total_cycle_info == 1:
                    logging.error('load mysql data operating failed.')
                    return 1
        return 0
Example #23
    def clear_finished_build_info(global_info):
        """clear the abnormal information in the ygg_monitor table"""
        """ kutype & flow level:"""
        """     Thinking: with bs newest iteration-cycle as the limit, update the information before this limit."""
        """job level:"""
        """     Thinking: update the newest job status"""
        """                 if the newest job status is succ, then update all"""

        kutype_global_info = global_info
        kutype_name = kutype_global_info['ygg_ku_global_conf'][0][1]
        project_name = kutype_global_info['ygg_ku_global_conf'][0][2]

        """get bs newest iteration and cycle id"""
        """iteration_cycle_cursor: record bs newest iteration - cycle, for get kutype information by GetFlowinfoByCursor method"""
        kutype_status_dir = Config.status_dir + '/' + kutype_name
        kutype_bs_newest_iteration_file = kutype_status_dir + '/bs_newest_iteration.status'
        kutype_bs_newest_cycle_file = kutype_status_dir + '/bs_newest_cycle.status'
        bs_newest_iteration = modules.Util.get_newest_value_by_key_file(\
                kutype_bs_newest_iteration_file, 'BS_NEWEST_ITERATION')
        bs_newest_cycle = modules.Util.get_newest_value_by_key_file(\
                kutype_bs_newest_cycle_file, 'BS_NEWEST_CYCLE')

        """Initialization abnormal mysql table name"""
        ygg_monitor_table_name = 'ygg_monitor'
        single_job_info = list()

        """get ygg monitor information"""
        """     flow_monitor: flow level abnormal"""
        """     kutype_monitor: kutype level abnormal"""
        sql_sentence = 'select * from ' + ygg_monitor_table_name + ' where currStatus!=1 and ' +\
                        'projectName=\'' + project_name + '\' and errorName!=\'flow_monitor\' and '\
                        'errorName!=\'kutype_monitor\''
        if Config.load_agent_debug_level() >= 2:
            modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [clear_finished_build_info_check] sql sentence: ' + sql_sentence + '\n')
        mysql_return_data = AccessMysqlData.load_mysql_data_by_table(\
                                    ygg_monitor_table_name, sql_sentence)

        """check and update job information"""
        for single_job_info in mysql_return_data:
            abnormal_job_project = single_job_info[1]
            abnormal_job_iteratiom = str(single_job_info[2])
            abnormal_job_cycle = str(single_job_info[3])
            abnormal_job_execid = str(single_job_info[4])
            abnormal_job_name = single_job_info[5]
            abnormal_job_analysis = single_job_info[6]

            """get newest job info"""
            sql_sentence = 'select * from ygg_job where jobName=\'' + abnormal_job_name +\
                            '\' and iterId=\'' + abnormal_job_iteratiom + '\' and cycleId=\'' +\
                            abnormal_job_cycle + '\' and kutype=\'' + abnormal_job_project +\
                            '\' and execid=\'' + abnormal_job_execid + '\';'

            if Config.load_agent_debug_level() >= 2:
                modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [clear_finished_build_info_check] sql sentence: ' + sql_sentence + '\n')
            mysql_return_data = AccessMysqlData.load_mysql_data_by_table(\
                                    'ygg_job', sql_sentence)
            if mysql_return_data:
                abnormal_job_curr_status = mysql_return_data[0][2]
                abnormal_job_curr_hadoop_link = mysql_return_data[0][10]
                abnormal_job_curr_stderr_link = mysql_return_data[0][11]
                

                """update abnormal job info"""
                error_appear_time = datetime.datetime.now()
                sql_sentence = 'update ' + ygg_monitor_table_name + ' set currStatus=\''\
                    + str(abnormal_job_curr_status) + '\', errorAppeartime=\''\
                    + str(error_appear_time) + '\', jobHadoopLink=\'' +\
                    abnormal_job_curr_hadoop_link + '\', jobStderrLink=\'' +\
                    abnormal_job_curr_stderr_link + '\' where currStatus!=1 and projectName=\''\
                    + project_name + '\' and errorAnalysis=\'' + abnormal_job_analysis +\
                    '\' and execId=\'' + abnormal_job_execid + '\' and errorName=\'' +\
                    abnormal_job_name + '\' and iterId=\'' + abnormal_job_iteratiom +\
                    '\' and cycleId=\'' + abnormal_job_cycle + '\';'

                if Config.load_agent_debug_level() >= 2:
                    modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                    ' [clear_finished_build_info_update_job] sql sentence: ' + sql_sentence + '\n')

                mysql_return_data = AccessMysqlData.insert_mysql_data_by_table(\
                                        ygg_monitor_table_name, sql_sentence)
                if mysql_return_data == 1:
                    return 1


        """update kutype and flow info"""
        error_appear_time = datetime.datetime.now()
        sql_sentence = 'update ' + ygg_monitor_table_name + ' set currStatus=1, errorAppeartime=\''\
                        + str(error_appear_time) + '\' where (currStatus!=1 and projectName=\''\
                        + project_name + '\') and ((iterId<\'' + bs_newest_iteration +\
                        '\') or (iterId=\'' + bs_newest_iteration +\
                        '\' and cycleId<=\'' + bs_newest_cycle + '\'))'

        if Config.load_agent_debug_level() >= 2:
            modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [clear_finished_build_info_update_flow] sql sentence: ' + sql_sentence + '\n')
        mysql_return_data = AccessMysqlData.load_mysql_data_by_table(\
                                    ygg_monitor_table_name, sql_sentence)

        return AccessMysqlData.insert_mysql_data_by_table(\
                                    ygg_monitor_table_name, sql_sentence)
Example #24
    def analysis(self):
        """Start analysis"""
        single_job_info = list()
        job_name = ""
        """
        +----------+------------+
        | statusId | statusName |
        +----------+------------+
        |        0 | notstarted |
        |        1 | success    |
        |        2 | running    |
        |        3 | failed     |
        |        4 | skipped    |
        |        5 | killed     |
        +----------+------------+
        """
        job_running_status = 2
        job_succ_status = 1
        job_current_status = 0
        job_start_time = 0
        job_used_time = 0

        for single_job_info in self.metadata:
            job_name = single_job_info[0]
            job_current_status = single_job_info[2]
            job_execid = str(single_job_info[9])
            job_start_time_datetime = str(single_job_info[7])

            if job_current_status == job_running_status:
                """get running job information"""
                for raw in range(0, len(self.kutype_job_conf)):
                    """Get timeout configuration"""
                    conf_regex_jobname = str(self.kutype_job_conf[raw][3])
                    job_name_regex = re.compile(conf_regex_jobname)

                    if job_name_regex.findall(job_name):
                        """Check if it times out"""
                        self.job_lowsla_time_s = self.kutype_job_conf[raw][5] * 60
                        self.job_sla_time_s = self.kutype_job_conf[raw][6] * 60
                        self.job_autofail_switch = self.kutype_job_conf[raw][8]
                        job_start_time = modules.Util.datetime_to_unixtime(
                            single_job_info[7])
                        self.current_unix_time = int(
                            re.split(r'\.', str(time.time()))[0])
                        job_used_time_s = self.current_unix_time - job_start_time

                        if job_used_time_s > self.job_sla_time_s:
                            """Record timeout information"""
                            job_used_time_h = job_used_time_s / 3600.0
                            job_used_time_h = ("%.2f" % job_used_time_h)
                            job_error_time = datetime.datetime.now()
                            check_exists_ret = GeneralOperation.check_and_insert_already_exists_job(\
                                            single_job_info, self.monitor_iterm,\
                                            'job_runningtime=' + str(job_used_time_h) + 'h',\
                                            str(job_error_time))
                            if check_exists_ret == 1:
                                logging.error('[JobOvertime] insert new job info to ygg_monitor failed!')
                            self.overtime_job_num += 1
                            """callback function"""
                            if str(self.job_autofail_switch) == '0':
                                """0: means close auto failover function"""
                                continue
                            job_hadoop_url = single_job_info[10]
                            if job_hadoop_url is None:
                                exec_cmd = '[' + job_name + '] hadoop jobid is none.'
                            else:
                                """exec failover cmd"""
                                job_hadoop_jobid = re.split(
                                    r'jobid=', job_hadoop_url)[1]
                                job_cluster = self.hadoop_cluster
                                exec_cmd = 'python ' + self.failed_bin
                                exec_cmd = exec_cmd + ' -j ' + job_hadoop_jobid
                                exec_cmd = exec_cmd + ' -c ' + job_cluster
                                exec_cmd = exec_cmd + ' &'
                                print exec_cmd
                                os.system(exec_cmd)
                                print '--- fail over ok ---'
                                """record failover operating"""
                                """This should not use the AccessMysqlData class"""
                                """I should use the GeneralOperation class"""
                                """But......"""
                                time_now = datetime.datetime.now()
                                sql_sentence = 'update ygg_monitor set operator=\'build_bot\', ' +\
                                                'operatingTime=\'' + str(time_now) +'\', ' +\
                                                'operating=\'Failover ' + job_hadoop_jobid + '\'' +\
                                                ' where execId=\'' + job_execid + '\' and ' +\
                                                'errorName=\'' + job_name + '\' and ' +\
                                                'errorStarttime=\'' + job_start_time_datetime +\
                                                '\' and currStatus=\'2\';'
                                check_exists_ret = AccessMysqlData.insert_mysql_data_by_table(\
                                                        'ygg_monitor', sql_sentence)
                                if check_exists_ret == 1:
                                    logging.error('[JobOvertime] update bot fail info to ygg_monitor failed!')
                            if Config.load_agent_debug_level() >= 3:
                                modules.Util.append_log_content_to_file(\
                                            Config.debug_public_strategy_file,\
                                            ' [' + self.kutype_name +\
                                            '_kutype_auto_failed] record data: ' +\
                                            exec_cmd + '\n')
                        break

        return self.overtime_job_num
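The auto-failover branch in Example #24 builds a shell command string and backgrounds it with os.system(cmd + ' &'). The subprocess module gives the same fire-and-forget behaviour without going through a shell or worrying about quoting; a hedged sketch (the helper script path and its -j/-c flags mirror the snippet but are not verified):

import subprocess

def launch_failover(failed_bin, hadoop_jobid, cluster):
    """Start the failover helper in the background; returns the Popen handle immediately."""
    cmd = ['python', failed_bin, '-j', hadoop_jobid, '-c', cluster]
    return subprocess.Popen(cmd)

# usage sketch:
# launch_failover(self.failed_bin, job_hadoop_jobid, self.hadoop_cluster)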
Example #25
        if Config.load_agent_debug_level() >= 2:
            modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                        ' [clear_finished_build_info_update_flow] sql sentence: ' + sql_sentence + '\n')
        mysql_return_data = AccessMysqlData.load_mysql_data_by_table(\
                                    ygg_monitor_table_name, sql_sentence)

        return AccessMysqlData.insert_mysql_data_by_table(\
                                    ygg_monitor_table_name, sql_sentence)

if __name__ == "__main__":

    single_job_info = list()
    raw = 0

    global_conf = Config.load_mysql_global_kutype_conf()
    metadata_conf = Config.load_mysql_kutype_metadata_conf()
    '''
    debug_info = AccessMysqlData.load_mysql_data_by_table(\
        'ygg_job', 'select * from ygg_job where kuType=\'swift\' and iterId=\'14\' and cycleId=\'22\' and errorName regexp \'download*\';')

    for single_job_info in debug_info:
        print single_job_info
        job_error_analysis = 'kutype_failed_job_num<1'
        nowtime = str(datetime.datetime.now())
        GeneralOperation.check_and_update_already_exists_job(single_job_info, job_error_analysis, job_error_analysis, nowtime)
        GeneralOperation.insert_ygg_monitor_job(single_job_info, job_error_analysis, 'job_failed', '2017-05-10 17:24:13')

    print AccessMysqlData.insert_mysql_data_by_table('ygg_monitor', 'INSERT INTO ygg_monitor \
        (projectName, iterId, cycleId, errorName, errorAnalysis, errorStatus, currStatus,\
        jobStarttime, jobErrortime, jobLink, operating) VALUES (\'wdna\', \'14\', \'22\',\
Example #26
        '''

        if self.monitor_iterm_key == 'kutype_running_flow_num':
            analysis_instance = KutypeRunningFlowNum(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_running_job_num':
            analysis_instance = KutypeRunningJobNum(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_failed_flow_num':
            analysis_instance = KutypeFailedFlowNum(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_failed_job_num':
            analysis_instance = KutypeFailedJobNum(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_cover_countdown':
            analysis_instance = KutypeCoverCountDown(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_start_interval':
            analysis_instance = KutypeStartInterval(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        else:
            return 0

        return analysis_instance.analysis()


if __name__ == "__main__":
    if Config.load_agent_debug_level() >= 3:
        modules.Util.append_log_content_to_file(
            Config.debug_public_strategy_file, 'test')
Example #27
from flask import Flask

from load_config import Config

app = Flask(__name__)


config_defaults = {
    "products_table": "Products",
    "product_name": "Name",
    "product_description": "Description",
    "product_photos": "Photos",
    "product_status": "Status",
    "product_price": "Price",
    "orders_table": "Orders"
}

c = Config(defaults=config_defaults)


@app.route('/')
def hello_world():
    return 'Hello AiroShop!'


if __name__ == '__main__':
    app.run()
Example #28
    def analysis(self):
        """Start analysis"""
        single_job_info = list()
        job_name = ""
        regex_start_job_flag = re.compile(r'download')
        regex_end_job_flag = re.compile(r'^end')

        """flow_iter_cycle_start_end_time: key:[iter_id_cycleid_(start|end)], value:[unixformat_time]"""
        flow_iter_cycle_start_end_time = dict()

        """flow information, for ygg monitor"""
        """  example: [ 'project', 'iterid', 'cycleid', 'flow_monitor', 'error_analysis', 'error_status',"""
        """             'flow_current_status', 'error_starttime', 'error_appear_time', 'project link',' flow link',   ]"""
        flow_information_list = list()
        project_link = self.compass_domain + '/manager?project='+\
                                self.project_name + '&flow=end#executions'

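        # Assumed metadata row layout, inferred from the indices used below:
        # [0]=jobName, [2]=jobStatus, [4]=iterId, [5]=cycleId, [7]=job start time (datetime)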
        for single_job_info in self.metadata:
            job_name = single_job_info[0]
            flag = ""

            """get all build process download job info"""
            """start: get the earliest start download job time for each cycle as the flow start time"""
            """end: get the end job time for each cycle as the flow end time"""
            if regex_start_job_flag.findall(job_name):
                flag = '_start'
                iteration_cursor = str(single_job_info[4])
                cycle_cursor = int(single_job_info[5])
                if str(single_job_info[7]) == 'None':
                    continue
                dateformat_time = single_job_info[7]
                unixformat_time = int(re.split(r'\.',\
                        str(time.mktime(dateformat_time.timetuple())))[0])

                dict_key = str(iteration_cursor) + '_' + str(cycle_cursor) + flag
                if (not flow_iter_cycle_start_end_time.has_key(dict_key)) or\
                    (unixformat_time < flow_iter_cycle_start_end_time[dict_key]):
                    flow_iter_cycle_start_end_time[dict_key] = unixformat_time
            elif (regex_end_job_flag.findall(job_name)) and (int(single_job_info[2]) == 1):
                flag = '_end'
                iteration_cursor = str(single_job_info[4])
                cycle_cursor = int(single_job_info[5])
                dateformat_time = single_job_info[7]
                unixformat_time = int(re.split(r'\.',\
                        str(time.mktime(dateformat_time.timetuple())))[0])

                dict_key = str(iteration_cursor) + '_' + str(cycle_cursor) + flag
                if (not flow_iter_cycle_start_end_time.has_key(dict_key)):
                    flow_iter_cycle_start_end_time[dict_key] = unixformat_time

        """ for sort by iteration then cycle"""
        """  [ iteration_cycle ] ---> [[iteration1, cycle1], [iteration2, cycle2], ...]"""
        dict_key_list = list()

        for count in range(0, len(flow_iter_cycle_start_end_time.keys())):
            iteration_cursor_current = int(\
                        re.split(r'_', flow_iter_cycle_start_end_time.keys()[count])[0])
            cycle_cursor_current = int(\
                        re.split(r'_', flow_iter_cycle_start_end_time.keys()[count])[1])
            """In order to get heavy: start / end flasg"""
            list_cursor_current = [iteration_cursor_current, cycle_cursor_current]
            if list_cursor_current not in dict_key_list:
                dict_key_list.append(list_cursor_current)
        dict_key_list.sort(key=itemgetter(0, 1))

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_flow_overtime_num] temp data: ' +\
                str(flow_iter_cycle_start_end_time) + '\n')

        """find the incomplete flow, and calculate whether the timeout"""
        """if the timeout expires, the counter is incremented"""
        for iteration_cycle_cursor in dict_key_list:
            """get current iteration and cycle id"""
            iteration_cursor_current = str(iteration_cycle_cursor[0])
            cycle_cursor_current = str(iteration_cycle_cursor[1])

            dict_start_key = iteration_cursor_current + '_' + cycle_cursor_current + '_start'
            dict_end_key = iteration_cursor_current + '_' + cycle_cursor_current + '_end'
            iter_cycle_cusror_start_time = int(flow_iter_cycle_start_end_time[dict_start_key])

            """calculating time"""
            self.current_unix_time = int(re.split(r'\.', str(time.time()))[0])
            running_time_s = iter_cycle_cusror_start_time + self.kutype_running_time_s -\
                            self.current_unix_time
            used_time_h = (self.kutype_running_time_s - running_time_s) / 3600.0
            used_time_h = ("%.2f" % used_time_h)

            """record flow information"""
            """ This realization is not the best way, forced by time"""
            get_flow_info_instance = GetFlowinfoByCursor(\
                self.metadata, iteration_cycle_cursor, iter_cycle_cusror_start_time)
            flow_information_list = get_flow_info_instance.get_flow_key_information()
            exec_id = flow_information_list[3]
            flow_link = self.compass_domain + '/executor?execid='
            flow_link = flow_link + str(exec_id) + '#jobslist'

            flow_information_list.append('flow_monitor')
            flow_information_list.append(self.monitor_iterm)
            flow_information_list.append('flow-usedtime=' + str(used_time_h) + 'h')
            flow_information_list.append('2')
            flow_information_list.append(str(datetime.datetime.now()))
            flow_information_list.append(project_link)
            flow_information_list.append(flow_link)

            if Config.load_agent_debug_level() >= 3:
                modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                    ' [' + self.kutype_name + '_flow_overtime_num] temp data: ' +\
                    str(flow_information_list) + '\n')

            """if flow not done"""
            if (not flow_iter_cycle_start_end_time.has_key(dict_end_key)):

                """if overtime, record error info"""
                if running_time_s <= 0:
                    self.overtime_flow_num += 1
                    check_exists_ret = GeneralOperation.check_and_update_already_exists_flow(\
                                                                        flow_information_list)
                    if check_exists_ret == 255:
                        GeneralOperation.insert_ygg_monitor_flow(flow_information_list)
            else:
                """if flow done, update end time stamp"""
                flow_information_list[8] = '1'
                flow_information_list[9] = modules.Util.unixtime_to_datetime(\
                                            flow_iter_cycle_start_end_time[dict_end_key])
                GeneralOperation.check_and_update_already_exists_flow(flow_information_list)

        return self.overtime_flow_num
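
The analysis() method above keys flow timing by 'iterId_cycleId' plus a '_start' or '_end' suffix, keeping the earliest download-job start time and the recorded end-job time per cycle, and counts a flow as overtime once the allowed running time elapses with no end record. A toy sketch of that bookkeeping; the running-time limit and timestamps are invented for illustration:

import time

kutype_running_time_s = 6 * 3600                 # assumed allowed running time: 6 h
start_end = {'14_22_start': 1494480000}          # earliest download-job start (made up)
now = int(time.time())

remaining_s = start_end['14_22_start'] + kutype_running_time_s - now
used_time_h = "%.2f" % ((kutype_running_time_s - remaining_s) / 3600.0)

if '14_22_end' not in start_end and remaining_s <= 0:
    # no end record and the allowed time is spent: the flow counts as overtime
    print('flow 14_22 overtime, flow-usedtime=' + used_time_h + 'h')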
Пример #29
0
    def start(self):
        """start method"""
        """
        print "flow - %s - start analysis!!!" % self.strategy_name
        """

        if self.monitor_iterm_key == 'flow_overtime_num':
            analysis_instance = FlowOvertimeNum(\
                    self.mon_iterm, self.meta_data, self.kutype_global_conf)
        else:
            return 0

        return analysis_instance.analysis()


if __name__ == "__main__":
    ku_global_conf_list = dict()
    ku_global_conf = Config.load_mysql_global_kutype_conf()
    kutype = 'wp'
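    # Load each configured global table filtered to the 'wp' kutype; the resulting
    # dict (keyed by data_name) is what StartAnalysis receives as kutype_global_conf.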

    for raw in range(0, len(ku_global_conf)):
        sql_sentence = 'select * from ' + ku_global_conf[raw]['table'] + \
                        ' where kuType=\'' + kutype + '\';'
        ku_global_conf_list[ku_global_conf[raw]['data_name']] = \
            AccessMysqlData.load_mysql_data_by_conf(ku_global_conf[raw], sql_sentence)
    kutype_global_conf = ku_global_conf_list

    debug_instance = StartAnalysis('flow_overtime_num', 'test', kutype_global_conf)
    debug_instance.start()
Пример #30
0
from file_manager import FileReader
from load_config import Config
import time


# ToDo: Save somewhere number from first run and later parse just newest file and keep updating this number.
# ToDo: Try to make change of regexes possible without editing of code.
# ToDo: Progress bar when parsing files.
# ToDo: Add support for .gz files.
# ToDo: Add bot filtration option.

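# Note: time.clock() measures wall-clock time on Windows but CPU time on most Unix
# systems, and was removed in Python 3.8; time.time() is the portable choice for elapsed time.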
start_time = time.clock()

config = Config()
config.load_config()
print "\nFor domain '" + config.domain + "' page '" + config.page + "', there are %s unique IPs in your logs." % \
            len(FileReader.get_uniq_ips(config.location, config.files, config.separate, config.page, config.domain))

print "It took %.2fs to figure it out." % (time.clock() - start_time)
Пример #31
0
    def analysis(self):
        """Start analysis"""
        single_job_info = list()
        job_name = ""
        regex_start_job_flag = re.compile(r'download')
        regex_end_job_flag = re.compile(r'^end')
        """flow_iter_cycle_start_end_time: key:[iter_id_cycleid_(start|end)], value:[unixformat_time]"""
        flow_iter_cycle_start_end_time = dict()
        """flow information, for ygg monitor"""
        """  example: [ 'project', 'iterid', 'cycleid', 'flow_monitor', 'error_analysis', 'error_status',"""
        """             'flow_current_status', 'error_starttime', 'error_appear_time', 'project link',' flow link',   ]"""
        flow_information_list = list()
        project_link = self.compass_domain + '/manager?project='+\
                                self.project_name + '&flow=end#executions'

        for single_job_info in self.metadata:
            job_name = single_job_info[0]
            flag = ""
            """get all build process download job info"""
            """start: get the earliest start download job time for each cycle as the flow start time"""
            """end: get the end job time for each cycle as the flow end time"""
            if regex_start_job_flag.findall(job_name):
                flag = '_start'
                iteration_cursor = str(single_job_info[4])
                cycle_cursor = int(single_job_info[5])
                if str(single_job_info[7]) == 'None':
                    continue
                dateformat_time = single_job_info[7]
                unixformat_time = int(re.split(r'\.',\
                        str(time.mktime(dateformat_time.timetuple())))[0])

                dict_key = str(iteration_cursor) + '_' + str(
                    cycle_cursor) + flag
                if (not flow_iter_cycle_start_end_time.has_key(dict_key)) or\
                    (unixformat_time < flow_iter_cycle_start_end_time[dict_key]):
                    flow_iter_cycle_start_end_time[dict_key] = unixformat_time
            elif (regex_end_job_flag.findall(job_name)) and (int(
                    single_job_info[2]) == 1):
                flag = '_end'
                iteration_cursor = str(single_job_info[4])
                cycle_cursor = int(single_job_info[5])
                dateformat_time = single_job_info[7]
                unixformat_time = int(re.split(r'\.',\
                        str(time.mktime(dateformat_time.timetuple())))[0])

                dict_key = str(iteration_cursor) + '_' + str(
                    cycle_cursor) + flag
                if (not flow_iter_cycle_start_end_time.has_key(dict_key)):
                    flow_iter_cycle_start_end_time[dict_key] = unixformat_time
        """ for sort by iteration then cycle"""
        """  [ iteration_cycle ] ---> [[iteration1, cycle1], [iteration2, cycle2], ...]"""
        dict_key_list = list()

        for count in range(0, len(flow_iter_cycle_start_end_time.keys())):
            iteration_cursor_current = int(\
                        re.split(r'_', flow_iter_cycle_start_end_time.keys()[count])[0])
            cycle_cursor_current = int(\
                        re.split(r'_', flow_iter_cycle_start_end_time.keys()[count])[1])
            """In order to get heavy: start / end flasg"""
            list_cursor_current = [
                iteration_cursor_current, cycle_cursor_current
            ]
            if list_cursor_current not in dict_key_list:
                dict_key_list.append(list_cursor_current)
        dict_key_list.sort(key=itemgetter(0, 1))

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_flow_overtime_num] temp data: ' +\
                str(flow_iter_cycle_start_end_time) + '\n')
        """find the incomplete flow, and calculate whether the timeout"""
        """if the timeout expires, the counter is incremented"""
        for iteration_cycle_cursor in dict_key_list:
            """get current iteration and cycle id"""
            iteration_cursor_current = str(iteration_cycle_cursor[0])
            cycle_cursor_current = str(iteration_cycle_cursor[1])

            dict_start_key = iteration_cursor_current + '_' + cycle_cursor_current + '_start'
            dict_end_key = iteration_cursor_current + '_' + cycle_cursor_current + '_end'
            iter_cycle_cusror_start_time = int(
                flow_iter_cycle_start_end_time[dict_start_key])
            """calculating time"""
            self.current_unix_time = int(re.split(r'\.', str(time.time()))[0])
            running_time_s = iter_cycle_cusror_start_time + self.kutype_running_time_s -\
                            self.current_unix_time
            used_time_h = (self.kutype_running_time_s -
                           running_time_s) / 3600.0
            used_time_h = ("%.2f" % used_time_h)
            """record flow information"""
            """ This realization is not the best way, forced by time"""
            get_flow_info_instance = GetFlowinfoByCursor(\
                self.metadata, iteration_cycle_cursor, iter_cycle_cusror_start_time)
            flow_information_list = get_flow_info_instance.get_flow_key_information()
            exec_id = flow_information_list[3]
            flow_link = self.compass_domain + '/executor?execid='
            flow_link = flow_link + str(exec_id) + '#jobslist'

            flow_information_list.append('flow_monitor')
            flow_information_list.append(self.monitor_iterm)
            flow_information_list.append('flow-usedtime=' + str(used_time_h) +
                                         'h')
            flow_information_list.append('2')
            flow_information_list.append(str(datetime.datetime.now()))
            flow_information_list.append(project_link)
            flow_information_list.append(flow_link)

            if Config.load_agent_debug_level() >= 3:
                modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                    ' [' + self.kutype_name + '_flow_overtime_num] temp data: ' +\
                    str(flow_information_list) + '\n')
            """if flow not done"""
            if (not flow_iter_cycle_start_end_time.has_key(dict_end_key)):
                """if overtime, record error info"""
                if running_time_s <= 0:
                    self.overtime_flow_num += 1
                    check_exists_ret = GeneralOperation.check_and_update_already_exists_flow(\
                                                                        flow_information_list)
                    if check_exists_ret == 255:
                        GeneralOperation.insert_ygg_monitor_flow(
                            flow_information_list)
            else:
                """if flow done, update end time stamp"""
                flow_information_list[8] = '1'
                flow_information_list[9] = modules.Util.unixtime_to_datetime(\
                                            flow_iter_cycle_start_end_time[dict_end_key])
                GeneralOperation.check_and_update_already_exists_flow(
                    flow_information_list)

        return self.overtime_flow_num
Пример #32
0
        print "Kutype - %s start analysis!!!" % self.strategy_name
        '''

        if self.monitor_iterm_key == 'kutype_running_flow_num':
            analysis_instance = KutypeRunningFlowNum(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_running_job_num':
            analysis_instance = KutypeRunningJobNum(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_failed_flow_num':
            analysis_instance = KutypeFailedFlowNum(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_failed_job_num':
            analysis_instance = KutypeFailedJobNum(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_cover_countdown':
            analysis_instance = KutypeCoverCountDown(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        elif self.monitor_iterm_key == 'kutype_start_interval':
            analysis_instance = KutypeStartInterval(\
                                self.mon_iterm, self.meta_data, self.kutype_global_conf)
        else:
            return 0
            
        return analysis_instance.analysis()


if __name__ == "__main__":
    if Config.load_agent_debug_level() >= 3:
        modules.Util.append_log_content_to_file(Config.debug_public_strategy_file, 'test')
Пример #33
0
    def get_all_start_record(self, iter_cycle_dict):
        """Difficulty: running time and start interval without any relationship"""
        """            The current run is not necessarily up to date"""
        """[ |--------|                                                       ]"""
        """[     |--------|                                                   ]"""
        """[                |--------|                                        ]"""
        """[                           |XXXXXXXX|                             ]"""
        """[                                      |--------|                  ]"""

        """Initialization record abnormal kutype information"""
        project_link = self.compass_domain + '/manager?project='+\
                                self.project_name + '&flow=end#executions'
        flow_link = self.compass_domain + '/executor?execid='

        """  [ iteration_cycle ] ---> [[iteration1, cycle1], [iteration2, cycle2], ...]"""
        """  iter_cycle_dict dict example: {'59_39': 1494860631, '59_38': 1494853426}"""
        """  dict_key_list dict example: """
        dict_key_list = [[] for i in range(len(iter_cycle_dict.keys()))]
        for count in range(0, len(iter_cycle_dict.keys())):
            dict_key_list[count].append(int\
                    (re.split(r'_', iter_cycle_dict.keys()[count])[0]))
            dict_key_list[count].append(int\
                    (re.split(r'_', iter_cycle_dict.keys()[count])[1]))

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_kutype_start_interval] temp data: ' +\
                str(iter_cycle_dict) + '\n')

        """ for sort by iteration then cycle"""
        dict_key_list.sort(key=itemgetter(0, 1))

        for iteration_cycle_cursor in dict_key_list:
            """get current iteration and cycle id"""
            iteration_cursor_current = int(iteration_cycle_cursor[0])
            cycle_cursor_current = int(iteration_cycle_cursor[1])

            """get next iteration and cycle id"""
            if cycle_cursor_current == (self.total_cycle_num - 1):
                iteration_cursor_next = iteration_cursor_current + 1
                cycle_cursor_current_next = 0
            else:
                iteration_cursor_next = iteration_cursor_current
                cycle_cursor_current_next = cycle_cursor_current + 1

            """combination next key"""
            dict_key = str(iteration_cursor_next) + '_' + str(cycle_cursor_current_next)

            """missing the next iteration-cycle cursor, means: end key, or missing a build process"""
            """print interval time"""
            if (not iter_cycle_dict.has_key(dict_key)):
                dict_key = str(iteration_cursor_current) + '_' + str(cycle_cursor_current)
                unixformat_time = iter_cycle_dict[dict_key]
                self.current_unix_time = int(re.split(r'\.', str(time.time()))[0])
                start_interval_countdown_s = self.kutype_interval_time_s + unixformat_time\
                                    + self.kutype_interval_buffer_s - self.current_unix_time
                start_interval_countdown_h = start_interval_countdown_s / 3600.0
                start_interval_countdown_h = ("%.2f" % start_interval_countdown_h)
                break

        """check and update/insert kutype information in the ygg monitor table """
        """kutype_information_list: project_name, intertion_id, cycle_id, exec_id, self.starttime"""
        """                         error_analysis, monitor_iterm, error_status, current_status, error_starttime"""
        """                         errpr_apppeartime, link1, link2"""
        iteration_cycle_cursor = [iteration_cursor_current, cycle_cursor_current]
        get_kutype_info_instance = GetFlowinfoByCursor(self.metadata,\
                            iteration_cycle_cursor, unixformat_time)
        kutype_information_list = get_kutype_info_instance.get_flow_key_information()
        exec_id = kutype_information_list[3]
        flow_link = flow_link + str(exec_id) + '#jobslist'

        kutype_information_list.append('kutype_monitor')
        kutype_information_list.append(self.monitor_iterm)
        kutype_information_list.append('kutype-start-interval=' +\
                                        str(start_interval_countdown_h) + 'h')
        kutype_information_list.append('2')
        kutype_information_list.append(str(datetime.datetime.now()))
        kutype_information_list.append(project_link)
        kutype_information_list.append(flow_link)

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_kutype_start_interval] temp data: ' +\
                str(kutype_information_list) + '\n')

        if int(start_interval_countdown_s) <= 0:
            kutype_information_list[1] = iteration_cursor_next
            kutype_information_list[2] = cycle_cursor_current_next
            kutype_information_list[3] = '0000'
            kutype_information_list[8] = '0'
            kutype_information_list[4] = '0000-00-00 00:00:00'
            check_exists_ret = GeneralOperation.check_and_update_already_exists_kutype(\
                                                                kutype_information_list)
            if check_exists_ret == 255:
                GeneralOperation.insert_ygg_monitor_flow(kutype_information_list)
        else:
            kutype_information_list[8] = '1'
            GeneralOperation.check_and_update_already_exists_kutype(kutype_information_list)
            
        return start_interval_countdown_h
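
get_all_start_record() above walks the sorted iteration-cycle cursors until one has no successor, then counts down from that build's start time plus the configured interval and buffer; a countdown at or below zero means the next build is overdue. A small standalone sketch of the wrap-around and countdown arithmetic; the constants and timestamp are invented, not values from the example:

import time

total_cycle_num = 40                       # assumed cycles per iteration
kutype_interval_time_s = 2 * 3600          # assumed start interval: 2 h
kutype_interval_buffer_s = 600             # assumed tolerance buffer: 10 min

def next_cursor(iteration, cycle):
    # The last cycle of an iteration rolls over to cycle 0 of the next iteration.
    if cycle == total_cycle_num - 1:
        return iteration + 1, 0
    return iteration, cycle + 1

last_start_unix = 1494860631               # start time of the newest cursor, e.g. '59_39'
countdown_s = kutype_interval_time_s + last_start_unix \
              + kutype_interval_buffer_s - int(time.time())
countdown_h = "%.2f" % (countdown_s / 3600.0)   # negative once the next build is overdue

print(next_cursor(59, 39))                 # (60, 0): cycle 39 of 40 wraps to the next iteration
print(countdown_h)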
Пример #34
0
    def get_behind_bs_iteration_cycle(self, load_mysql_table_name='ygg_job'):
        """get total cycle range """
        """Thinking:"""
        """     mode1: full mode, just get bs the latest iteration."""
        """     mode2: roll mode, consider the missing of release db in the middle"""
        """mysql_data_iteration_min: means bs data version"""
        """mysql_data_cycle_min: means bs db number"""
        range_iteration_min = 0
        range_iteration_max = 0
        range_cycle_min = 0
        range_cycle_max = 0

        bs_current_iteration = 0
        bs_current_cycle = 0
        build_succ_iteration_cycle = list()
        """get bs current iteration - cycle"""
        sql_sentence = 'select * from ' + load_mysql_table_name +\
        ' where kuType=\'' + self.ku_monitor_trick_type +\
        '\' and jobName regexp \'^end\' and jobStatus=\'1\' order by iterId DESC, cycleId DESC limit 1;'
        if Config.load_agent_debug_level() >= 2:
            modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                ' [' + self.thread_name + '] sql sentence: ' + sql_sentence + '\n')

        mysql_return_data = AccessMysqlData.load_mysql_data_by_table(\
            load_mysql_table_name, sql_sentence)

        if mysql_return_data == '1':
            logging.error('get bs cursor mysql operating failed.')
            return 1

        bs_current_iteration = int(mysql_return_data[0][4])
        bs_current_cycle = int(mysql_return_data[0][5])
        """find current total cycle range"""
        if ((int(bs_current_cycle) == 0) and (self.ku_total_cycle == 1)) or\
                (int(bs_current_cycle) == (self.ku_total_cycle - 1)):
            """full amount mode"""
            """roll mode, and current cycle is 0."""
            range_iteration_min = int(bs_current_iteration)

            sql_sentence = 'select iterId,cycleId from ' + load_mysql_table_name +\
                ' where kuType=\'' + str(self.ku_monitor_trick_type) +\
                '\' and iterId=\'' + str(range_iteration_min) +\
                '\' and jobName regexp \'^end\' and jobStatus=\'1\' order by cycleId ASC;'
        else:
            """roll mode, and current cycle is not 0."""
            range_iteration_min = int(bs_current_iteration) - 1
            range_iteration_max = int(bs_current_iteration)
            range_cycle_min = int(bs_current_cycle) + 1
            range_cycle_max = int(bs_current_cycle)

            sql_sentence = 'select iterId,cycleId from ' + load_mysql_table_name +\
                ' where ( kuType=\'' + str(self.ku_monitor_trick_type) +\
                '\' and jobName regexp \'^end\' and jobStatus=\'1\' )' +\
                ' and (( iterId=\'' + str(range_iteration_min) +\
                '\' and cycleId>=\'' + str(range_cycle_min) +\
                '\' ) or ( iterId=\'' + str(range_iteration_max) +\
                '\' and cycleId<=\'' + str(range_cycle_max) +\
                '\' )) order by cycleId ASC;'

        if Config.load_agent_debug_level() >= 2:
            modules.Util.append_log_content_to_file(Config.debug_sql_sentence_file,\
                ' [' + self.thread_name + '] get bs cursor sql sentence: ' + sql_sentence + '\n')
        build_succ_iteration_cycle = AccessMysqlData.load_mysql_data_by_table(\
            load_mysql_table_name, sql_sentence)

        if build_succ_iteration_cycle == '1':
            logging.error('get bs cursor mysql operating failed.')
            return 1

        bs_current_newest_cursor = self.get_newest_bs_cursor(
            build_succ_iteration_cycle)
        """print iteration and cycle id to status file"""
        status_content = ' - [' + self.kutype_name + '] - BS_NEWEST_ITERATION:'\
            + str(bs_current_newest_cursor['iteration']) + '\n'
        modules.Util.append_log_content_to_file(\
            self.kutype_monitor_iteration_file, status_content)
        status_content = ' - [' + self.kutype_name + '] - BS_NEWEST_CYCLE:'\
            + str(bs_current_newest_cursor['cycle']) + '\n'
        modules.Util.append_log_content_to_file(\
            self.kutype_monitor_cycle_file, status_content)

        return bs_current_newest_cursor
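
get_behind_bs_iteration_cycle() above selects its SQL range in two modes: only the current bs iteration when the cursor sits at the last cycle (or iterations have a single cycle), otherwise the tail of the previous iteration plus the head of the current one. A MySQL-free sketch of that window selection; the cursor values are invented:

def cycle_window(bs_iteration, bs_cycle, total_cycle):
    """Return (iteration, min_cycle, max_cycle) ranges mirroring the SQL branches above."""
    if (bs_cycle == 0 and total_cycle == 1) or bs_cycle == total_cycle - 1:
        # Full mode, or roll mode sitting at the last cycle: current iteration only.
        return [(bs_iteration, 0, total_cycle - 1)]
    # Roll mode mid-iteration: tail of the previous iteration plus head of the current one.
    return [(bs_iteration - 1, bs_cycle + 1, total_cycle - 1),
            (bs_iteration, 0, bs_cycle)]

# Assumed cursor: iteration 59, cycle 38, 40 cycles per iteration.
print(cycle_window(59, 38, 40))    # [(58, 39, 39), (59, 0, 38)]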
Пример #35
0
    def get_all_start_record(self, iter_cycle_dict):
        """Difficulty: running time and start interval without any relationship"""
        """            The current run is not necessarily up to date"""
        """[ |--------|                                                       ]"""
        """[     |--------|                                                   ]"""
        """[                |--------|                                        ]"""
        """[                           |XXXXXXXX|                             ]"""
        """[                                      |--------|                  ]"""
        """Initialization record abnormal kutype information"""
        project_link = self.compass_domain + '/manager?project='+\
                                self.project_name + '&flow=end#executions'
        flow_link = self.compass_domain + '/executor?execid='
        """  [ iteration_cycle ] ---> [[iteration1, cycle1], [iteration2, cycle2], ...]"""
        """  iter_cycle_dict dict example: {'59_39': 1494860631, '59_38': 1494853426}"""
        """  dict_key_list dict example: """
        dict_key_list = [[] for i in range(len(iter_cycle_dict.keys()))]
        for count in range(0, len(iter_cycle_dict.keys())):
            dict_key_list[count].append(int\
                    (re.split(r'_', iter_cycle_dict.keys()[count])[0]))
            dict_key_list[count].append(int\
                    (re.split(r'_', iter_cycle_dict.keys()[count])[1]))

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_kutype_start_interval] temp data: ' +\
                str(iter_cycle_dict) + '\n')
        """ for sort by iteration then cycle"""
        dict_key_list.sort(key=itemgetter(0, 1))

        for iteration_cycle_cursor in dict_key_list:
            """get current iteration and cycle id"""
            iteration_cursor_current = int(iteration_cycle_cursor[0])
            cycle_cursor_current = int(iteration_cycle_cursor[1])
            """get next iteration and cycle id"""
            if cycle_cursor_current == (self.total_cycle_num - 1):
                iteration_cursor_next = iteration_cursor_current + 1
                cycle_cursor_current_next = 0
            else:
                iteration_cursor_next = iteration_cursor_current
                cycle_cursor_current_next = cycle_cursor_current + 1
            """combination next key"""
            dict_key = str(iteration_cursor_next) + '_' + str(
                cycle_cursor_current_next)
            """missing the next iteration-cycle cursor, means: end key, or missing a build process"""
            """print interval time"""
            if (not iter_cycle_dict.has_key(dict_key)):
                dict_key = str(iteration_cursor_current) + '_' + str(
                    cycle_cursor_current)
                unixformat_time = iter_cycle_dict[dict_key]
                self.current_unix_time = int(
                    re.split(r'\.', str(time.time()))[0])
                start_interval_countdown_s = self.kutype_interval_time_s + unixformat_time\
                                    + self.kutype_interval_buffer_s - self.current_unix_time
                start_interval_countdown_h = start_interval_countdown_s / 3600.0
                start_interval_countdown_h = ("%.2f" %
                                              start_interval_countdown_h)
                break
        """check and update/insert kutype information in the ygg monitor table """
        """kutype_information_list: project_name, intertion_id, cycle_id, exec_id, self.starttime"""
        """                         error_analysis, monitor_iterm, error_status, current_status, error_starttime"""
        """                         errpr_apppeartime, link1, link2"""
        iteration_cycle_cursor = [
            iteration_cursor_current, cycle_cursor_current
        ]
        get_kutype_info_instance = GetFlowinfoByCursor(self.metadata,\
                            iteration_cycle_cursor, unixformat_time)
        kutype_information_list = get_kutype_info_instance.get_flow_key_information()
        exec_id = kutype_information_list[3]
        flow_link = flow_link + str(exec_id) + '#jobslist'

        kutype_information_list.append('kutype_monitor')
        kutype_information_list.append(self.monitor_iterm)
        kutype_information_list.append('kutype-start-interval=' +\
                                        str(start_interval_countdown_h) + 'h')
        kutype_information_list.append('2')
        kutype_information_list.append(str(datetime.datetime.now()))
        kutype_information_list.append(project_link)
        kutype_information_list.append(flow_link)

        if Config.load_agent_debug_level() >= 3:
            modules.Util.append_log_content_to_file(Config.debug_public_strategy_file,\
                ' [' + self.kutype_name + '_kutype_start_interval] temp data: ' +\
                str(kutype_information_list) + '\n')

        if int(start_interval_countdown_s) <= 0:
            kutype_information_list[1] = iteration_cursor_next
            kutype_information_list[2] = cycle_cursor_current_next
            kutype_information_list[3] = '0000'
            kutype_information_list[8] = '0'
            kutype_information_list[4] = '0000-00-00 00:00:00'
            check_exists_ret = GeneralOperation.check_and_update_already_exists_kutype(\
                                                                kutype_information_list)
            if check_exists_ret == 255:
                GeneralOperation.insert_ygg_monitor_flow(
                    kutype_information_list)
        else:
            kutype_information_list[8] = '1'
            GeneralOperation.check_and_update_already_exists_kutype(
                kutype_information_list)

        return start_interval_countdown_h
Пример #36
0
    def start(self):
        """start method"""
        """
        print "flow - %s - start analysis!!!" % self.strategy_name
        """

        if self.monitor_iterm_key == 'flow_overtime_num':
            analysis_instance = FlowOvertimeNum(\
                    self.mon_iterm, self.meta_data, self.kutype_global_conf)
        else:
            return 0

        return analysis_instance.analysis()


if __name__ == "__main__":
    ku_global_conf_list = dict()
    ku_global_conf = Config.load_mysql_global_kutype_conf()
    kutype = 'wp'

    for raw in range(0, len(ku_global_conf)):
        sql_sentence = 'select * from ' + ku_global_conf[raw]['table'] + \
                        ' where kuType=\'' + kutype + '\';'
        ku_global_conf_list[ku_global_conf[raw]['data_name']] = \
            AccessMysqlData.load_mysql_data_by_conf(ku_global_conf[raw], sql_sentence)
    kutype_global_conf = ku_global_conf_list

    debug_instance = StartAnalysis('flow_overtime_num', 'test',
                                   kutype_global_conf)
    debug_instance.start()