Example No. 1
def get_latest():
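    """Return the most recent per-site check result from the Redis 'latest' hash as JSON."""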
    r = redis.Redis(host=parameter.REDIS_SERVER(),
                    port=parameter.REDIS_PORT(),
                    decode_responses=True)
    latest = r.hgetall('latest')
    res_dict = {}
    if 'timestamp' not in latest:
        res_dict['date'] = 'No record'
    else:
        res_dict['date'] = datetime.fromtimestamp(int(
            latest['timestamp']), LOCAL_TZ).strftime(TIME_FORMAT)
        site_list = [item['site'] for item in parameter.SITE_CFG()]
        for site in site_list:
            if site in latest:
                if latest[site] == 'NOT RUNNING':
                    res_dict[site] = 'NOT RUNNING'
                    continue

                json_data = json.loads(latest[site])
                err_item_list = []
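                # Counter keys are test names recorded by check_log; the first
                # five characters are assumed to be a fixed prefix (e.g. 'test_')
                # and are dropped for display.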
                for counter in json_data:
                    err_item_list.append('{} fail {} times'.format(
                        counter[5:], json_data[counter]))
                res_dict[site] = err_item_list
            else:
                res_dict[site] = 'pass'

    return jsonify(res_dict)
Example No. 2
def fail_count():
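    """Render per-site failure counts aggregated over the requested time range."""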
    time_range = request.args.get('range', default=0, type=int)
    time_period = request.args.get('periodUnit', default='days', type=str)
    if time_range == 0:
        return render_template('failCount.html',
                               timeRange=time_range,
                               timePeriod=time_period)

    if time_period == 'min':
        delta_time = timedelta(minutes=time_range)
    elif time_period == 'hr':
        delta_time = timedelta(hours=time_range)
    else:
        delta_time = timedelta(days=time_range)

    site_list = [item['site'] for item in parameter.SITE_CFG()]
    r = redis.Redis(host=parameter.REDIS_SERVER(),
                    port=parameter.REDIS_PORT(),
                    decode_responses=True)
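    # NOTE: a single SCAN pass is used here; REDIS_SCAN_AMOUNT is assumed to be
    # large enough to return every 'LOG:*' key in one call.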
    index, redis_key = r.scan(cursor=0,
                              match='LOG:*',
                              count=parameter.REDIS_SCAN_AMOUNT())
    redis_key.sort(reverse=True)

    target_ts = int((datetime.now() - delta_time).timestamp())
    res_data = {}
    for key in redis_key:
        hash_data = r.hgetall(key)
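        # Keys are named 'LOG:<unix timestamp>' (see check_log); redis_key is
        # sorted newest-first, so stop at the first record older than the window.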
        timestamp = int(key[4:])
        if timestamp < target_ts:
            break

        for site in hash_data:
            json_data = json.loads(hash_data[site])
            if site not in res_data:
                res_data[site] = json_data
            else:
                res_data[site] = dict(
                    Counter(res_data[site]) + Counter(json_data))

    for site in res_data:
        err_item_list = []
        for counter in res_data[site]:
            err_item_list.append('{} fail {} times'.format(
                counter[5:], res_data[site][counter]))
        res_data[site] = err_item_list

    return render_template('failCount.html',
                           timeRange=time_range,
                           timePeriod=time_period,
                           redisLog=res_data,
                           siteList=site_list)
Example No. 3
def log_list():
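    """Render a paginated view of the 'LOG:*' records stored in Redis."""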
    site_list = [item['site'] for item in parameter.SITE_CFG()]
    page = request.args.get('page', default=1, type=int)
    r = redis.Redis(host=parameter.REDIS_SERVER(),
                    port=parameter.REDIS_PORT(),
                    decode_responses=True)
    log_period = parameter.LOGGING_PERIOD()
    redis_log_list = []
    index, redis_key = r.scan(cursor=0,
                              match='LOG:*',
                              count=parameter.REDIS_SCAN_AMOUNT())
    redis_key.sort(reverse=True)
    log_num = len(redis_key)
    total_page = math.ceil(log_num / PAGE_SIZE)
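    # page=0 means "show every record"; any other value selects one page.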
    if page == 0:
        key_list = redis_key
    else:
        start = (page - 1) * PAGE_SIZE
        if page > total_page:
            key_list = []
        elif page == total_page:
            key_list = redis_key[start:]
        else:
            key_list = redis_key[start:(start + PAGE_SIZE)]

    for key in key_list:
        res_data = {}
        for site in site_list:
            res_data[site] = ''
        hash_data = r.hgetall(key)
        timestamp = int(key[4:])
        redislog_time = datetime.fromtimestamp(timestamp,
                                               LOCAL_TZ).strftime(TIME_FORMAT)
        res_data['logTime'] = redislog_time
        for site in hash_data:
            json_data = json.loads(hash_data[site])
            err_item_list = []
            for counter in json_data:
                err_item_list.append('{} fail {} times'.format(
                    counter[5:], json_data[counter]))
            res_data[site] = err_item_list
        redis_log_list.append(res_data)

    return render_template('logList.html',
                           redisLogList=redis_log_list,
                           totalPage=total_page,
                           currentPage=page,
                           pageAmount=PAGE_SIZE,
                           siteList=site_list,
                           logPeriod=log_period)
Example No. 4
    def fail_alert(self, send_email=True):
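        """Alert when a site's failures within the check period reach the threshold."""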
        check_period = parameter.FAIL_ALERT_PERIOD_MINS()
        fail_thres = parameter.FAIL_ALERT_THRESHOLD()
        delta_time = timedelta(minutes=check_period)
        has_failure = False
        mail_content = ''

        site_list = [item['site'] for item in parameter.SITE_CFG()]
        r = redis.Redis(host=parameter.REDIS_SERVER(),
                        port=parameter.REDIS_PORT(),
                        decode_responses=True)
        index, redis_key = r.scan(cursor=0,
                                  match='LOG:*',
                                  count=parameter.REDIS_SCAN_AMOUNT())
        redis_key.sort(reverse=True)

        target_ts = int((datetime.now() - delta_time).timestamp())
        res_data = {}
        for key in redis_key:
            hash_data = r.hgetall(key)
            timestamp = int(key[4:])
            if timestamp < target_ts:
                break

            for site in hash_data:
                json_data = json.loads(hash_data[site])
                if site not in res_data:
                    res_data[site] = json_data
                else:
                    res_data[site] = dict(
                        Counter(res_data[site]) + Counter(json_data))

        for site in res_data:
            period_err = 0
            for counter in res_data[site]:
                period_err += res_data[site][counter]
            if period_err >= fail_thres:
                has_failure = True
                mail_content += '{}: hit the failure alert threshold\n'.format(site)

        if has_failure:
            if send_email:
                self.send_fail_letter(parameter.FAIL_ALERT_SUB(), mail_content,
                                      None)
Example No. 5
def login():
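    """Authenticate against the Redis 'users' hash and log the user in."""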
    if request.method == 'GET':
        return render_template('auth.html')
    r = redis.Redis(host=parameter.REDIS_SERVER(),
                    port=parameter.REDIS_PORT(),
                    decode_responses=True)
    user_id = request.form['userId']

    if r.hexists('users', user_id):
        hash_pw = hashlib.md5(request.form['password'].encode()).hexdigest()
        store_pw = json.loads(r.hget('users', user_id))['password']
        if hash_pw == store_pw:
            user = User()
            user.id = user_id
            login_user(user)
            return redirect('/log/newest')

    flash('Wrong user ID or password', 'warning')
    return render_template('auth.html')
Example No. 6
def hook():
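    """Store incoming query parameters and JSON body in the Redis 'hook' hash and echo them back."""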
    r = redis.Redis(host=parameter.REDIS_SERVER(),
                    port=parameter.REDIS_PORT(),
                    decode_responses=True)
    r.delete('hook')
    res_data = {}
    for key, val in request.args.items():
        r.hset('hook', key, unquote(val))
    res_data.update(request.args)
    # silent=True avoids an error when the request body is not valid JSON.
    data = request.get_json(silent=True)
    if data:
        for key, val in data.items():
            r.hset('hook', key, json.dumps(val))
            res_data[key] = val
    else:
        data = request.get_data()
        if len(data) > 0:
            res_data['raw_res'] = str(data)

    return jsonify(res_data)
Example No. 7
    def check_log(self, send_email=True):
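        """Parse recent test logs, record failures in Redis, and send notifications."""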
        finish_pattern = re.compile(r'Ran (\d*) tests in (\d*)\.(\d*)s')
        fail_pattern = re.compile(r'FAIL: (\w*) ')
        error_pattern = re.compile(r'ERROR: (\w*) ')
        site_cfg = parameter.SITE_CFG()

        mail_content = ''
        mail_attach_list = []
        has_failure = False
        r = redis.Redis(host=parameter.REDIS_SERVER(),
                        port=parameter.REDIS_PORT(),
                        decode_responses=True)
        redis_log_time = int(datetime.now().timestamp())

        # Clear the previous 'latest' record before writing the new one.
        r.delete('latest')
        r.hset('latest', 'timestamp', redis_log_time)

        for config in site_cfg:
            log_list = glob.glob('/log/' + config['site'] +
                                 '_test_result_*.log')
            error_count = defaultdict(int)
            for log in log_list:
                log_content = ''
                fail_list = []
                error_list = []
                log_time = get_log_ts(log)
                if datetime.now() < (log_time + timedelta(
                        minutes=parameter.TEST_DURATION_MINUTES())):
                    continue

                with open(log, 'r') as f:
                    log_content = f.read()

                # Parse test runs that finished.
                if len(finish_pattern.findall(log_content)) == 1:
                    fail_list = fail_pattern.findall(log_content)
                    error_list = error_pattern.findall(log_content)
                    if len(fail_list) > 0 or len(error_list) > 0:
                        has_failure = True
                        mail_content += 'Site: {} at {}: failed items\n'.format(
                            config['site'],
                            log_time.astimezone(
                                pytz.timezone(parameter.LOCAL_TIMEZONE())))
                        mail_content += '    FAIL: {}\n'.format(fail_list)
                        mail_content += '    ERROR: {}\n'.format(error_list)
                        mail_content += '\n'
                        mail_content += '\n'
                        mail_attach_list.append(log)
                        mail_attach_list.append(get_api_res_record(log))

                        for fail in fail_list:
                            error_count[fail] += 1

                        for error in error_list:
                            error_count[error] += 1
                    else:
                        os.remove(log)
                        os.remove(get_api_res_record(log))

                # Record test runs that hung for a long time.
                else:
                    has_failure = True
                    mail_content += 'Site: {} at {}: test run did not finish\n\n\n'.format(
                        config['site'],
                        log_time.astimezone(
                            pytz.timezone(parameter.LOCAL_TIMEZONE())))
                    os.remove(log)
                    os.remove(get_api_res_record(log))

            if error_count:
                mail_content += 'Failures for site {} in this check period\n'.format(
                    config['site'])
                redis_data_dict = {}
                for key in error_count:
                    mail_content += '{}: {}\n'.format(key, error_count[key])
                    redis_data_dict[key] = error_count[key]
                mail_content += '\n\n'
                redis_data_key = 'LOG:' + str(redis_log_time)
                redis_data_field = config['site']
                r.hset(redis_data_key, redis_data_field,
                       json.dumps(redis_data_dict))
                r.hset('latest', redis_data_field, json.dumps(redis_data_dict))
                r.expire(redis_data_key, time=LOG_TIMEOUT_SECS)

            # If a site has no test log in the recent check period, mark it as NOT RUNNING.
            elif not log_list:
                redis_data_field = config['site']
                r.hset('latest', redis_data_field, 'NOT RUNNING')
                has_failure = True
                mail_content += f'Site: {config["site"]} has no testing result in this checking period.\n\n\n'

        if has_failure:
            try:
                send_slack_msg(mail_content)
            except Exception as e:
                mail_content += f'Sending message to Slack failed: {e}'

            if send_email:
                self.send_fail_letter(parameter.NOTIF_MAIL_SUB(), mail_content,
                                      mail_attach_list)

            # Back up failure logs for download.
            print(f'Mail attachments:\n{mail_attach_list}')
            if mail_attach_list:
                zip_file = '/webpage/static/testLog/{}.zip'.format(
                    redis_log_time)
                compress_log_to_zip(zip_file, mail_attach_list)

                for rm_log in mail_attach_list:
                    os.remove(rm_log)