Example #1
def start_server():
    """
    Start the server if needed. Used only for the Anchor and Overnight tests.
    """
    iterator = 0
    os.system('chmod +x ' + gp.server.get_full_path_executable())
    while iterator < 10:
        pid = os.popen('pgrep ' + gp.server.get_executable_name()).read()

        if pid != '':
            fun.os_system('kill -9 ' + pid)

        os.system(gp.server.get_full_path_executable() + ' >>/dev/null &')

        time.sleep(0.5)

        pid = os.popen('pgrep ' + gp.server.get_executable_name()).read()

        if pid == '':
            gp.print_log(gp.LogLevel.Normal,
                         'Can not start or find ServerAgora Process!!\n')
            iterator += 1
        else:
            gp.print_log(gp.LogLevel.Normal, 'Server PID is ' + pid)
            break
Example #2
def clean_up(is_match):
    gp.print_log(gp.LogLevel.Normal, 'Cleaning Up Folders')
    anchor_result_dir = gp.result_dir[:-1] + '_anchor/'
    if gp.connection_type == gp.connection[1]:
        if is_match:
            gp.remove_dir(gp.cur_log_dir)
            gp.remove_dir(gp.ref_log_dir)
            gp.remove_dir(gp.result_dir)
            gp.remove_dir(anchor_result_dir)
            if gp.cur_platform != 'Linux':
                for seqIdx in gp.sequences:
                    gp.remove_dir(gp.sequence_dir + gp.sequences[seqIdx])
        else:
            gp.remove_dir(gp.result_dir)
            gp.remove_dir(anchor_result_dir)
    elif gp.connection_type == gp.connection[0]:
        if gp.mode == 'Overnight':
            if gp.save_to_backup_dir:
                gp.create_dir(gp.backup_log_dir)
                gp.zip_to_folder(gp.cur_log_dir[:-1] + '.zip', gp.cur_log_dir,
                                 gp.backup_log_dir)

                gp.create_dir(gp.temp_dir)
                zip_name = gp.cur_time + gp.folder_join + gp.cur_commit_id[
                    0:7] + '.zip'
                gp.zip_to_folder(zip_name, gp.result_dir, gp.temp_dir)

            gp.remove_dir(gp.cur_log_dir)
            gp.remove_dir(gp.ref_log_dir)
            gp.remove_dir(gp.result_dir)
        else:
            gp.remove_dir(anchor_result_dir)
            os_system('mv ' + gp.result_dir + ' ' + anchor_result_dir)
        gp.print_log(gp.LogLevel.Normal, 'Finish Cleaning Up Folders')
Example #3
def generate_one_cell(value, lower_range=None, higher_range=None):
    cell = '<td>'
    fail = 0
    if isinstance(value, str):
        cell += value
    elif isinstance(value, tuple):
        for data in value:
            if lower_range is not None and higher_range is not None \
                    and (data < lower_range or data > higher_range):
                cell += '<span style="color: red">%.2f</span> / ' % data
                fail = 1
            else:
                cell += '%.2f / ' % data
        cell = cell[:-3]
    elif isinstance(value, float):
        if lower_range is not None and higher_range is not None and (
                value < lower_range or value > higher_range):
            cell += '<span style="color: red">%.2f</span>' % value
            fail = 1
        else:
            cell += '%.2f' % value
    else:
        gp.print_log(gp.LogLevel.Normal,
                     'Error type in generate_one_cell ' + str(type(value)))
        exit()
    cell += '</td>'
    return cell, fail
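For reference, a minimal standalone sketch of the range check above: values outside [lower_range, higher_range] are wrapped in a red span and flagged as failed. The helper name below is illustrative, not part of the project.

# Standalone illustration of the out-of-range highlighting in generate_one_cell();
# not project code.
def format_metric_cell(value, lower, higher):
    if value < lower or value > higher:
        return '<td><span style="color: red">%.2f</span></td>' % value, 1
    return '<td>%.2f</td>' % value, 0

print(format_metric_cell(35.2, 30.0, 40.0))  # ('<td>35.20</td>', 0)
print(format_metric_cell(28.7, 30.0, 40.0))  # red span, fail flag set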
Example #4
def send_server_test_email(content):
    subject = 'Server Overnight Test Result! Time: ' + time.strftime(
        '%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))
    if gp.cur_platform == 'Linux':
        if type(gp.log_file) == file:
            gp.log_file.close()
            utils.send_alert_email2(
                [subject, content,
                 open('Overnight.log', 'rb').read()], [
                     '*****@*****.**', '*****@*****.**',
                     '*****@*****.**', '*****@*****.**'
                 ], 'outlook')
        else:
            utils.send_alert_email([(subject, content)], [
                '*****@*****.**', '*****@*****.**', '*****@*****.**',
                '*****@*****.**'
            ], 'outlook')
    else:
        trial_time = 0
        while trial_time < 5:
            mail_user = raw_input('Please enter your email: ')
            if mail_user == 'skip':
                gp.print_log(gp.LogLevel.Normal, 'skip sending the email')
                return
            mail_pass = getpass.getpass('Please enter your password: ')
            try:
                smtp_obj = smtplib.SMTP('smtp.office365.com', 587)
                smtp_obj.starttls()
                smtp_obj.ehlo()
                smtp_obj.login(mail_user, mail_pass)
                break
            except smtplib.SMTPException:
                trial_time += 1
                gp.print_log(gp.LogLevel.Normal,
                             'Error login data, please enter again!')

        if trial_time == 5:
            gp.print_log(
                gp.LogLevel.Normal,
                'Exceed maximum times of trial, please check your email and password again!'
            )
            smtp_obj.close()
            return

        sender = mail_user
        receivers = ['*****@*****.**']

        message = MIMEText(content, 'html', 'utf-8')

        message['Subject'] = Header(subject, 'utf-8')

        smtp_obj.sendmail(sender, receivers, message.as_string())
        smtp_obj.close()

    if gp.cur_platform == 'Linux' and gp.on_server == 0:
        gp.print_log(gp.LogLevel.Normal, 'Skip send email!')
    else:
        gp.print_log(gp.LogLevel.Normal, 'Success send the email!')
Example #5
def generate_cross_row_cell(value, row=1):
    if isinstance(value, str):
        return '<td rowspan="' + str(row) + '">' + value + '</td>'
    elif isinstance(value, float):
        return '<td rowspan="' + str(row) + '">' + ('%.2f' % value) + '</td>'
    else:
        gp.print_log(gp.LogLevel.Normal,
                     'Error type in GenerateCrossRowCell ' + str(type(value)))
        exit()
Example #6
def running_with_delay(client):
    gp.print_log(
        gp.LogLevel.Debug,
        'Entering running_with_delay at directory ' + os.getcwd() +
        ' with client ' + client.case_ + ' UID ' + str(client.uid_))
    assert isinstance(client, Client)
    running_time = 0
    doc_list = os.listdir('./')
    for doc in doc_list:
        if re.search('_timestamp_', doc):
            running_time = calculate_running_time(doc)
            if running_time > client.duration_ + 2:
                gp.print_log(
                    gp.LogLevel.Normal, 'Error Running Case ' + client.case_ +
                    ' Client with UID ' + str(client.uid_) + ', Duration ' +
                    str(client.duration_) + '. Running Time not sufficient ' +
                    str(running_time) + ' seconds with documents ' + doc)
                gp.print_log(gp.LogLevel.Normal,
                             'Used capacity ' + str(gp.used_capacity.value))
                return True

    gp.print_log(
        gp.LogLevel.Debug, 'Success Running Case ' + client.case_ +
        ' Client with UID ' + str(client.uid_) + ', Duration ' +
        str(client.duration_) + ', Running Time totally sufficient ' +
        str(running_time) + ' seconds with dir ' + os.getcwd())

    return False
Example #7
def analyse_data(scenario, anchor, test):
    anchor_dir = gp.generate_dir_path(anchor, scenario)
    test_dir = gp.generate_dir_path(test, scenario)
    is_match = True

    if os.path.isdir(test_dir) and os.path.isdir(anchor_dir):
        room_list = os.listdir(test_dir)
        for room in room_list:
            test_room_dir = gp.generate_dir_path(test_dir, room)
            anchor_room_dir = gp.generate_dir_path(anchor_dir, room)
            if os.path.isdir(test_room_dir) and os.path.isdir(anchor_room_dir):
                uid_list = os.listdir(test_room_dir)
                is_room_totally_match = True
                for uid in uid_list:
                    test_uid_dir = gp.generate_dir_path(test_room_dir, uid)
                    anchor_uid_dir = gp.generate_dir_path(anchor_room_dir, uid)
                    if not os.path.isdir(test_uid_dir):
                        continue

                    case = uid.split(gp.string_join)
                    case[0] = int(case[0])
                    if os.path.isdir(test_uid_dir) and os.path.isdir(
                            anchor_uid_dir):
                        result = analyse_encoder_data(anchor_uid_dir,
                                                      test_uid_dir)
                        is_match = is_match & result
                        is_room_totally_match = is_room_totally_match & result

                doc_list = os.listdir(test_room_dir)
                for doc in doc_list:
                    if re.search('crash.txt', doc):
                        is_room_totally_match = False
                        is_match = False

                doc_list = os.listdir(anchor_room_dir)
                for doc in doc_list:
                    if re.search('crash.txt', doc):
                        is_room_totally_match = False
                        is_match = False

                if is_room_totally_match:
                    gp.remove_dir(test_room_dir)
                    gp.remove_dir(anchor_room_dir)

    if is_match == 0:
        gp.print_log(gp.LogLevel.Normal, '\nMismatch Detected!!!\n')
    else:
        gp.print_log(gp.LogLevel.Normal, '\nAll Match!!!\n')

    return is_match
Example #8
def analyse_encoder_for_one_room(room_dir, room):
    output = ''
    client_list = os.listdir(room_dir)
    for client in client_list:
        output += room + gp.string_join + client + '\n'
        client_dir = gp.generate_dir_path(room_dir, client)
        if os.path.isdir(client_dir):
            anchor_list = os.listdir(client_dir)
            for anchor_file in anchor_list:
                if re.search('enc_offline_test_0', anchor_file):
                    output += analyse_encoder_for_one_client(client_dir +
                                                             anchor_file)
                    break
        else:
            gp.print_log(gp.LogLevel.Normal,
                         'Error: Folders do not exist! ' + room_dir + client)
            exit()

    return output
Example #9
def gen_dir(scenario):
    gp.print_log(gp.LogLevel.Normal, 'Generating directories...')
    gp.print_log(gp.LogLevel.Normal, 'Creating necessary folders...')
    result_scenario_dir = gp.generate_dir_path(gp.result_dir, scenario)
    gp.create_dir(result_scenario_dir)
    if gp.connection_type == gp.connection[1]:
        ref_log_scenario_dir = gp.generate_dir_path(gp.ref_log_dir, scenario)
        gp.create_dir(ref_log_scenario_dir)
    if gp.mode == 'Overnight' and not os.path.isdir(gp.backup_log_dir):
        gp.create_dir(gp.backup_log_dir)
    if gp.mode == 'Regression':
        if gp.connection_type == gp.connection[0]:
            gp.client.set_executable_dir(
                gp.generate_dir_path(gp.executable_dir, 'anchor'))
        else:
            gp.client.set_executable_dir(
                gp.generate_dir_path(gp.executable_dir, 'test'))
    else:
        gp.client.set_executable_dir(
            gp.generate_dir_path(gp.executable_dir, 'overnight'))
Example #10
def analyse_decoder_for_one_room(room_dir, room):
    output = ''
    client_list = os.listdir(room_dir)
    for client in client_list:
        output += room + gp.string_join + client + '\n'
        client_dir = gp.generate_dir_path(room_dir, client)
        if os.path.isdir(client_dir):
            anchor_list = os.listdir(client_dir)
            result = []
            for anchor_file in anchor_list:
                if re.search('dec_offline_test', anchor_file):
                    result.append(
                        analyse_decoder_for_one_client(client_dir +
                                                       anchor_file))

            for data in result:
                found = False
                for anchor_file in anchor_list:
                    if re.search('vqmg_' + data[1][1] + '.', anchor_file):
                        vqmg_file = client_dir + anchor_file
                        fp = open(vqmg_file, 'r')
                        fp.readline()
                        tmp = fp.readline().split('\n')[0].split('\t')[-1]
                        fp.close()
                        data[1][0] = data[1][0] + 'vqmg\t' + tmp + '\n'
                        found = True

                if not found:
                    data[1][0] = data[1][0] + 'vqmg\t0\n'

            result.sort()
            for idx in range(0, len(result)):
                output += 'For' + gp.string_join + 'UID' + gp.string_join + str(
                    result[idx][0]) + '\n'
                output += result[idx][1][0]
        else:
            gp.print_log(gp.LogLevel.Normal,
                         'Error: Folders do not exist! ' + client_dir)
            exit()

    return output
Example #11
def calculate_running_time(doc):
    gp.print_log(
        gp.LogLevel.Debug, 'Entering calculate_running_time at directory ' +
        os.getcwd() + ' with doc ' + doc)
    fp = open(doc, 'r')
    start_time = end_time = 0
    for lines in fp:
        end_time = int(lines.split('\n')[0])
        if start_time == 0:
            start_time = end_time

    gp.print_log(
        gp.LogLevel.Debug,
        'Start time is ' + str(start_time) + ', End time is ' + str(end_time))

    if end_time < start_time:
        end_time += 1000000

    fp.close()

    gp.print_log(
        gp.LogLevel.Debug, 'Finish calculate_running_time at directory ' +
        os.getcwd() + ' with doc ' + doc)

    return (end_time - start_time) / 1000
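A minimal standalone sketch of the same arithmetic, assuming each timestamp file holds one millisecond counter per line that wraps at 1,000,000 ms (the wrap constant comes from the code above):

# Standalone sketch of calculate_running_time()'s math; not project code.
def running_time_seconds(timestamps_ms):
    start, end = timestamps_ms[0], timestamps_ms[-1]
    if end < start:            # the counter wrapped around
        end += 1000000
    return (end - start) / 1000.0

print(running_time_seconds([995000, 998000, 3000]))  # 8.0 seconds across the wrap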
Example #12
def find_latest_ref_log(commit_id):
    gp.ref_log_dir = ''
    ref_file = ''
    if os.path.isdir(gp.backup_log_dir):
        zip_list = os.listdir(gp.backup_log_dir)
        time_commit = 0
        time_run = 0
        for zip_file in zip_list:
            tmp = zip_file.split('.')[0].split(gp.folder_join)
            # to avoid .DS_Store file problem
            if tmp[0] != '':
                time_run_tmp = int(tmp[0])
                time_commit_id_tmp = tmp[1]
                time_commit_tmp = int(tmp[2])
                if commit_id != '':
                    if re.search(
                            commit_id,
                            time_commit_id_tmp) and time_run < time_run_tmp:
                        time_run = time_run_tmp
                        ref_file = zip_file
                else:
                    if time_commit < time_commit_tmp:
                        if time_run < time_run_tmp:
                            time_run = time_run_tmp
                            time_commit = time_commit_tmp
                            commit_id = tmp[1]
                            ref_file = zip_file

    if ref_file != '':
        gp.print_log(gp.LogLevel.Normal, 'Selected commit id is ' + commit_id)

        os.system('unzip -oq ' + gp.backup_log_dir + ref_file + ' -d ' +
                  gp.data_dir)
        gp.ref_log_dir = gp.generate_dir_path(gp.data_dir,
                                              ref_file.split('.')[0])

        return commit_id[0:7]

    gp.print_log(gp.LogLevel.Normal, 'No commit is available!!!')
    return ''
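Judging from the parsing above, the backup archives are named '<run_time><sep><commit_id><sep><commit_time>.zip'. A minimal standalone sketch of the selection rule, assuming '_' as the separator (the project uses gp.folder_join) and made-up file names:

# Standalone sketch of the newest-archive selection in find_latest_ref_log();
# the '_' separator and the sample names are assumptions, not project data.
def pick_latest(zip_names, commit_id=''):
    best, best_run, best_commit_time = '', 0, 0
    for name in zip_names:
        parts = name.split('.')[0].split('_')
        if parts[0] == '':                    # skip entries such as .DS_Store
            continue
        run_time, cid, commit_time = int(parts[0]), parts[1], int(parts[2])
        if commit_id:
            if commit_id in cid and run_time > best_run:
                best, best_run = name, run_time
        elif commit_time > best_commit_time and run_time > best_run:
            best, best_run, best_commit_time = name, run_time, commit_time
    return best

print(pick_latest(['20240101_abc1234_100.zip', '20240202_def5678_200.zip']))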
Example #13
def scale_yuv(*args):
    seq_idx = args[0]
    gp.print_log(gp.LogLevel.Normal,
                 '\tScaling ' + gp.sequences[seq_idx] + '...')
    ori_sequence = gp.sequences[seq_idx] + '.yuv'
    sequence_dir = gp.generate_dir_path(gp.sequence_dir, gp.sequences[seq_idx])
    if not os.path.exists(sequence_dir):
        gp.create_dir(sequence_dir)
    if not os.path.exists(sequence_dir + ori_sequence):
        if gp.cur_platform == 'Linux':
            os_system('cp ' + gp.sequence_dir + gp.sequences[seq_idx] +
                      '.yuv ' + sequence_dir)
        else:
            os_system('unzip ' + gp.sequence_dir + gp.sequences[seq_idx] +
                      '.yuv.zip -d ' + sequence_dir)

    tmp = ori_sequence.split('_')
    ori_width = int(tmp[-2].split('x')[0])
    ori_height = int(tmp[-2].split('x')[1].split('p')[0])
    ori_fps = int(tmp[-2].split('x')[1].split('p')[1])

    for seq_name in gp.seq_candidates[seq_idx]:
        tmp_seq_name = seq_name.split('_')
        width = int(tmp_seq_name[3].split('p')[0].split('x')[0])
        height = int(tmp_seq_name[3].split('p')[0].split('x')[1])
        fps = int(tmp_seq_name[3].split('p')[1])

        scale_seq_name = ori_sequence
        scale_width = ori_width
        scale_height = ori_height
        if width * ori_height != height * ori_width:
            if width * 1.0 / height < ori_width * 1.0 / ori_height:
                scale_width = ori_height * width / height
                scale_height = ori_height
            else:
                scale_width = ori_width
                scale_height = ori_width * height / width

            scale_seq_name = tmp[0] + '_' + tmp[1] + '_' + tmp[2] + '_' \
                             + str(scale_width) + 'x' + str(scale_height) + 'p' \
                             + str(ori_fps) + '_' + tmp[4]
            if not os.path.exists(sequence_dir + scale_seq_name):
                os_system(gp.scale.get_full_path_executable() + ' ' +
                          str(ori_width) + ' ' + str(ori_height) + ' ' +
                          str(scale_width) + ' ' + str(scale_height) + ' ' +
                          sequence_dir + ori_sequence + ' ' + sequence_dir +
                          scale_seq_name + ' 4 1')
                gp.print_log(gp.LogLevel.Normal, 'Scaling ' + scale_seq_name)

        if not os.path.exists(sequence_dir + seq_name):
            os_system(gp.scale.get_full_path_executable() + ' ' +
                      str(scale_width) + ' ' + str(scale_height) + ' ' +
                      str(width) + ' ' + str(height) + ' ' + sequence_dir +
                      scale_seq_name + ' ' + sequence_dir + seq_name + ' 3 ' +
                      str(ori_fps) + ' ' + str(fps))
            gp.print_log(gp.LogLevel.Normal, 'Scaling ' + seq_name)
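The intermediate resolution computed above scales a single dimension so the source matches the target aspect ratio before the final resize. A minimal standalone sketch of that step:

# Standalone sketch of the aspect-ratio step in scale_yuv(); not project code.
def intermediate_resolution(ori_w, ori_h, target_w, target_h):
    if target_w * ori_h == target_h * ori_w:        # ratios already match
        return ori_w, ori_h
    if float(target_w) / target_h < float(ori_w) / ori_h:
        return ori_h * target_w // target_h, ori_h  # source is wider: shrink width
    return ori_w, ori_w * target_h // target_w      # source is narrower: shrink height

print(intermediate_resolution(1920, 1080, 720, 720))  # (1080, 1080)
print(intermediate_resolution(1280, 720, 640, 360))   # (1280, 720), ratios already match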
Example #14
def run_client(*args):
    success = gp.RunningState.Unfinished
    cmd = ''
    client = gp.clients[args[0]]
    assert isinstance(client, Client)

    tmp = client.resolution_.split('x')
    width = int(tmp[0])
    height = int(tmp[1])
    try:
        cur_pid = os.popen('pgrep ' + gp.server.get_executable_name()).read()
        if gp.connection_type == gp.connection[0] and cur_pid == '':
            gp.print_log(
                gp.LogLevel.Normal, 'Can not find ServerAgora process, Exit ' +
                str(client.room_) + ' and uid ' + str(client.uid_))
            return

        time.sleep(1)

        client_dir = args[1]
        scenario = args[2]
        os.chdir(client_dir)
        cmd = gp.client.get_executable_name() + ' ' + str(client.room_) + ' ' + str(client.uid_) + ' ' \
              + str(client.fps_) + ' ' + str(width) + ' ' + str(height) + ' ' + str(client.rate_) + ' ' \
              + client.sequence_dir_ + client.anchor_seq_ + ' ' \
              + tc.suit[client.case_.split(gp.folder_join)[0]][0] + ' ' + tc.suit[client.case_.split(gp.folder_join)[0]][1] + ' ' \
              + tc.suit[client.case_.split(gp.folder_join)[0]][2] + ' ' + gp.networks[client.network_] + ' ' + str(client.duration_)

        start_time = time.time()
        if gp.cur_platform == 'Linux':
            result = os_system('LD_LIBRARY_PATH=' + gp.result_dir + scenario +
                               ' ./' + cmd)
        else:
            result = os_system('./' + cmd)
        end_time = time.time()

        time.sleep(1)
        # Clean up zero-byte files and strip the time prefix from the rest
        doc_list = os.listdir('./')
        for doc in doc_list:
            if os.path.getsize(doc) == 0:
                os_system('rm -rf ' + doc)
            else:
                t = re.search('..?h_..?m_..?s_', doc)
                if t is not None:
                    new_file = doc.replace(t.group(0), '')
                    if not os.path.exists(new_file) or os.path.getsize(
                            new_file) < os.path.getsize(doc):
                        os_system('mv ' + doc + ' ' + new_file)
                    elif os.path.exists(new_file):
                        os_system('rm ' + doc)

        gp.print_log(
            gp.LogLevel.Debug,
            'Start checking time of ' + client.case_ + ' Client with UID ' +
            str(client.uid_) + ', Duration ' + str(client.duration_) +
            ', Running Time ' + str(end_time - start_time) + ' seconds.')

        if running_with_delay(client):
            return

        gp.print_log(
            gp.LogLevel.Debug,
            'Finish checking time of ' + client.case_ + ' Client with UID ' +
            str(client.uid_) + ', Duration ' + str(client.duration_) +
            ', Running Time ' + str(end_time - start_time) + ' seconds.')

        time.sleep(1)

        os.chdir(gp.data_dir)

        pid_temp = os.popen('pgrep ' + gp.server.get_executable_name()).read()
        if gp.connection_type == gp.connection[0] and (pid_temp == ''
                                                       or pid_temp != cur_pid):
            gp.print_log(
                gp.LogLevel.Normal,
                'ServerAgora is down, Re-run the process with case ' +
                client.case_ + ' and uid ' + str(client.uid_) +
                ' ' + time.strftime('%Y_%m_%d_%H_%M_%S',
                                    time.localtime(time.time())))
            return

        if end_time - start_time > client.duration_ + 6:
            gp.print_log(
                gp.LogLevel.Normal,
                'Case ' + client.case_ + ' Client with UID ' +
                str(client.uid_) + ', Duration ' + str(client.duration_) +
                ', Running Time ' + str(end_time - start_time) +
                ' seconds which is larger than expected ' +
                str(client.duration_))
            gp.print_log(gp.LogLevel.Normal, 'Command Line is ' + cmd)

        if result != 0:
            gp.print_log(
                gp.LogLevel.Normal, 'Case ' + client.case_ +
                ' Client with UID ' + str(client.uid_) + ', Duration ' +
                str(client.duration_) + ' Crashed!!!!!')
            gp.print_log(gp.LogLevel.Normal, 'Command Line is ' + cmd)
            success = gp.RunningState.Crash
            gp.scenario_crash.value += 1
        else:
            gp.print_log(
                gp.LogLevel.Debug,
                'Finish Running Case ' + client.case_ + ' Client with UID ' +
                str(client.uid_) + ', Duration ' + str(client.duration_) +
                ', Running Time ' + str(end_time - start_time) + ' seconds.')
            success = gp.RunningState.Success

    finally:
        if success == gp.RunningState.Unfinished:
            gp.print_log(
                gp.LogLevel.Normal, 'Rerun Running Case ' + client.case_ +
                ' Client with UID ' + str(client.uid_) + ', Duration ' +
                str(client.duration_) + '. Check it out!!!!!')
            gp.print_log(gp.LogLevel.Normal, 'Command Line is ' + cmd)

        if success == gp.RunningState.Crash:
            file_crash = open(client_dir + '../crash.txt', 'a')
            file_crash.write(cmd)
            file_crash.close()

        gp.process_lock.acquire()
        gp.client_flag[args[0]] = success
        gp.running_process.value -= 1
        gp.used_capacity.value -= client.capacity_
        gp.process_lock.release()
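The cleanup loop in run_client() strips the '<h>h_<m>m_<s>s_' time tag from output file names before comparing sizes. A minimal standalone sketch of that rename rule:

import re

# Standalone sketch of the time-prefix stripping in run_client(); not project code.
def strip_time_prefix(name):
    match = re.search('..?h_..?m_..?s_', name)
    return name.replace(match.group(0), '') if match else name

print(strip_time_prefix('1h_23m_45s_enc_offline_test_0.txt'))  # enc_offline_test_0.txt
print(strip_time_prefix('enc_offline_test_0.txt'))             # unchanged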
Example #15
            gp.seq_candidates[key] = [self.anchor_seq_]


def config_test_case(scenario):
    gp.seq_candidates.clear()
    room_num = 0
    gp.clients = []
    for case in tc.cases:
        if re.search(scenario, case[0]) and re.search(gp.mode, case[0]):
            uid = 0
            for idx in range(0, case[1]):
                gp.clients.append(Client(case, idx, uid, case[1]))
                uid += 1
            room_num += 1

    gp.print_log(gp.LogLevel.Normal, 'Totally ' + str(room_num) + ' cases!')


def get_next_case():
    all_start = True
    all_finish = True
    start_idx = len(gp.clients)
    end_idx = len(gp.clients)

    for x in range(0, len(gp.clients)):
        if gp.client_flag[x] != gp.RunningState.Success:
            all_finish = False
        if gp.client_flag[x] == gp.RunningState.Unfinished or gp.client_flag[
                x] == gp.RunningState.Crash:
            all_start = False
    if all_finish is True or all_start is True:
Example #16
def os_system(cmd):
    gp.print_log(gp.LogLevel.Info, cmd)
    # return os.system(cmd+' >>log.txt')
    return os.system(cmd + ' >>/dev/null')
Example #17
def run_one_scenario(scenario):
    """
    Run a single scenario: Comm, Live, or ScSh.
    """
    fun.gen_dir(scenario)
    fun.check_files()

    p = multiprocessing.Pool()

    gp.print_log(gp.LogLevel.Normal, 'Scaling YUVs...')
    for seqIdx in gp.sequences:
        temp = (seqIdx, )
        p.apply_async(fun.scale_yuv, temp)
        # fun.scale_yuv(seqIdx)
    p.close()
    p.join()
    gp.print_log(gp.LogLevel.Normal, '')

    gp.client_flag = gp.mgr.list(
        [gp.RunningState.Unfinished for _ in range(len(gp.clients))])
    p = multiprocessing.Pool()
    gp.print_log(gp.LogLevel.Normal, 'Start offline test...')

    while 1:
        # Find an unfinished case
        gp.process_lock.acquire()
        [all_start, all_finish, start_idx, end_idx] = fun.get_next_case()
        gp.process_lock.release()
        client_num = end_idx - start_idx

        if all_finish:
            break
        if all_start:
            time.sleep(1)
            continue

        require_capacity = 0
        for client_idx in range(start_idx, end_idx):
            client = gp.clients[client_idx]
            assert isinstance(client, fun.Client)
            require_capacity += client.capacity_

        # Wait until enough processors are available
        while 1:
            gp.process_lock.acquire()
            if gp.running_process.value + client_num <= gp.active_process \
                    and gp.used_capacity.value + require_capacity <= gp.capacity:
                gp.running_process.value += client_num
                gp.used_capacity.value += require_capacity
                gp.process_lock.release()
                break
            elif gp.used_capacity.value == 0 and require_capacity > gp.capacity:
                gp.print_log(
                    gp.LogLevel.Normal,
                    "Not enough capacity to run this case: " +
                    gp.clients[start_idx].case_)
            gp.process_lock.release()
            time.sleep(1)

        if gp.connection_type == gp.connection[0] and os.popen(
                'pgrep ' + gp.server.get_executable_name()).read() == '':
            os.system(gp.server.get_full_path_executable() + ' >>/dev/null &')
            gp.print_log(
                gp.LogLevel.Normal, 'Restart Server! Current Server PID is ' +
                os.popen('pgrep ' + gp.server.get_executable_name()).read())

        for client_idx in range(start_idx, end_idx):
            gp.process_lock.acquire()
            gp.client_flag[client_idx] = gp.RunningState.Running
            gp.process_lock.release()
            client = gp.clients[client_idx]
            assert isinstance(client, fun.Client)
            uid_dir_name = str(client.uid_) + gp.string_join + client.config_

            client_dir = gp.generate_dir_path(gp.result_dir, scenario,
                                              client.case_, uid_dir_name)
            gp.create_dir(client_dir)
            fun.os_system('cp ' + gp.network_dir +
                          gp.networks[client.network_] + ' ' + client_dir)
            gp.client.copy_executable_to_dir(client_dir)

            if gp.connection_type == gp.connection[1]:
                log_dir = gp.generate_dir_path(gp.cur_log_dir, scenario,
                                               client.case_, uid_dir_name)
                fun.os_system('cp ' + log_dir + 'enc_online_parameters* ' +
                              client_dir)

            gp.print_log(
                gp.LogLevel.Normal, 'Running Case ' + client.case_ +
                ' Client with UID ' + str(client.uid_) + ', Duration ' +
                str(client.duration_) + ', Time ' + time.strftime(
                    '%Y_%m_%d_%H_%M_%S', time.localtime(time.time())))
            p.apply_async(fun.run_client,
                          args=(client_idx, client_dir, scenario))
            # fun.run_client(client_idx, client_dir, scenario)

        gp.print_log(gp.LogLevel.Normal, '')

    p.close()
    p.join()

    if gp.mode == 'Overnight':
        gp.client_flag = gp.mgr.list(
            [gp.RunningState.Unfinished for _ in range(len(gp.clients))])
        p = multiprocessing.Pool()
        gp.print_log(gp.LogLevel.Normal, 'Start VQM test...')

        while 1:
            # Find an unfinished case
            gp.process_lock.acquire()
            [all_start, all_finish, start_idx, end_idx] = fun.get_next_case()
            gp.process_lock.release()

            if all_finish:
                break
            if all_start:
                time.sleep(1)
                continue

            # Wait until enough processors are available
            for client_idx in range(start_idx, end_idx):
                while 1:
                    gp.process_lock.acquire()
                    if gp.running_process.value < gp.active_process / 2:
                        gp.running_process.value += 1
                        gp.process_lock.release()
                        break
                    gp.process_lock.release()
                    time.sleep(1)

                gp.process_lock.acquire()
                gp.client_flag[client_idx] = gp.RunningState.Running
                gp.process_lock.release()

                client = gp.clients[client_idx]
                assert isinstance(client, fun.Client)
                uid_dir_name = str(
                    client.uid_) + gp.string_join + client.config_
                client_dir = gp.generate_dir_path(gp.result_dir, scenario,
                                                  client.case_, uid_dir_name)
                gp.vqm_test.copy_executable_to_dir(client_dir)
                gp.decode_stream.copy_executable_to_dir(client_dir)
                gp.scale.copy_executable_to_dir(client_dir)

                gp.print_log(
                    gp.LogLevel.Normal, 'Running VQM Case ' + client.case_ +
                    ' Client with UID ' + str(client.uid_) + ', Duration ' +
                    str(client.duration_) + ', Time ' + time.strftime(
                        '%Y_%m_%d_%H_%M_%S', time.localtime(time.time())))
                p.apply_async(fun.run_vqm, args=(client_idx, client_dir))
                # fun.run_vqm(client_idx, client_dir)

            gp.print_log(gp.LogLevel.Normal, '')

        p.close()
        p.join()

    fun.saving_logs(scenario)

    if gp.scenario_crash.value != 0:
        gp.print_log(
            gp.LogLevel.Normal, 'Total ' + str(gp.scenario_crash.value) +
            ' crashes in ' + scenario + '!!!')
        gp.total_crash += gp.scenario_crash.value
        gp.scenario_crash.value = 0
Example #18
def run_test():
    """
    Start running the test
    """
    is_match = True
    for scenario in gp.scenario:
        fun.config_test_case(scenario)
        run_one_scenario(scenario)

        if gp.mode == 'Regression' and gp.connection_type == gp.connection[1]:
            gp.print_log(gp.LogLevel.Normal, 'Analyzing Results')
            is_match = is_match & ar.analyse_data(scenario, gp.cur_log_dir,
                                                  gp.ref_log_dir)
            gp.print_log(gp.LogLevel.Normal, 'Finish Analyzing Results')
        elif gp.mode == 'Overnight':
            gp.print_log(gp.LogLevel.Normal, 'Analyzing Results')
            ao.generate_data(scenario)
            gp.print_log(gp.LogLevel.Normal, 'Finish Analyzing Results')

        gp.print_log(gp.LogLevel.Normal, '')

    if gp.mode == 'Overnight' and gp.ref_log_dir != '':
        gp.print_log(gp.LogLevel.Normal, 'Comparing Results')
        result = ad.compare_data()
        gp.print_log(gp.LogLevel.Normal, 'Finish Comparing Results')

    gp.print_log(gp.LogLevel.Normal, '')

    if gp.mode == 'Regression' and gp.connection_type == gp.connection[1]:
        result = ar.output_mismatch_case(gp.cur_log_dir, gp.ref_log_dir)
        fun.send_unit_test_email(result)
    else:
        if gp.mode == 'Overnight' and gp.ref_log_dir != '' and gp.on_server == 1:
            fun.send_server_test_email(result)

        pid = os.popen('pgrep ' + gp.server.get_executable_name()).read()
        if pid != '':
            fun.os_system('kill -9 ' + pid)

    fun.clean_up(is_match)

    gp.print_log(gp.LogLevel.Normal, 'Finish Running Client!!!!')
Example #19
def send_unit_test_email(content):
    header = '<html>\n<head>\n'
    header += '<h1>\nThis email is the regression comparison results\n</h1>'
    # In Regression, the cur_log_dir is the old one and ref is the new one
    header += '<h2>\n Current Commit:\n</h2>' + gp.read_commit_log(
        gp.ref_log_dir)
    header += '<h2>\n Ref Commit:\n</h2>' + gp.read_commit_log(gp.cur_log_dir)

    if gp.total_crash != 0:
        header += '<h2>\n Total Crash: ' + str(gp.total_crash) + '\n</h2>'
    else:
        header += '<h2>\n No Crash.\n</h2>'
    content = header + content

    if gp.cur_platform == 'Linux':
        subject = 'Server Regression Test Result! Time: ' \
                  + time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))
        if type(gp.log_file) == file:
            gp.log_file.close()
            utils.send_alert_email2(
                [subject, content,
                 open('Regression.log', 'rb').read()], [
                     '*****@*****.**', '*****@*****.**',
                     '*****@*****.**', '*****@*****.**'
                 ], 'outlook')
        else:
            utils.send_alert_email([(subject, content)], [
                '*****@*****.**', '*****@*****.**', '*****@*****.**',
                '*****@*****.**'
            ], 'outlook')
    else:
        trial_time = 0
        while trial_time < 5:
            mail_user = raw_input('Please enter your email: ')
            if mail_user == 'skip':
                gp.print_log(gp.LogLevel.Normal, 'skip sending the email')
                return
            mail_pass = getpass.getpass('Please enter your password: ')
            try:
                smtp_obj = smtplib.SMTP('smtp.office365.com', 587)
                smtp_obj.starttls()
                smtp_obj.ehlo()
                smtp_obj.login(mail_user, mail_pass)
                break
            except smtplib.SMTPException:
                trial_time += 1
                gp.print_log(gp.LogLevel.Normal,
                             'Error login data, please enter again!')

        if trial_time == 5:
            gp.print_log(
                gp.LogLevel.Normal,
                'Exceed maximum times of trial, please check your email and password again!'
            )
            smtp_obj.close()
            return

        sender = mail_user
        receivers = ['*****@*****.**']

        body = '<h1>The Offline Test anchor has been updated</h1>\n'
        mail_body = raw_input('Please enter a short description in one line: ')
        body = body + '<p>' + mail_body + '</p>\n' + content
        message = MIMEText(body, 'html', 'utf-8')

        subject = 'Offline Test Anchor Update'
        message['Subject'] = Header(subject, 'utf-8')

        smtp_obj.sendmail(sender, receivers, message.as_string())
        smtp_obj.close()

    gp.print_log(gp.LogLevel.Normal, 'Success send the email!')
Example #20
import multiprocessing
import os
import sys
import time

import analyse_data as ad
import analyse_overnight as ao
import analyse_regression as ar
import functions as fun
import global_parameters as gp
from pdb import set_trace as bp

os.chdir(gp.data_dir)

gp.print_log(gp.LogLevel.Normal, 'Offline test for AgoraRTCEngine')
gp.print_log(gp.LogLevel.Normal, 'Version ' + str(gp.script_version) + '\n')


def start_server():
    """
    Start the server if needed. Used only for the Anchor and Overnight tests.
    """
    iterator = 0
    os.system('chmod +x ' + gp.server.get_full_path_executable())
    while iterator < 10:
        pid = os.popen('pgrep ' + gp.server.get_executable_name()).read()

        if pid != '':
            fun.os_system('kill -9 ' + pid)
Example #21
def check_files():
    gp.print_log(gp.LogLevel.Normal, 'Checking files...')
    if not os.path.exists(gp.scale.get_full_path_executable()):
        gp.print_log(gp.LogLevel.Normal,
                     '\tERROR!!! missing ' + gp.scale.get_executable_name())
        gp.print_log(gp.LogLevel.Normal, '\tExit.')
        exit()

    if not os.path.exists(gp.client.get_full_path_executable()):
        gp.print_log(gp.LogLevel.Normal,
                     '\tERROR!!! missing ' + gp.client.get_executable_name())
        gp.print_log(gp.LogLevel.Normal, '\tExit.')
        exit()

    if not os.path.exists(
            gp.vqm_test.get_full_path_executable()) and gp.mode == 'Overnight':
        gp.print_log(gp.LogLevel.Normal,
                     '\tERROR!!! missing ' + gp.vqm_test.get_executable_name())
        gp.print_log(gp.LogLevel.Normal, '\tExit.')
        exit()

    if not os.path.exists(gp.decode_stream.get_full_path_executable()
                          ) and gp.mode == 'Overnight':
        gp.print_log(
            gp.LogLevel.Normal,
            '\tERROR!!! missing ' + gp.decode_stream.get_executable_name())
        gp.print_log(gp.LogLevel.Normal, '\tExit.')
        exit()

    for seq_idx in gp.sequences:
        if gp.cur_platform == 'Linux':
            if not os.path.exists(gp.sequence_dir + gp.sequences[seq_idx] +
                                  '.yuv'):
                gp.print_log(
                    gp.LogLevel.Normal, '\tERROR!!! missing ' +
                    gp.sequence_dir + gp.sequences[seq_idx] + '.yuv')
                gp.print_log(gp.LogLevel.Normal, '\tExit.')
                exit()
        else:
            if not os.path.exists(gp.sequence_dir + gp.sequences[seq_idx] +
                                  '.yuv.zip'):
                gp.print_log(
                    gp.LogLevel.Normal, '\tERROR!!! missing ' +
                    gp.sequence_dir + gp.sequences[seq_idx] + '.yuv.zip')
                gp.print_log(gp.LogLevel.Normal, '\tExit.')
                exit()

    for network_idx in range(0, len(gp.networks)):
        if not os.path.exists(gp.network_dir + gp.networks[network_idx]):
            gp.print_log(
                gp.LogLevel.Normal, '\tERROR!!! missing ' + gp.network_dir +
                gp.networks[network_idx])
            gp.print_log(gp.LogLevel.Normal, '\tExit.')
            exit()

    gp.print_log(gp.LogLevel.Normal, '\tAll Found!!!\n')
Example #22
    def is_recorded_data_type(self, data_type, ref_type):
        if data_type not in gp.data_type:
            gp.print_log(gp.LogLevel.Normal,
                         'data_type ' + data_type + ' is not in data_type_!')
        return data_type in gp.data_type and self.map_[data_type] != -1 \
            and data_type == ref_type
Example #23
def run_vqm(*args):
    success = gp.RunningState.Unfinished
    try:
        client = gp.clients[args[0]]
        assert isinstance(client, Client)

        tmp = client.resolution_.split('x')
        width = int(tmp[0])
        height = int(tmp[1])

        if width * height > 1280 * 720:
            gp.print_log(gp.LogLevel.Normal,
                         'VQM skip due to large resolution')
            success = gp.RunningState.Success
            return

        client_dir = args[1]
        os.chdir(client_dir)

        vqm_result = 0
        doc_list = os.listdir('./')
        for doc in doc_list:
            if re.search('dec_save_stream_received', doc):
                handle = doc.split('.')[0].split('_')[-1]
                decode_yuv = 'dec_save_reconstructed_yuv_' + handle + '.yuv'
                os_system('./' + gp.decode_stream.get_executable_name() + ' ' +
                          doc + ' ' + decode_yuv)
                for score_file in doc_list:
                    if re.search('dec_quality_score_' + handle, score_file):
                        break
                for uid_file in doc_list:
                    if re.search('dec_offline_test_' + handle, uid_file):
                        fp = open(uid_file, 'r')
                        fp.readline()
                        data = fp.readline()
                        ref_uid = int(data.split('\t')[1])
                        fp.close()
                        break
                ref_sequence = find_corresponding_orig_yuv(
                    client.case_, ref_uid)
                vqm_result = os_system('./' +
                                       gp.vqm_test.get_executable_name() +
                                       ' ' + str(ref_sequence) + ' ' +
                                       decode_yuv + ' ' + str(score_file) +
                                       ' ' + str(width) + ' ' + str(height) +
                                       ' ' + str(client.fps_) + ' ' +
                                       str(client.duration_) + ' psnr_ssim_' +
                                       handle + '.xls' + ' vqmg_' + handle +
                                       '.xls')
                os.system('rm *.yuv')
        os.chdir(gp.data_dir)

        if vqm_result != 0:
            gp.print_log(gp.LogLevel.Normal, 'VQM return ' + str(vqm_result))
            success = gp.RunningState.Success
        else:
            success = gp.RunningState.Success

    finally:
        gp.process_lock.acquire()
        gp.client_flag[args[0]] = success
        gp.running_process.value -= 1
        gp.process_lock.release()
Example #24
        for case in self.case_set_:
            case.sort_client()

        self.case_set_.sort()

        dec_file = open(dec_file_name, 'r')
        line = dec_file.readline()
        while line != '':
            if re.search('Net', line):
                decoded_uid = int(line.split('\n')[0].split(gp.string_join)[1])
                case_name = line.split('\n')[0].split(gp.string_join)[0]
            elif re.search('For' + gp.string_join + 'UID', line):
                uid = int(line.split('\n')[0].split(gp.string_join)[-1])
                client = self.find_client_by_case_and_uid(case_name, uid)
                if client == 0:
                    gp.print_log(gp.LogLevel.Normal,
                                 'Can not find client by UID ' + str(uid))
                    exit()
                client.add_one_decode_client(decoded_uid)
            else:
                client.add_one_dec_result(line)
            line = dec_file.readline()
        dec_file.close()

    def find_case(self, case):
        for idx in range(0, len(self.case_set_)):
            if self.case_set_[idx].is_same_case(case):
                return self.case_set_[idx]
        return 0

    def find_client_by_case_and_uid(self, case_name, uid):
        for case in self.case_set_: