Example #1
def output_mismatch_case(anchor, test):
    """Build an HTML report listing the mismatch and crash cases per scenario."""
    result = '<html>\n<head>\n'

    for scenario in gp.scenario:
        anchor_dir = gp.generate_dir_path(anchor, scenario)
        test_dir = gp.generate_dir_path(test, scenario)
        header = '<hr>\n'
        header += '<h2>\nComparing ' + scenario + ' result:\n</h2>\n'
        result += header
        mismatch_packet = MisMatchPacket()
        crash_packet = MisMatchPacket()
        if os.path.isdir(test_dir):
            room_list = os.listdir(test_dir)
            for room in room_list:
                test_room_dir = gp.generate_dir_path(test_dir, room)
                anchor_room_dir = gp.generate_dir_path(anchor_dir, room)
                if os.path.isdir(test_room_dir) and os.path.isdir(
                        anchor_room_dir):
                    is_crash = False

                    doc_list = os.listdir(test_room_dir)
                    for doc in doc_list:
                        if re.search('crash.txt', doc):
                            is_crash = True

                    doc_list = os.listdir(anchor_room_dir)
                    for doc in doc_list:
                        if re.search('crash.txt', doc):
                            is_crash = True

                    if is_crash:
                        crash_packet = add_one_case(crash_packet, room)
                    else:
                        mismatch_packet = add_one_case(mismatch_packet, room)

            crash_packet = finish_one_case(crash_packet)
            mismatch_packet = finish_one_case(mismatch_packet)

            if crash_packet.number_ == 0 and mismatch_packet.number_ == 0:
                result += '<p>All Match!!!</p>\n'
            else:
                if mismatch_packet.number_ != 0:
                    result += '<p>Mismatch Case:</p>\n'
                    result += mismatch_packet.case_
                if crash_packet.number_ != 0:
                    result += '<p>Crash Case:</p>\n'
                    result += crash_packet.case_

    result += '</head>\n</html>\n'
    return result
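
Example #1 relies on a MisMatchPacket container plus add_one_case and finish_one_case helpers that are not shown. Below is a minimal sketch of what they might look like, assuming number_ counts collected rooms and case_ accumulates an HTML fragment; only the two field names come from the code above, the rest is an assumption.

class MisMatchPacket:
    """Hypothetical accumulator for problem cases (a sketch, not the original class)."""

    def __init__(self):
        self.number_ = 0   # how many rooms were recorded
        self.case_ = ''    # HTML fragment listing the recorded rooms


def add_one_case(packet, room):
    # Assumed behaviour: count the room and append its name to the listing.
    packet.number_ += 1
    packet.case_ += room + '<br>\n'
    return packet


def finish_one_case(packet):
    # Assumed behaviour: wrap the listing once all rooms have been visited.
    if packet.number_ != 0:
        packet.case_ = '<p>\n' + packet.case_ + '</p>\n'
    return packet
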
Example #2
class Client:
    def __init__(self, case, idx, uid, total_client):
        orig_sequence = gp.sequences[case[7][idx]]
        self.case_ = case[0]
        self.duration_ = case[2]
        self.resolution_ = case[3][idx]
        self.rate_ = case[4][idx]
        self.fps_ = case[5][idx]
        self.network_ = case[6][idx]
        self.sequence_dir_ = gp.generate_dir_path(gp.sequence_dir,
                                                  orig_sequence)
        self.room_ = int(self.case_.split(gp.folder_join)[-1], 16)
        self.uid_ = uid
        self.capacity_ = int(self.resolution_.split('x')[0]) * int(self.resolution_.split('x')[1]) \
                         * self.fps_ * (2 + total_client - 1) / 2
        tmp = orig_sequence.split('_')
        self.anchor_seq_ = tmp[0] + '_' + tmp[1] + '_' + tmp[2] + '_' + self.resolution_ + 'p' + str(self.fps_) \
                           + '_' + tmp[4] + '.yuv'
        self.config_ = 'Rate' + str(self.rate_) + gp.folder_join + 'Net' + str(self.network_) + gp.folder_join \
                       + 'FPS' + str(self.fps_) + gp.folder_join + 'Res' + self.resolution_ + gp.folder_join \
                       + 'Seq' + tmp[1]

        key = case[7][idx]
        if key in gp.seq_candidates:
            if self.anchor_seq_ not in gp.seq_candidates[key]:
                gp.seq_candidates[key].append(self.anchor_seq_)
        else:
            gp.seq_candidates[key] = [self.anchor_seq_]
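
The Client constructor reads a fixed set of indices from case. The layout below is inferred purely from those reads and is shown as a reading aid; the concrete values, and the assumption that gp.folder_join is '_', are hypothetical.

# Hypothetical shape of one `case` entry, inferred from Client.__init__ above:
example_case = [
    'Case_1A2B',              # [0] case name; the last gp.folder_join token is the room id in hex
    None,                     # [1] not read here
    60,                       # [2] duration
    ['1280x720', '640x360'],  # [3] resolution per client, '<width>x<height>'
    [1200, 500],              # [4] bitrate per client
    [30, 15],                 # [5] frame rate per client
    [0, 1],                   # [6] network profile index per client
    [2, 2],                   # [7] sequence index per client (key into gp.sequences)
]
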
Example #3
def copy_corresponding_log(case_name, scenario):
    case_dir = gp.generate_dir_path(gp.problem_case_dir, scenario, case_name)
    if os.path.exists(case_dir):
        return

    cur_case = find_corresponding_case_path(case_name, scenario,
                                            gp.cur_log_dir)
    dest_dir = gp.generate_dir_path(gp.problem_case_dir, scenario, case_name,
                                    'cur')
    gp.create_dir(dest_dir)
    os.system('cp -rf ' + cur_case + '* ' + dest_dir)

    ref_case = find_corresponding_case_path(case_name, scenario,
                                            gp.ref_log_dir)
    dest_dir = gp.generate_dir_path(gp.problem_case_dir, scenario, case_name,
                                    'ref')
    gp.create_dir(dest_dir)
    os.system('cp -rf ' + ref_case + '* ' + dest_dir)
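
copy_corresponding_log shells out to cp -rf, which fails silently on unusual paths. A standard-library alternative using glob and shutil is sketched below, plainly as a swapped-in technique rather than the project's own helper.

import glob
import os
import shutil


def copy_matching(src_prefix, dest_dir):
    # Rough equivalent of "cp -rf <src_prefix>* <dest_dir>".
    for path in glob.glob(src_prefix + '*'):
        if os.path.isdir(path):
            shutil.copytree(path, os.path.join(dest_dir, os.path.basename(path)))
        else:
            shutil.copy2(path, dest_dir)
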
Example #4
def scale_yuv(*args):
    seq_idx = args[0]
    gp.print_log(gp.LogLevel.Normal,
                 '\tScaling ' + gp.sequences[seq_idx] + '...')
    ori_sequence = gp.sequences[seq_idx] + '.yuv'
    sequence_dir = gp.generate_dir_path(gp.sequence_dir, gp.sequences[seq_idx])
    if not os.path.exists(sequence_dir):
        gp.create_dir(sequence_dir)
    if not os.path.exists(sequence_dir + ori_sequence):
        if gp.cur_platform == 'Linux':
            os_system('cp ' + gp.sequence_dir + gp.sequences[seq_idx] +
                      '.yuv ' + sequence_dir)
        else:
            os_system('unzip ' + gp.sequence_dir + gp.sequences[seq_idx] +
                      '.yuv.zip -d ' + sequence_dir)

    tmp = ori_sequence.split('_')
    ori_width = int(tmp[-2].split('x')[0])
    ori_height = int(tmp[-2].split('x')[1].split('p')[0])
    ori_fps = int(tmp[-2].split('x')[1].split('p')[1])

    for seq_name in gp.seq_candidates[seq_idx]:
        tmp_seq_name = seq_name.split('_')
        width = int(tmp_seq_name[3].split('p')[0].split('x')[0])
        height = int(tmp_seq_name[3].split('p')[0].split('x')[1])
        fps = int(tmp_seq_name[3].split('p')[1])

        scale_seq_name = ori_sequence
        scale_width = ori_width
        scale_height = ori_height
        if width * ori_height != height * ori_width:
            if width * 1.0 / height < ori_width * 1.0 / ori_height:
                scale_width = ori_height * width / height
                scale_height = ori_height
            else:
                scale_width = ori_width
                scale_height = ori_width * height / width

            scale_seq_name = tmp[0] + '_' + tmp[1] + '_' + tmp[2] + '_' \
                             + str(scale_width) + 'x' + str(scale_height) + 'p' \
                             + str(ori_fps) + '_' + tmp[4]
            if not os.path.exists(sequence_dir + scale_seq_name):
                os_system(gp.scale.get_full_path_executable() + ' ' +
                          str(ori_width) + ' ' + str(ori_height) + ' ' +
                          str(scale_width) + ' ' + str(scale_height) + ' ' +
                          sequence_dir + ori_sequence + ' ' + sequence_dir +
                          scale_seq_name + ' 4 1')
                gp.print_log(gp.LogLevel.Normal, 'Scaling ' + scale_seq_name)

        if not os.path.exists(sequence_dir + seq_name):
            os_system(gp.scale.get_full_path_executable() + ' ' +
                      str(scale_width) + ' ' + str(scale_height) + ' ' +
                      str(width) + ' ' + str(height) + ' ' + sequence_dir +
                      scale_seq_name + ' ' + sequence_dir + seq_name + ' 3 ' +
                      str(ori_fps) + ' ' + str(fps))
            gp.print_log(gp.LogLevel.Normal, 'Scaling ' + seq_name)
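
The splits in scale_yuv imply sequence names of the form <a>_<b>_<c>_<width>x<height>p<fps>_<d>. The helper below mirrors that parsing; the sample name in the comment is purely illustrative.

def parse_geometry(sequence_name):
    # The second-to-last underscore-separated token carries '<width>x<height>p<fps>'.
    geometry = sequence_name.split('_')[-2]   # e.g. '1280x720p30'
    size, fps = geometry.split('p')
    width, height = size.split('x')
    return int(width), int(height), int(fps)


# parse_geometry('seq_foreman_a_1280x720p30_8bit.yuv')  ->  (1280, 720, 30)
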
Example #5
def gen_dir(scenario):
    gp.print_log(gp.LogLevel.Normal, 'Generating directories...')
    gp.print_log(gp.LogLevel.Normal, 'Creating necessary folders...')
    result_scenario_dir = gp.generate_dir_path(gp.result_dir, scenario)
    gp.create_dir(result_scenario_dir)
    if gp.connection_type == gp.connection[1]:
        ref_log_scenario_dir = gp.generate_dir_path(gp.ref_log_dir, scenario)
        gp.create_dir(ref_log_scenario_dir)
    if gp.mode == 'Overnight' and not os.path.isdir(gp.backup_log_dir):
        gp.create_dir(gp.backup_log_dir)
    if gp.mode == 'Regression':
        if gp.connection_type == gp.connection[0]:
            gp.client.set_executable_dir(
                gp.generate_dir_path(gp.executable_dir, 'anchor'))
        else:
            gp.client.set_executable_dir(
                gp.generate_dir_path(gp.executable_dir, 'test'))
    else:
        gp.client.set_executable_dir(
            gp.generate_dir_path(gp.executable_dir, 'overnight'))
Example #6
def analyse_encoder_for_one_room(room_dir, room):
    output = ''
    client_list = os.listdir(room_dir)
    for client in client_list:
        output += room + gp.string_join + client + '\n'
        client_dir = gp.generate_dir_path(room_dir, client)
        if os.path.isdir(client_dir):
            anchor_list = os.listdir(client_dir)
            for anchor_file in anchor_list:
                if re.search('enc_offline_test_0', anchor_file):
                    output += analyse_encoder_for_one_client(client_dir +
                                                             anchor_file)
                    break
        else:
            gp.print_log(gp.LogLevel.Normal,
                         'Error: Folder does not exist! ' + client_dir)
            exit()

    return output
Example #7
def analyse_data(scenario, anchor, test):
    anchor_dir = gp.generate_dir_path(anchor, scenario)
    test_dir = gp.generate_dir_path(test, scenario)
    is_match = True

    if os.path.isdir(test_dir) and os.path.isdir(anchor_dir):
        room_list = os.listdir(test_dir)
        for room in room_list:
            test_room_dir = gp.generate_dir_path(test_dir, room)
            anchor_room_dir = gp.generate_dir_path(anchor_dir, room)
            if os.path.isdir(test_room_dir) and os.path.isdir(anchor_room_dir):
                uid_list = os.listdir(test_room_dir)
                is_room_totally_match = True
                for uid in uid_list:
                    test_uid_dir = gp.generate_dir_path(test_room_dir, uid)
                    anchor_uid_dir = gp.generate_dir_path(anchor_room_dir, uid)
                    if not os.path.isdir(test_uid_dir):
                        continue

                    case = uid.split(gp.string_join)
                    case[0] = int(case[0])
                    if os.path.isdir(test_uid_dir) and os.path.isdir(
                            anchor_uid_dir):
                        result = analyse_encoder_data(anchor_uid_dir,
                                                      test_uid_dir)
                        is_match = is_match and result
                        is_room_totally_match = is_room_totally_match and result

                doc_list = os.listdir(test_room_dir)
                for doc in doc_list:
                    if re.search('crash.txt', doc):
                        is_room_totally_match = False
                        is_match = False

                doc_list = os.listdir(anchor_room_dir)
                for doc in doc_list:
                    if re.search('crash.txt', doc):
                        is_room_totally_match = False
                        is_match = False

                if is_room_totally_match:
                    gp.remove_dir(test_room_dir)
                    gp.remove_dir(anchor_room_dir)

    if not is_match:
        gp.print_log(gp.LogLevel.Normal, '\nMismatch Detected!!!\n')
    else:
        gp.print_log(gp.LogLevel.Normal, '\nAll Match!!!\n')

    return is_match
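
analyse_data defers the per-client check to analyse_encoder_data, which is not shown. A crude stand-in is sketched below under the assumption that a client matches when the files present in both directories are byte-identical; the real comparison is presumably more selective.

import filecmp
import os


def analyse_encoder_data(anchor_uid_dir, test_uid_dir):
    # Hypothetical sketch: compare every entry the two directories have in common.
    common = sorted(set(os.listdir(anchor_uid_dir)) & set(os.listdir(test_uid_dir)))
    match, mismatch, errors = filecmp.cmpfiles(anchor_uid_dir, test_uid_dir,
                                               common, shallow=False)
    return not mismatch and not errors
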
Example #8
def analyse_decoder_for_one_room(room_dir, room):
    output = ''
    client_list = os.listdir(room_dir)
    for client in client_list:
        output += room + gp.string_join + client + '\n'
        client_dir = gp.generate_dir_path(room_dir, client)
        if os.path.isdir(client_dir):
            anchor_list = os.listdir(client_dir)
            result = []
            for anchor_file in anchor_list:
                if re.search('dec_offline_test', anchor_file):
                    result.append(
                        analyse_decoder_for_one_client(client_dir +
                                                       anchor_file))

            for data in result:
                found = False
                for anchor_file in anchor_list:
                    if re.search('vqmg_' + data[1][1] + '.', anchor_file):
                        vqmg_file = client_dir + anchor_file
                        fp = open(vqmg_file, 'r')
                        fp.readline()
                        tmp = fp.readline().split('\n')[0].split('\t')[-1]
                        fp.close()
                        data[1][0] = data[1][0] + 'vqmg\t' + tmp + '\n'
                        found = True

                if not found:
                    data[1][0] = data[1][0] + 'vqmg\t0\n'

            result.sort()
            for idx in range(0, len(result)):
                output += 'For' + gp.string_join + 'UID' + gp.string_join + str(
                    result[idx][0]) + '\n'
                output += result[idx][1][0]
        else:
            gp.print_log(gp.LogLevel.Normal,
                         'Error: Folder does not exist! ' + client_dir)
            exit()

    return output
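
The vqmg read above (skip the header line, take the last tab-separated column of the next line) can be isolated into a small helper; here is a sketch of the same pattern using a context manager.

def read_vqmg_score(vqmg_file):
    # Mirrors the inline read in analyse_decoder_for_one_room: the second line's
    # last tab-separated column is taken as the score.
    with open(vqmg_file, 'r') as fp:
        fp.readline()                      # header line
        return fp.readline().split('\n')[0].split('\t')[-1]
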
Example #9
def find_latest_ref_log(commit_id):
    """Pick the newest backup log zip (optionally matching commit_id), unzip it
    into gp.data_dir, and return the short commit id, or '' if none is found."""
    gp.ref_log_dir = ''
    ref_file = ''
    if os.path.isdir(gp.backup_log_dir):
        zip_list = os.listdir(gp.backup_log_dir)
        time_commit = 0
        time_run = 0
        for zip_file in zip_list:
            tmp = zip_file.split('.')[0].split(gp.folder_join)
            # to avoid .DS_Store file problem
            if tmp[0] != '':
                time_run_tmp = int(tmp[0])
                time_commit_id_tmp = tmp[1]
                time_commit_tmp = int(tmp[2])
                if commit_id != '':
                    if re.search(
                            commit_id,
                            time_commit_id_tmp) and time_run < time_run_tmp:
                        time_run = time_run_tmp
                        ref_file = zip_file
                else:
                    if time_commit < time_commit_tmp:
                        if time_run < time_run_tmp:
                            time_run = time_run_tmp
                            time_commit = time_commit_tmp
                            commit_id = tmp[1]
                            ref_file = zip_file

    if ref_file != '':
        gp.print_log(gp.LogLevel.Normal, 'Selected commit id is ' + commit_id)

        os.system('unzip -oq ' + gp.backup_log_dir + ref_file + ' -d ' +
                  gp.data_dir)
        gp.ref_log_dir = gp.generate_dir_path(gp.data_dir,
                                              ref_file.split('.')[0])

        return commit_id[0:7]

    gp.print_log(gp.LogLevel.Normal, 'No commit is available!!!')
    return ''
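
find_latest_ref_log assumes backup names of the form <run_time> + gp.folder_join + <commit_id> + gp.folder_join + <commit_time> + '.zip'. The hypothetical helper below decomposes such a name the same way the loop does; the sample name and the '_' separator are assumptions.

def split_backup_name(zip_file, folder_join='_'):
    # e.g. split_backup_name('20240101093000_3fa9c1d2e_1703980800.zip')
    #      -> (20240101093000, '3fa9c1d2e', 1703980800)
    run_time, commit_id, commit_time = zip_file.split('.')[0].split(folder_join)
    return int(run_time), commit_id, int(commit_time)
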
Example #10
def generate_data(scenario):
    enc_result_file = open(
        gp.cur_log_dir + 'Enc_File_' + scenario + '_' + gp.cur_time + '_' +
        gp.cur_commit_id + '.txt', 'w')
    dec_result_file = open(
        gp.cur_log_dir + 'Dec_File_' + scenario + '_' + gp.cur_time + '_' +
        gp.cur_commit_id + '.txt', 'w')

    anchor_dir = gp.cur_log_dir + scenario
    if os.path.isdir(anchor_dir):
        room_list = os.listdir(anchor_dir)
        for room in room_list:
            room_dir = gp.generate_dir_path(anchor_dir, room)
            if os.path.isdir(room_dir):
                # Encoder result analyse
                enc_result = analyse_encoder_for_one_room(room_dir, room)
                enc_result_file.write(enc_result)

                # Decoder result analyse
                dec_result = analyse_decoder_for_one_room(room_dir, room)
                dec_result_file.write(dec_result)

    enc_result_file.close()
    dec_result_file.close()
Example #11
def saving_logs(scenario):
    result_dir = gp.generate_dir_path(gp.result_dir, scenario)
    if gp.connection_type == gp.connection[0]:
        dst_dir = gp.generate_dir_path(gp.cur_log_dir, scenario)
        os.system('cp ' + gp.client.dir_ + 'commit.log ' + gp.cur_log_dir)
    else:
        dst_dir = gp.generate_dir_path(gp.ref_log_dir, scenario)
        os.system('cp ' + gp.client.dir_ + 'commit.log ' + gp.ref_log_dir)

    room_list = os.listdir(result_dir)

    for room in room_list:
        if os.path.isdir(result_dir + room):
            uid_list = os.listdir(result_dir + room)
            room_dir = gp.generate_dir_path(result_dir, room)
            for uid in uid_list:
                if os.path.isdir(room_dir + uid):
                    cur_uid_dir = gp.generate_dir_path(room_dir, uid)

                    doc_list = os.listdir(cur_uid_dir)
                    dst_uid_dir = gp.generate_dir_path(dst_dir, room, uid)
                    gp.create_dir(dst_uid_dir)
                    for doc in doc_list:
                        if re.search('enc_offline_test_0', doc) \
                                or re.search('enc_online_parameters', doc) \
                                or re.search('enc_save_sent_stream', doc) \
                                or re.search('enc_save_stream_info_0', doc):
                            os.system('cp ' + cur_uid_dir + doc + ' ' +
                                      dst_uid_dir)
                        if gp.mode == 'Overnight':
                            if re.search('dec_offline_test', doc) \
                                    or re.search('dec_save_stream_info', doc) \
                                    or re.search('dec_quality_score', doc) \
                                    or re.search('dec_save_stream_received', doc) \
                                    or re.search('vqmg', doc) \
                                    or re.search('crash', doc) \
                                    or re.search('timestamp', doc):
                                os.system('cp ' + cur_uid_dir + doc + ' ' +
                                          dst_uid_dir)
Example #12
def compare_data():
    if gp.cur_log_dir[-1] != '/':
        gp.cur_log_dir += '/'
    if gp.ref_log_dir[-1] != '/':
        gp.ref_log_dir += '/'

    ref_list = os.listdir(gp.ref_log_dir)
    cur_list = os.listdir(gp.cur_log_dir)

    ref_folder = gp.ref_log_dir.split('/')[-2]
    cur_folder = gp.cur_log_dir.split('/')[-2]

    problem_zip_name = gp.cur_time + gp.folder_join + cur_folder.split(gp.folder_join)[1][0:7] \
                       + '_vs_' + ref_folder.split(gp.folder_join)[1][0:7]
    gp.problem_case_dir = gp.generate_dir_path(gp.data_dir, problem_zip_name)
    gp.create_dir(gp.problem_case_dir)
    if not os.path.isdir(gp.problem_dir):
        gp.create_dir(gp.problem_dir)

    header = '<h1>\nThis email contains the overnight comparison results\n</h1>'
    header += '<h2>\n Current Commit:\n</h2>' + gp.read_commit_log(gp.cur_log_dir) + \
              '<h3>Run on ' + gp.convert_date(cur_folder.split(gp.folder_join)[0]) + '</h3>'
    header += '<h2>\n Ref Commit:\n</h2>' + gp.read_commit_log(gp.ref_log_dir) + \
              '<h3>Run on ' + gp.convert_date(ref_folder.split(gp.folder_join)[0]) + '</h3>'
    if gp.total_crash != 0:
        header += '<h2>\n<span style="color: red"> Total Crash: ' + str(
            gp.total_crash) + '</span>\n</h2>'
    else:
        header += '<h2>\n No Crash.\n</h2>'

    result = ''
    # Brief (summary-only) version of the report
    brief_result = ''

    for scenario in gp.scenario:
        result += '<hr>\n'
        found_case = True
        ref_enc_file = find_file('Enc_File_' + scenario, ref_list)
        ref_dec_file = find_file('Dec_File_' + scenario, ref_list)
        if ref_enc_file != '' and ref_dec_file != '':
            ref_case_set = cc.CaseSummary(gp.ref_log_dir + ref_enc_file,
                                          gp.ref_log_dir + ref_dec_file)
        else:
            found_case = False

        cur_enc_file = find_file('Enc_File_' + scenario, cur_list)
        cur_dec_file = find_file('Dec_File_' + scenario, cur_list)
        if cur_enc_file != '' and cur_dec_file != '':
            cur_case_set = cc.CaseSummary(gp.cur_log_dir + cur_enc_file,
                                          gp.cur_log_dir + cur_dec_file)
        else:
            found_case = False

        if found_case:
            result += compare_encoder_performance(cur_case_set, ref_case_set,
                                                  scenario)
            result += compare_decoder_performance(cur_case_set, ref_case_set,
                                                  scenario)
            brief_result += compare_encoder_performance_brief(scenario)
            brief_result += compare_decoder_performance_brief(scenario)

    if gp.total_mismatch != 0:
        header += '<h2>\n<span style="color: red"> Total Mismatch: ' + str(
            gp.total_mismatch) + '</span>\n</h2>'
    else:
        header += '<h2>\n No Mismatch.\n</h2>'

    output = '<html>\n<head>\n' + header + result + '</head>\n</html>\n'
    brief_output = '<html>\n<head>\n' + header + brief_result + '</head>\n</html>\n'

    result_file = cur_folder.split(
        gp.folder_join)[1][:7] + '_vs_' + ref_folder.split(
            gp.folder_join)[1][:7] + '.html'
    result_file_handle = open(gp.problem_case_dir + result_file, 'w')
    result_file_handle.write(output)
    result_file_handle.close()

    brief_result_file = cur_folder.split(
        gp.folder_join)[1][:7] + '_vs_' + ref_folder.split(
            gp.folder_join)[1][:7] + '_brief.html'
    brief_result_file_handle = open(gp.problem_case_dir + brief_result_file,
                                    'w')
    brief_result_file_handle.write(brief_output)
    brief_result_file_handle.close()

    gp.move_to_dir(gp.pic_dir, gp.problem_case_dir)
    gp.zip_to_folder(problem_zip_name + '.zip', gp.problem_case_dir,
                     gp.problem_dir)
    gp.remove_dir(gp.problem_case_dir)

    return output
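
compare_data uses a find_file helper that is not shown; judging from how its return value is checked against '', it behaves roughly like this sketch.

def find_file(prefix, file_list):
    # Assumed behaviour: return the first directory entry whose name contains
    # `prefix`, or '' when nothing matches.
    for name in file_list:
        if prefix in name:
            return name
    return ''
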
Example #13
def run_one_scenario(scenario):
    """
    Run a single scenario (one of Comm, Live, or ScSh).
    """
    fun.gen_dir(scenario)
    fun.check_files()

    p = multiprocessing.Pool()

    gp.print_log(gp.LogLevel.Normal, 'Scaling YUVs...')
    for seqIdx in gp.sequences:
        temp = (seqIdx, )
        p.apply_async(fun.scale_yuv, temp)
        # fun.scale_yuv(seqIdx)
    p.close()
    p.join()
    gp.print_log(gp.LogLevel.Normal, '')

    gp.client_flag = gp.mgr.list(
        [gp.RunningState.Unfinished for _ in range(len(gp.clients))])
    p = multiprocessing.Pool()
    gp.print_log(gp.LogLevel.Normal, 'Start offline test...')

    while True:
        # Find an unfinished case
        gp.process_lock.acquire()
        [all_start, all_finish, start_idx, end_idx] = fun.get_next_case()
        gp.process_lock.release()
        client_num = end_idx - start_idx

        if all_finish:
            break
        if all_start:
            time.sleep(1)
            continue

        require_capacity = 0
        for client_idx in range(start_idx, end_idx):
            client = gp.clients[client_idx]
            assert isinstance(client, fun.Client)
            require_capacity += client.capacity_

        # Wait until enough process slots and capacity are available
        while True:
            gp.process_lock.acquire()
            if gp.running_process.value + client_num <= gp.active_process \
                    and gp.used_capacity.value + require_capacity <= gp.capacity:
                gp.running_process.value += client_num
                gp.used_capacity.value += require_capacity
                gp.process_lock.release()
                break
            elif gp.used_capacity.value == 0 and require_capacity > gp.capacity:
                gp.print_log(
                    gp.LogLevel.Normal,
                    "Not enough capacity to run this case: " +
                    gp.clients[start_idx].case_)
            gp.process_lock.release()
            time.sleep(1)

        if gp.connection_type == gp.connection[0] and os.popen(
                'pgrep ' + gp.server.get_executable_name()).read() == '':
            os.system(gp.server.get_full_path_executable() + ' >>/dev/null &')
            gp.print_log(
                gp.LogLevel.Normal, 'Restart Server! Current Server PID is ' +
                os.popen('pgrep ' + gp.server.get_executable_name()).read())

        for client_idx in range(start_idx, end_idx):
            gp.process_lock.acquire()
            gp.client_flag[client_idx] = gp.RunningState.Running
            gp.process_lock.release()
            client = gp.clients[client_idx]
            assert isinstance(client, fun.Client)
            uid_dir_name = str(client.uid_) + gp.string_join + client.config_

            client_dir = gp.generate_dir_path(gp.result_dir, scenario,
                                              client.case_, uid_dir_name)
            gp.create_dir(client_dir)
            fun.os_system('cp ' + gp.network_dir +
                          gp.networks[client.network_] + ' ' + client_dir)
            gp.client.copy_executable_to_dir(client_dir)

            if gp.connection_type == gp.connection[1]:
                log_dir = gp.generate_dir_path(gp.cur_log_dir, scenario,
                                               client.case_, uid_dir_name)
                fun.os_system('cp ' + log_dir + 'enc_online_parameters* ' +
                              client_dir)

            gp.print_log(
                gp.LogLevel.Normal, 'Running Case ' + client.case_ +
                ' Client with UID ' + str(client.uid_) + ', Duration ' +
                str(client.duration_) + ', Time ' + time.strftime(
                    '%Y_%m_%d_%H_%M_%S', time.localtime(time.time())))
            p.apply_async(fun.run_client,
                          args=(client_idx, client_dir, scenario))
            # fun.run_client(client_idx, client_dir, scenario)

        gp.print_log(gp.LogLevel.Normal, '')

    p.close()
    p.join()

    if gp.mode == 'Overnight':
        gp.client_flag = gp.mgr.list(
            [gp.RunningState.Unfinished for _ in range(len(gp.clients))])
        p = multiprocessing.Pool()
        gp.print_log(gp.LogLevel.Normal, 'Start VQM test...')

        while True:
            # Find an unfinished case
            gp.process_lock.acquire()
            [all_start, all_finish, start_idx, end_idx] = fun.get_next_case()
            gp.process_lock.release()

            if all_finish:
                break
            if all_start:
                time.sleep(1)
                continue

            # Wait until a process slot is available (VQM uses at most half of the active processes)
            for client_idx in range(start_idx, end_idx):
                while True:
                    gp.process_lock.acquire()
                    if gp.running_process.value < gp.active_process / 2:
                        gp.running_process.value += 1
                        gp.process_lock.release()
                        break
                    gp.process_lock.release()
                    time.sleep(1)

                gp.process_lock.acquire()
                gp.client_flag[client_idx] = gp.RunningState.Running
                gp.process_lock.release()

                client = gp.clients[client_idx]
                assert isinstance(client, fun.Client)
                uid_dir_name = str(
                    client.uid_) + gp.string_join + client.config_
                client_dir = gp.generate_dir_path(gp.result_dir, scenario,
                                                  client.case_, uid_dir_name)
                gp.vqm_test.copy_executable_to_dir(client_dir)
                gp.decode_stream.copy_executable_to_dir(client_dir)
                gp.scale.copy_executable_to_dir(client_dir)

                gp.print_log(
                    gp.LogLevel.Normal, 'Running VQM Case ' + client.case_ +
                    ' Client with UID ' + str(client.uid_) + ', Duration ' +
                    str(client.duration_) + ', Time ' + time.strftime(
                        '%Y_%m_%d_%H_%M_%S', time.localtime(time.time())))
                p.apply_async(fun.run_vqm, args=(client_idx, client_dir))
                # fun.run_vqm(client_idx, client_dir)

            gp.print_log(gp.LogLevel.Normal, '')

        p.close()
        p.join()

    fun.saving_logs(scenario)

    if gp.scenario_crash.value != 0:
        gp.print_log(
            gp.LogLevel.Normal, 'Total ' + str(gp.scenario_crash.value) +
            ' crashes for ' + scenario + '!!!')
        gp.total_crash += gp.scenario_crash.value
        gp.scenario_crash.value = 0