def GetWeightedLDV(fp):
    """
    Read the frequency vectors for all the slices in a LRU stack Distance Vector
    (ldv) file, apply weights based on the distances for each element in the vector
    and normalize the resulting vector.

    @param fp File pointer to the LDV file (NOTE(review): this argument is
              currently overwritten below by re-opening options.ldv_file —
              confirm callers expect this)

    @return list of normalized, weighted LDV frequency vectors, one per slice
    """

    # List of lists which contains the result matrix. One element for each slice.
    #
    result_matrix = []

    # Get the weights to apply to the LDV frequency vectors, indexed by distance.
    #
    weight = GetLDVWeights()

    # NOTE(review): the 'fp' argument is clobbered here; the file named by
    # options.ldv_file is always the one processed.  Verify callers rely on this.
    #
    fp = util.OpenCompressFile(options.ldv_file)

    # For each frequency vector, apply the weight and normalize the result.
    #
    while True:
        fv = GetSlice(fp)
        if not fv:
            break

        # Apply the appropriate weight for each distance in the frequency vector.
        #
        vector_sum = 0   # Sum of all weighted counts in this vector
        for index, (distance, count) in enumerate(fv):
            if distance > max_dist:
                # Converted from a Python 2 print statement to print() for
                # consistency with the rest of the file.
                print('ERROR: Distance read from LDV file (%d) was greater than max value of: %d' % (distance, max_dist))
                sys.exit(-1)
            count = count * weight[distance]
            fv[index] = (distance, count)
            vector_sum += count

        # Normalize the weighted counts for the frequency vector.
        #
        result_vector = []
        for _, count in fv:
            if count > 0:
                result_vector.append(float(count) / vector_sum)
            else:
                result_vector.append(0.0)     # Vector contains no data

        result_matrix.append(result_vector)

    return result_matrix
Example #2
0
    def GetLastMetric(self, sim_file, tid, options):
        """
        Get the last metric in a simulator output file.  This is the value
        for running the entire pinball.

        @param sim_file File with simulator results to process
        @param tid TID of results to be processed
        @param options Options given on cmd line

        @return metric (-1.0 on error or when no data is found)
        """

        # Get a file pointer to the simulator data file.
        #
        f = util.OpenCompressFile(sim_file)
        if f == None:
            # Error opening file, return an error.
            #
            return -1.0

        # This is the code which needs to be modified in order to use a
        # different metric of interest for a new simulator.  The existing code
        # uses the metric MPI (misses per thousand instruction).
        #
        # Current code assume the default Branch Predictor simulator is used. Get the
        # number of instructions and misses for this thread in the last line of
        # the output.
        #
        instr = misses = 0
        last = ''
        for line in f.readlines():
            if line.find('Icount: ') != -1:
                last = line

        # BUGFIX: guard against output which contains no 'Icount: ' lines at
        # all; previously 'last' was unbound here and raised a NameError.
        # With no data, 'instr' stays 0 and the warning path below is taken.
        #
        if last:
            lst = last.split()
            instr = int(lst[1])
            misses = int(lst[3])

        # Check to make sure there really is valid data.  If not, then print a
        # warning.  No need to exit with an error, because it's possible for
        # MPI_MT_MODE applications to have a different number of threads in
        # each process.  This means some processes may have a thread 'tid',
        # while this process may not.
        #
        if instr > 1:
            metric = misses / (float(instr) / 1000)
        else:
            msg.PrintMsgPlus('WARNING: There were no instructions in simulator output for thread ' + \
                                   str(tid) + ' in file:\n         ' + sim_file)
            msg.PrintMsg('Prediction error for this process may be suspect.')
            # hasattr() guard added for consistency with GetRegionMetric(),
            # which tolerates an 'options' object without a 'mode' attribute.
            if hasattr(options, 'mode') and options.mode == config.MPI_MT_MODE:
                msg.PrintMsg(
                    'Since tracing mode is \'mpi_mt\', this may be OK.')
            metric = -1.0  # Error indication

        return metric
def OpenFile(fl, type_str):
    """
    Check to make sure a file exists and open it.

    @param fl       Name of the file to open
    @param type_str Description of the file type, used in the error message

    @return file pointer
    """

    if not os.path.isfile(fl):
        msg.PrintAndExit('File does not exist: %s' % fl)

    fptr = util.OpenCompressFile(fl)
    if fptr == None:
        err_msg(type_str + fl)
    return fptr
def GetOptions():
    """
    Get users command line options/args and check to make sure they are correct.

    @return List of options and 3 file pointers for bbv, simpoint and weights files
    """

    version = '$Revision: 1.1 $'
    version = version.replace('$Revision: ', '')
    ver = version.replace(' $', '')
    us = '%prog [options] action file_name [file_name]'
    desc = 'Implements several different actions to process FV (Frequency Vector) files.  ' \
           'An action must be defined in order for the script to run.  '\
           'All actions require at least one file name be given using an option. '\
           '                                                            '\
           '--------------------------------------------'\
           '                                                            '\
           'There are two types of frequency vector files:  '\
           '                                                            '\
           'BBV = Basic Block Vector, '\
           'LDV = LRU stack Distance Vector'

    parser = optparse.OptionParser(usage=us, version=ver, description=desc)

    cmd_options.focus_thread(parser, '')

    # Options which define the actions the script to execute
    #
    action_group = cmd_options.ActionGroup(parser)

    cmd_options.combine(parser, action_group)
    cmd_options.csv_region(parser, action_group)
    cmd_options.project_bbv(parser, action_group)
    cmd_options.weight_ldv(parser, action_group)

    parser.add_option_group(action_group)

    # Options which list the files the script can process
    #
    file_group = cmd_options.FileGroup(parser)

    cmd_options.bbv_file(parser, file_group)
    cmd_options.ldv_file(parser, file_group)
    cmd_options.normal_bbv(parser, file_group)
    cmd_options.normal_ldv(parser, file_group)
    cmd_options.region_file(parser, file_group)
    # cmd_options.vector_file(parser, file_group)
    cmd_options.weight_file(parser, file_group)

    parser.add_option_group(file_group)

    # Parse command line options and get any arguments.
    #
    (options, args) = parser.parse_args()

    # BUGFIX: removed leftover debug code that unconditionally called
    # ReadVectorFile(args[0]), printed it and exited with sys.exit(0);
    # it made every check below and the documented return unreachable.

    def TrueXor(*args):
        """Return xor of some booleans."""
        return sum(args) == 1

    # Must have one, and only one, action on command line.
    #
    if not TrueXor(options.csv_region, options.project_bbv, options.weight_ldv,
                   options.combine != None):
        msg.PrintAndExit('Must give one, and only one, action for script to execute.\n'
                         'Use -h to get help.')

    # Check to see if options required for the various actions are given.
    #
    file_error = lambda file, action: msg.PrintAndExit("Must use option '" + file + \
        "' to define the file to use with '" + action + "'.   \nUse -h for help.")

    fp_bbv = fp_ldv = fp_simp = fp_weight = None
    if options.combine:
        # First check to make sure the scaling factor is a valid FP number between 0.0 and 1.0
        #
        if not util.IsFloat(options.combine):
            msg.PrintAndExit('Illegal value for scaling factor: ' + str(options.combine) + \
                '\nScaling factor must be a float between 0.0 and 1.0.')
        else:
            value = float(options.combine)
            if value < 0.0 or value > 1.0:
                msg.PrintAndExit('Scaling factor given (%f) must be between 0.0 and 1.0' % value)
            options.combine = value

        # Then check to make sure required files are given.
        #
        if not options.normal_bbv:
            file_error('--normal_bbv', '--combine')
        if not options.normal_ldv:
            file_error('--normal_ldv', '--combine')
        fp_bbv = OpenNormalFVFile(options.normal_bbv, 'projected, normalized BBV file: ')
        # BUGFIX: description said 'BBV' for the LDV file (copy/paste error).
        fp_ldv = OpenNormalFVFile(options.normal_ldv, 'projected, normalized LDV file: ')

    if options.csv_region:
        if not options.bbv_file:
            file_error('--bbv_file', '--csv_region')
        if not options.region_file:
            file_error('--region_file', '--csv_region')
        if not options.weight_file:
            file_error('--weight_file', '--csv_region')
        fp_bbv = OpenFVFile(options.bbv_file, 'Basic Block Vector (bbv) file: ')
        fp_simp = OpenSimpointFile(options.region_file, 'simpoints file: ')
        fp_weight = OpenWeightsFile(options.weight_file, 'weights file: ')

    if options.project_bbv:
        if not options.bbv_file:
            file_error('--bbv_file', '--project_bbv')
        fp_bbv = OpenFVFile(options.bbv_file, 'Basic Block Vector (bbv) file: ')

    if options.weight_ldv:
        if not options.ldv_file:
            file_error('--ldv_file', '--weight_ldv')
        fp_ldv = util.OpenCompressFile(options.ldv_file)

    return (options, fp_bbv, fp_ldv, fp_simp, fp_weight)
Example #5
0
    def GetRegionMetric(self, sim_file, warmup, tid, options):
        """
        Get the metric of interest (CPI) for just the representative region,
        not including any warmup instructions.

        It is assumed the first set of CMPSim output data is for the warmup
        instructions, if they exist.  This is true because when the CMPSim was run
        it should have printed out data at 'warmup_len' intervals.

        The last set of data will be for both the representative region and
        warmup instructions, if any.

        Of course, if there's only one set of data, then it is for the region only,
        because there aren't any warmup instruction.

        @param sim_file File with simulator results to process
        @param warmup Number of instructions in warmup section (0 = no warmup)
        @param tid TID of results to be processed
        @param options Options given on cmd line

        @return metric (CPI; -1.0 on error or suspect data)
        """

        # Get a file pointer to the simulator data file.
        #
        f = util.OpenCompressFile(sim_file)
        if f == None:
            return -1.0

        # This is the code which needs to be modified in order to use a
        # different metric of interest for a new CMPSim.  The existing code
        # uses the metric CPI.
        #
        # Get the first and last lines in the output that have the
        # cycle/instruction counts.  Assume the 1st is always the info for the
        # warmup because the CMPSim data is dumped ever 'warmup_length'
        # instructions.  Assume last data point is for warmup + region.  If
        # there is only one line, then assume it's only for the region.
        #
        # Current code assume the default Branch Predictor CMPSim is used. 
        #
        # Always use the data for thread 0 because we don't generate prediction
        # error for cooperative region pinballs.  Need to fix this when
        # this capability is added.
        #
        first = ''
        last = ''
        for line in f.readlines():
            pos = line.find('Thread: ' + str(0) + ' Instructions:')
            if pos != -1:

                # If the first time, save it.
                #
                if first == '':
                    first = line
                last = line
        # NOTE(review): if no matching line was found, 'last' is '' and the
        # int(l_list[3]) below raises IndexError — confirm callers guarantee
        # at least one data line, or add a guard like GetLastMetric().
        l_list = last.split()
        l_instr = int(l_list[3])
        l_cycles = int(l_list[5])

        if warmup == 0:
            # No warmup. Calc metric from the last set of data.
            #
            if l_instr > 0:
                metric = l_cycles / float(l_instr)
            else:
                msg.PrintAndExit('(1) Unable to calculate CPI because number of instructions is 0:\n' \
                    '            ' + sim_file)
        else:
            # Get number of instructions & cycles for first set of data. (from warmup)
            #
            f_list = first.split()
            f_instr = int(f_list[3])
            f_cycles = int(f_list[5])

            # Calculate region data by subtracting the last values from the
            # first values. This gives number of cycles and instructions for
            # just the region.
            #
            # Check to make sure there really is valid data.  If not, the print a
            # warning.  No need to exit with an error, because it's possible for
            # MPI_MT_MODE applications to have a different number of threads in
            # each process.  This means some processes may have a thread 'tid',
            # while this process may not.
            #
            if l_instr - f_instr > 0:
                metric = (l_cycles - f_cycles) / float(l_instr - f_instr)
            else:
                msg.PrintMsgPlus('WARNING: It looks like there were no warmup instructions in region CMPSim output for thread ' + \
                   str(tid) + ' in file:\n         ' + sim_file)
                msg.PrintMsg('First icount: %s    Last icount: %s' % (locale.format('%d', f_instr, True), \
                    locale.format('%d', l_instr, True)))
                if l_instr < config.instr_cmpsim_phase:
                    msg.PrintMsg(
                        'Slice size may be too small to calculate prediction error.')
                    msg.PrintMsg(
                        'It needs to be at least 1,000,000 for CMPSim to generate valid data.')
                msg.PrintMsg('Prediction error for this process may be suspect.')
                if hasattr(options,
                           'mode') and options.mode == config.MPI_MT_MODE:
                    msg.PrintMsg(
                        'Since tracing mode is \'mpi_mt\', this may be OK.')
                metric = -1.0

        return metric
Example #6
0
    def GetLastMetric(self, sim_file, tid, options):
        """
        Get the last metric in a CMPSim output file.  This is the value
        for running the entire pinball.

        Seek until we are close to the end of the file before start looking
        for data. This saves lots of time when processing very large files.

        @param sim_file File with simulator results to process
        @param tid TID of results to be processed
        @param options Options given on cmd line

        @return metric (-1 if an error occurs)
        """

        import struct

        # Get the size of the uncompressed simulator data file from the last 4 bytes
        # of the compressed file.  This value is the file size modulo 2^32.
        # (Assumes gzip format, whose trailer stores the uncompressed size there.)
        #
        try:
            fo = open(sim_file, 'rb')
        except IOError:
            msg.PrintMsg('ERROR: Unable to open CMPSim file for whole program pinball:\n   ' + \
                sim_file)
            return -1.0
        try:
            fo.seek(-4, 2)
        except:
            msg.PrintMsg('ERROR: There was a problem accessing data for the WP CMPSim file:\n   ' + \
                sim_file)
            return -1.0
        r = fo.read()
        fo.close()
        size = struct.unpack('<I', r)[0]

        # Get a file pointer to the simulator file.
        #
        f = util.OpenCompressFile(sim_file)
        if f == None:
            return -1.0

        four_GB = 4294967296
        seek_past = 100
        num_chunk = 0

        # First seek to the point in the file given by the 'size'.
        #
        msg.PrintMsgPlus('Determining size of file: ' + sim_file)
        f.seek(size, 1)
        current = f.tell()

        # For files > 4GB, the value for 'size' is the true file size modulo
        # 2^32.  If this is the case, seek in 4GB chunks until the true file
        # size is found.
        #
        while current - (num_chunk * four_GB) >= size:

            # First see if we can seek a few bytes past the current file
            # pointer location.  If we don't advance the FP, then it's at the
            # end of the file. Otherwise, there is a 4GB chunk of the file to
            # be bypassed.
            #
            mark = current
            f.seek(seek_past, 1)
            current = f.tell()
            if current == mark:
                break
            else:
                msg.PrintMsg('Skipping 4GB in CMPSim file')
                f.seek(four_GB - seek_past, 1)
                num_chunk += 1
                current = f.tell()

            # Check to see if the last seek reached 'size' modulo 2^32
            # bytes. If so, then we are at the end of the file.
            #
            if current - (num_chunk * four_GB) < size:
                break

        size = num_chunk * four_GB + size

        # Skip to 100k bytes before the end of the file. Then start looking for the last set of
        # data in the file. This saves a large amount of time, especially for huge files.
        #
        # BUGFIX: clamp the offset to 0 so files smaller than 100,000 bytes
        # do not attempt a negative seek, which raises an exception.
        #
        msg.PrintMsgPlus('Skipping ' + locale.format('%d', size, True) +
                         ' bytes in file: ' + sim_file)
        f.seek(0)
        f.seek(max(size - 100000, 0))

        # This is the code which needs to be modified in order to use a
        # different metric of interest for a new CMPSim.  The existing code
        # uses the metric CPI.  Get the number of instructions and cycles
        # for this thread in the last matching line of the output.
        #
        instr = cycles = 0
        last = ''
        for line in f.readlines():
            if line.find('Thread: ' + str(tid) + ' Instructions:') != -1:
                last = line

        # BUGFIX: previously 'last' was unbound when no matching line was
        # found, raising a NameError.  With no data, 'instr' stays 0 and the
        # warning path below is taken.
        #
        if last:
            lst = last.split()
            instr = int(lst[3])
            cycles = int(lst[5])

        # Check to make sure there really is valid data.  If not, then print a
        # warning.  No need to exit with an error, because it's possible for
        # MPI_MT_MODE applications to have a different number of threads in
        # each process.  This means some processes may have a thread 'tid',
        # while this process may not.
        #
        if instr > 1:
            metric = cycles / float(instr)
        else:
            msg.PrintMsgPlus('WARNING: There were no instructions in WP CMPSim output for thread ' + \
                                   str(tid) + ' in file:\n         ' + sim_file)
            msg.PrintMsg(
                'Prediction error will not be calculated for this process.')
            # hasattr() guard added for consistency with GetRegionMetric().
            if hasattr(options, 'mode') and options.mode == config.MPI_MT_MODE:
                msg.PrintMsg('Since tracing mode is \'mpi_mt\', this may be OK.')
            metric = -1.0

        return metric
Example #7
0
def GetOptions():
    """
    Get users command line options/args and check to make sure they are correct.

    @return List of options and 3 file pointers for bbv, simpoint and weights files
    """

    # BUGFIX: the old code did version.replace(' ', ''), which removed every
    # space and left the '$Revision:'/'$' markers in place, so the reported
    # version string was never '1.30'.  Strip the RCS markers instead.
    #
    version = '$Revision: 1.30 $'
    version = version.replace('$Revision: ', '')
    ver = version.replace(' $', '')
    us = '%prog [options] action file_name [file_name]'
    desc = 'Implements several actions used to process FV (Frequency Vector) files.  ' \
           'One action, and only one, must be defined in order for the script to run.  '\
           'All actions require at least one file name be given using an option. \n\n'\
           '' \
           'There are two types of frequency vector files:  '\
           '                                                            '\
           'BBV = Basic Block Vector, '\
           '                                                            '\
           'LDV = LRU stack Distance Vector'

    def combine(parser, group):
        """
        IMPORTANT NOTE:
        This is a local definition for the option which has more help
        information than the default defined in cmd_options.py.  This info is
        specific to this script and is not applicable to the other locations
        where the option is used.

        Default value for combine to 'none' instead of setting it to a value
        (as it is in cmd_options.py).  This allows the option to be used to
        determine what to do.

        @return  No return value
        """

        method = cmd_options.GetMethod(parser, group)
        method(
            "--combine",
            dest="combine",
            default=None,
            help=
            "Combine the vectors for BBV and LDV files into a single FV file, use scaling "
            "factor COMBINE (0.0 >= COMBINE <= 1.0).  The BB vectors "
            "are scaled by COMBINE, while the LD vectors are scaled by 1-COMBINE.  Default: 0.5  "
            "Assumes both files have already been transformed by the appropriate process "
            "(project/normal for BBV, weight/normal for LDV). "
            "Must use --normal_bbv and --normal_ldv to define files to process."
        )

    util.CheckNonPrintChar(sys.argv)
    parser = optparse.OptionParser(
        usage=us,
        version=ver,
        description=desc,
        formatter=cmd_options.BlankLinesIndentedHelpFormatter())

    cmd_options.dimensions(parser, '')
    cmd_options.focus_thread(parser, '')

    # Options which define the actions the script to execute
    #
    action_group = cmd_options.ActionGroup(parser)

    combine(parser, action_group)
    cmd_options.csv_region(parser, action_group)
    cmd_options.project_bbv(parser, action_group)
    cmd_options.weight_ldv(parser, action_group)

    parser.add_option_group(action_group)

    # Options which list the files the script can process
    #
    file_group = cmd_options.FileGroup(parser)

    cmd_options.bbv_file(parser, file_group)
    cmd_options.ldv_file(parser, file_group)
    cmd_options.normal_bbv(parser, file_group)
    cmd_options.normal_ldv(parser, file_group)
    cmd_options.region_file(parser, file_group)
    cmd_options.vector_file(parser, file_group)
    cmd_options.weight_file(parser, file_group)

    parser.add_option_group(file_group)

    # Parse command line options and get any arguments.
    #
    (options, args) = parser.parse_args()

    # Added method cbsp() to 'options' to check if running CBSP.
    #
    util.AddMethodcbsp(options)

    def TrueXor(*args):
        """Return xor of some booleans."""
        return sum(args) == 1

    # Must have one, and only one, action on command line.
    #
    if not TrueXor(options.csv_region, options.project_bbv, options.weight_ldv,
                   options.combine != None, options.vector_file != None):
        msg.PrintAndExit(
            'Must give one, and only one, action for script to execute.\n'
            'Use -h to get help.')

    # Check to see if options required for the various actions are given.
    #
    file_error = lambda file, action: msg.PrintAndExit("Must use option '" + file + \
        "' to define the file to use with '" + action + "'.   \nUse -h for help.")

    fp_bbv = fp_ldv = fp_simp = fp_weight = None

    if options.combine:
        # Check to make sure the option 'combine' is an acceptable value.  If so, then turn it into a float.
        #
        util.CheckCombine(options)
        options.combine = float(options.combine)

        # Then check to make sure required files are given.
        #
        if not options.normal_bbv:
            file_error('--normal_bbv', '--combine')
        if not options.normal_ldv:
            file_error('--normal_ldv', '--combine')
        fp_bbv = OpenNormalFVFile(options.normal_bbv,
                                  'projected, normalized BBV file: ')
        # BUGFIX: description said 'BBV' for the LDV file (copy/paste error).
        fp_ldv = OpenNormalFVFile(options.normal_ldv,
                                  'projected, normalized LDV file: ')

    if options.csv_region:
        if not options.bbv_file:
            file_error('--bbv_file', '--csv_region')
        if not options.region_file:
            file_error('--region_file', '--csv_region')
        if not options.weight_file:
            file_error('--weight_file', '--csv_region')
        fp_bbv = OpenFVFile(options.bbv_file,
                            'Basic Block Vector (bbv) file: ')
        fp_simp = OpenSimpointFile(options.region_file, 'simpoints file: ')
        fp_weight = OpenWeightsFile(options.weight_file, 'weights file: ')

    if options.project_bbv:
        if not options.bbv_file:
            file_error('--bbv_file', '--project_bbv')
        fp_bbv = OpenFVFile(options.bbv_file,
                            'Basic Block Vector (bbv) file: ')

    if options.weight_ldv:
        if not options.ldv_file:
            file_error('--ldv_file', '--weight_ldv')
        fp_ldv = util.OpenCompressFile(options.ldv_file)

    return (options, fp_bbv, fp_ldv, fp_simp, fp_weight)
Example #8
0
def GetWeightedLDV(fp, num_dim=32):
    """
    Read the frequency vectors for all the slices in a LRU stack Distance Vector
    (ldv) file, apply weights based on the distances for each element in the vector
    and normalize the resulting vector.

    @param fp File pointer to the LDV file (NOTE(review): this argument is
              currently overwritten below by re-opening options.ldv_file —
              confirm callers expect this)
    @param num_dim Number of dimensions (bins) in each result vector; 0 yields
              an empty vector per slice

    @return list of normalized, weighted LDV frequency vectors, one per slice
    """

    # List of lists which contains the result matrix. One element for each slice.
    #
    result_matrix = []

    # Get the weights to apply to the LDV frequency vectors, indexed by distance.
    #
    weight = GetLDVWeights()

    # NOTE(review): the 'fp' argument is clobbered here; the file named by
    # options.ldv_file is always the one processed.  Verify callers rely on this.
    #
    fp = util.OpenCompressFile(options.ldv_file)

    # For each frequency vector, apply the weight and normalize the result.
    #
    while True:
        ldv = GetSlice(fp)
        if ldv == []:
            break

        if num_dim == 0:
            result_matrix.append([])
            continue

        # Apply the appropriate weight for each distance in the frequency vector.
        #
        vector_sum = 0  # Sum of all weighted counts in this vector
        fv = [(0, 0) for _ in range(num_dim)]
        for distance, count in ldv:
            # The distances are 0 based, so maximum allowed distance value is 'max-dim-1'.
            #
            if distance > max_dim - 1:
                print(
                    'ERROR: Distance read from LDV file (%d) was greater than max value of: %d'
                    % (distance, max_dim - 1))
                sys.exit(-1)
            count = count * weight[distance]
            # Interesting LDV range is 10..25; rescale it onto 0..num_dim-1.
            #
            index = (distance - 10) * num_dim // (25 - 10)
            if index >= num_dim:
                index = num_dim - 1
            # BUGFIX: distances < 10 produced a negative index, which silently
            # wrote into the tail of 'fv' (clobbering the high-distance bins).
            # Clamp them into the first bin instead.
            #
            if index < 0:
                index = 0
            # NOTE(review): several distances can map to the same bin; the last
            # one overwrites earlier entries while vector_sum still includes
            # all of them — confirm this is intended.
            fv[index] = (distance, count)
            vector_sum += count

        fv = PadLDVBlanks(fv, num_dim)

        # Normalize the weighted counts for the frequency vector.
        #
        result_vector = []
        for block in fv:
            count = block[1]
            if count > 0:
                result_vector.append(float(count) / vector_sum)
            else:
                result_vector.append(0.0)  # Vector contains no data

        result_matrix.append(result_vector)

    return result_matrix
    def GetRegionMetric(self, sim_file, warmup, tid, options):
        """
        Get the metric of interest (MPI, misses per thousand instructions) for
        just the representative region, not including any warmup instructions.

        It is assumed the first set of simulator output data is for the warmup
        instructions, if they exist.  This is true because when the simulator was run
        the knob to print out data was called with 'warmup_length' instructions.

        The last set of data will be for both the representative region and
        warmup instructions, if any exist.

        Of course, if there's only one set of data, then it is for the region only,
        because there aren't any warmup instruction.

        @param sim_file File with simulator results to process
        @param warmup Number of instructions in warmup section (0 = no warmup)
        @param tid TID of results to be processed
        @param options Options given on cmd line

        @return metric (MPI; -1.0 on error or suspect data)
        """

        # Get a file pointer to the simulator data file.
        #
        f = util.OpenCompressFile(sim_file)
        if f == None:
            # Error opening file, return an error.
            #
            return -1.0

        # This is the code which needs to be modified in order to use a
        # different metric of interest for a new simulator.  The existing code
        # uses the metric MPI (misses per thousand instruction).
        #
        # Current code assume the default Branch Predictor simulator is used.
        #
        # Get the first and last lines in the output that have the
        # cycle/instruction counts.  Assume the 1st is always the info for the
        # warmup because the simulator data is dumped ever 'warmup_length'
        # instructions.  Assume last data point is for warmup + region.  If
        # there is only one line, then assume it's only for the region.
        #
        # Always use the data for thread 0 because we don't generate functional
        # correlation for cooperative region pinballs.  Need to fix this when
        # this capability is added.
        #
        first = ''
        last = ''
        for line in f.readlines():
            pos = line.find('Icount:')
            if pos != -1:

                # If the first time, save it.
                #
                if first == '':
                    first = line
                last = line
        # NOTE(review): if no 'Icount:' line was found, 'last' is '' and the
        # int(l_list[1]) below raises IndexError — confirm callers guarantee
        # at least one data line, or add a guard like GetLastMetric().
        l_list = last.split()
        l_instr = int(l_list[1])
        l_misses = int(l_list[3])

        if warmup == 0:
            # No warmup. Calc metric from the last set of data.
            #
            if l_instr > 0:
                metric = l_misses / (float(l_instr) / 1000)
            else:
                msg.PrintAndExit('(1) Unable to calculate metric because number of instructions is 0:\n' \
                    '            ' + sim_file)
        else:
            # Get number of instructions & misses for first set of data. (from warmup)
            #
            f_list = first.split()
            f_instr = int(f_list[1])
            f_misses = int(f_list[3])

            # Calculate region data by subtracting the last values from the
            # first values. This gives number of misses and instructions for
            # just the region.
            #
            # Check to make sure there really is valid data.  If not, the print a
            # warning.  No need to exit with an error, because it's possible for
            # MPI_MT_MODE applications to have a different number of threads in
            # each process.  This means some processes may have a thread 'tid',
            # while this process may not.
            #
            if l_instr - f_instr > 0:
                metric = (l_misses - f_misses) / (float(l_instr - f_instr) /
                                                  1000)
            else:
                msg.PrintMsgPlus('WARNING: There were no instructions in simulation output for thread ' + \
                                       str(tid) + ' in file:\n         ' + sim_file)
                msg.PrintMsg(
                    'Prediction error for this process may be suspect.')
                if hasattr(options,
                           'mode') and options.mode == config.MPI_MT_MODE:
                    msg.PrintMsg(
                        'Since tracing mode is \'mpi_mt\', this may be OK.')
                metric = -1.0

        return metric