示例#1
0
def read_hist(file_name1, file_name2, file_type):
    """Read and parse two histogram files of the given file type.

    NOTE(review): only the "usf" branch is visible in this snippet and no
    return statement is shown -- the function appears truncated here.
    """
    if file_type == "usf":
        try:
            # pyusf.Usf(name) raises IOError on open failure (handled below
            # by print_and_exit, which is defined elsewhere in this project).
            usf_file1 = pyusf.Usf(file_name1)
            usf_file2 = pyusf.Usf(file_name2)
        except IOError, e:
            print_and_exit(str(e))

        hist1 = parse_usf(usf_file1)
        hist2 = parse_usf(usf_file2)

        # Release the file handles once both histograms are parsed.
        usf_file1.close()
        usf_file2.close()
示例#2
0
def open_sample_file(file_name, line_size):
    """Open a USF sample file, exiting the process on I/O failure.

    NOTE(review): the snippet appears truncated here -- line_size is unused
    in the visible portion and usf_file is never returned.
    """
    try:
        usf_file = pyusf.Usf()
        usf_file.open(file_name)
    except IOError, e:
        print >> sys.stderr, "Error: %s" % str(e)
        sys.exit(1)
示例#3
0
def main():
    """Entry point: parse command-line arguments and open the input USF file.

    NOTE(review): the snippet appears truncated after the open.
    """
    args = parse_args()

    try:
        # pyusf.Usf(name) raises IOError on open failure.
        usf_file = pyusf.Usf(args.ifile_name)
    except IOError, e:
        # print_and_exit is a project helper defined elsewhere.
        print_and_exit(str(e))
示例#4
0
文件: nta.py 项目: muneeb/uartpy
    def __init__(self, file_name, condition, min_samples=1,
                 verbose=False, debug=False):
        """Load reuse-distance samples from a USF sample file and hand the
        collected per-PC data to the NTA *condition* object."""
        self.min_samples = min_samples
        self.verbose = verbose
        self.debug = debug

        usf_file = pyusf.Usf()
        usf_file.open(file_name)

        # A file carrying the TRACE flag is a full trace, not a sample file.
        if usf_file.header.flags & pyusf.USF_FLAG_TRACE:
            raise NTAFinderFormatError("Input is not a sample file.")

        self.pcs = {}
        self.rdist_hist = None

        # TODO: Handle multiple line sizes in sample file.
        for event in usf_file:
            if isinstance(event, pyusf.Burst):
                # Exactly one burst is supported; seeing a second is an error.
                if self.rdist_hist is not None:
                    raise NTAFinderFormatError(
                        "Unsupported file format, more than one burst.")
                self.rdist_hist = {}
            elif isinstance(event, pyusf.Sample):
                self.__add_rdist(event.begin, event.end)
            elif isinstance(event, pyusf.Dangling):
                # Dangling samples have no terminating access.
                self.__add_rdist(event.begin, None)
            else:
                raise NTAFinderFormatError("Input file contains unexpected events.")

        self.nta = condition
        self.nta.init(self.pcs, self.rdist_hist)
示例#5
0
 def __init__(self, file_name, line_size, filter, patch_list, **kwargs):
     """Open the given USF sample file, exiting the process on I/O failure.

     NOTE(review): the snippet appears truncated here -- line_size, filter,
     patch_list and **kwargs are unused in the visible portion.
     """
     try:
         usf_file = pyusf.Usf()
         usf_file.open(file_name)
     except IOError, e:
         print >> sys.stderr, "Error: %s" % str(e)
         sys.exit(1)
示例#6
0
def convert(usf_root_dir, basename="sample_d", new_basename = "memory.samples"):
    """Convert a directory of numbered USF sample files into one protobuf
    sample stream written through sample_file_util.SampleWriter.

    usf_root_dir -- directory holding the input files
    basename     -- input file prefix; files are named <basename>.<index>
    new_basename -- name of the output sample file
    """
    samples = glob.glob(usf_root_dir + '/' + basename + ".[0-9]*")

    # Order the files by their numeric suffix, not lexicographically.
    samples = sorted(samples, key=lambda x: int(os.path.basename(x).split('.')[1]))

    sample_writer = sample_file_util.SampleWriter(new_basename)

    for i, s in enumerate(samples):
        usf_file = pyusf.Usf()
        usf_file.open(s)

        usf_file.next() #skip first Burst event

        for event in usf_file:
            # Only Sample and Dangling events are expected after the burst.
            assert(isinstance(event, pyusf.Sample) or isinstance(event, pyusf.Dangling))

            pb_sample = mspb.Sample()
            # Tag each sample with the index of the window (file) it came from.
            pb_sample.window_id = i

            pb_sample.begin.CopyFrom(convert_memaccess(event.begin))

            # Dangling events have no end access, so leave pb_sample.end unset.
            if isinstance(event, pyusf.Sample):
                pb_sample.end.CopyFrom(convert_memaccess(event.end))

            sample_writer.write_sample(pb_sample)

        # BUGFIX: close each input file before the next iteration; the handle
        # was previously leaked once per sample file (cf. read_hist, which
        # does close its pyusf.Usf handles).
        usf_file.close()
示例#7
0
 def load_usf_files(self,
                    full_trace_usf_file_name,
                    sampled_trace_usf_file_name,
                    quiet=False):
     """Record the trace file names and open the full-trace USF file,
     exiting via self.print_and_exit on I/O failure.

     NOTE(review): the snippet appears truncated here -- t0 and quiet are
     unused in the visible portion, and the sampled trace is never opened.
     """
     t0 = time.time()
     self.full_trace_usf_file_name = full_trace_usf_file_name
     self.sampled_trace_usf_file_name = sampled_trace_usf_file_name
     try:
         self.full_trace_usf = pyusf.Usf()
         self.full_trace_usf.open(self.full_trace_usf_file_name)
     except IOError, e:
         self.print_and_exit("Failed to load full trace USF file " +
                             self.full_trace_usf_file_name + ":" + str(e))
示例#8
0
class Associative_Cache:
    """Model of a set-associative cache configured by total size, line size
    and associativity / number of sets (see __init__)."""
    # Class-level flag shared by all instances; presumably used so a
    # quantization warning is printed only once -- the code that sets it is
    # not visible in this snippet.
    already_printed_quantitization_warning = False

    def print_and_exit(self, s):
        print "ERROR:" + s
        print >> sys.stderr, "ERROR:" + s
        sys.exit(1)

    def debug(self, string, level=1):
        if (level <= self.debug_level):
            s = ""
            if (level > 1):
                for x in range(0, level):
                    s += " "
            print s + string

    # Defines the cache in terms of size, bytes per line, number of entries per set (associativity),
    # whether or not dangling references should be counted when analyzing, and the debug level.
    def __init__(self,
                 size_bytes,
                 line_size_bytes,
                 associativity=0,
                 number_of_sets=0,
                 count_dangling=True,
                 debug=1):
        """Configure the cache geometry.

        size_bytes      -- total cache size in bytes
        line_size_bytes -- bytes per line (must be a power of two)
        associativity   -- lines per set; 0 = derive from number_of_sets,
                           -1 = fully associative
        number_of_sets  -- number of sets; 0 = derive from associativity
                           (at least one of the two must be given)
        count_dangling  -- whether dangling references are counted when analyzing
        debug           -- debug verbosity level

        Any invalid combination is reported via self.print_and_exit(),
        which terminates the process.
        """
        self.debug_level = debug
        self.count_dangling = count_dangling
        # Verify that there are a whole number of lines in the cache
        if (size_bytes % line_size_bytes != 0):
            self.print_and_exit("Invalid line size for cache size: " +
                                str(size_bytes) + "%" + str(line_size_bytes) +
                                "!=0")
        self.size_bytes = int(size_bytes)
        self.line_size_bytes = int(line_size_bytes)
        self.number_of_lines = self.size_bytes / self.line_size_bytes

        # Fully associative: one set holding every line.
        if (associativity == -1):
            number_of_sets = 1
            associativity = 0

        # Determine associativity and number of sets depending on what was provided
        # The user can provide one or the other or both
        if (associativity == 0 and number_of_sets == 0):
            self.print_and_exit(
                "Either associativity or number_of_sets must be defined.")
        if (associativity != 0):
            self.associativity = int(associativity)
        if (number_of_sets != 0):
            self.number_of_sets = int(number_of_sets)

        # Derive whichever of the two parameters was left at 0.
        if (associativity != 0 and number_of_sets == 0):
            self.number_of_sets = self.number_of_lines / self.associativity
        if (associativity == 0 and number_of_sets != 0):
            self.associativity = self.number_of_lines / self.number_of_sets
        # Verify that there are a whole number of sets in the cache
        if (self.number_of_lines % self.associativity != 0):
            self.print_and_exit(
                # BUGFIX: message was misspelled "Invlalid".
                "Invalid associativity for number of lines: " +
                str(self.number_of_lines) + "%" + str(self.associativity) +
                "!=0")
        # When both parameters were supplied, cross-check them for consistency.
        if (associativity != 0 and number_of_sets != 0):
            if (number_of_sets != self.number_of_lines / associativity):
                self.print_and_exit(
                    # BUGFIX: message was misspelled "Incomapatible"
                    # (the sibling check below already spells it correctly).
                    "Incompatible number of sets and associativity for this size."
                )
            if (associativity != self.number_of_lines / number_of_sets):
                self.print_and_exit(
                    "Incompatible number of sets and associativity for this size."
                )

        # Calculate the number of bits and a mask for the line
        self.cache_line_bits_mask = self.line_size_bytes - 1
        self.cache_line_bits = int(math.log(self.line_size_bytes, 2))
        if (self.cache_line_bits != math.log(self.line_size_bytes, 2)):
            self.print_and_exit(
                "Invalid cache size: non-power-of-two line size.")

        # Calculate the number of bits and a mask for the set.
        # Precedence note: this is (number_of_sets - 1) << cache_line_bits.
        self.cache_set_bits_mask = self.number_of_sets - 1 << self.cache_line_bits
        self.cache_set_bits = int(math.log(self.number_of_sets, 2))
        if (self.cache_set_bits != math.log(self.number_of_sets, 2)):
            self.print_and_exit(
                "Invalid cache size: non-power-of-two associativity.")

        self.debug("Created cache:\n" + str(self), 1)

    # Returns information about the cache configuration as a string
    def __str__(self):
        """Return a one-line human-readable summary of the cache geometry."""
        parts = []
        parts.append("\tSize: %s (%skB), " %
                     (self.size_bytes, self.size_bytes / 1024.0))
        parts.append("Line Size: %s (%s lines), " %
                     (self.line_size_bytes, self.number_of_lines))
        parts.append("Associativity: %s (%s sets of %s lines), " %
                     (self.associativity, self.number_of_sets,
                      self.number_of_lines / self.number_of_sets))
        parts.append(" Line bit mask: 0x%X (%d bits), Set bit mask: 0x%X (%d bits)" %
                     (self.cache_line_bits_mask, self.cache_line_bits,
                      self.cache_set_bits_mask, self.cache_set_bits))
        return "".join(parts)

    def print_header(self, usf):
        """Format the header of the given USF file as a single string."""
        header = usf.header
        # Version is packed as major in the high 16 bits, minor in the low 16.
        version_major = (header.version >> 16) & 0xffff
        version_minor = header.version & 0xffff

        def line_sizes(line_size_mask):
            # Each set bit i in the mask denotes a supported line size of 2**i.
            chunks = ["%s " % (1 << bit) for bit in range(32)
                      if line_size_mask & (1 << bit)]
            return "".join(chunks).strip()

        fields = [
            "Header:",
            "\tVersion: %d.%d" % (version_major, version_minor),
            "\tCompression: %d (%s)" % (header.compression,
                                        pyusf.strcompr(header.compression)),
            "\tFlags: 0x%.8x" % (header.flags),
            "\tSampling time: %d-%d" % (header.time_begin, header.time_end),
            "\tLine sizes: %s" % (line_sizes(header.line_sizes)),
        ]
        return "".join(fields)

    # Loads the two USF files for the full trace and the sampled version
    def load_usf_files(self,
                       full_trace_usf_file_name,
                       sampled_trace_usf_file_name,
                       quiet=False):
        """Open the full-trace and sampled-trace USF files, exiting via
        self.print_and_exit on any I/O error.

        full_trace_usf_file_name    -- path of the complete-trace USF file
        sampled_trace_usf_file_name -- path of the sampled-trace USF file
        quiet                       -- unused in the visible portion

        NOTE(review): this method ends at the last visible line of the
        snippet and may continue beyond it.
        """
        # t0 is unused in the visible portion; presumably it feeds a timing
        # report further down -- TODO confirm.
        t0 = time.time()
        self.full_trace_usf_file_name = full_trace_usf_file_name
        self.sampled_trace_usf_file_name = sampled_trace_usf_file_name
        try:
            self.full_trace_usf = pyusf.Usf()
            self.full_trace_usf.open(self.full_trace_usf_file_name)
        except IOError, e:
            self.print_and_exit("Failed to load full trace USF file " +
                                self.full_trace_usf_file_name + ":" + str(e))

        # The full trace must carry the TRACE flag (cf. the sample-file
        # loaders elsewhere in this file, which require its absence).
        if not self.full_trace_usf.header.flags & pyusf.USF_FLAG_TRACE:
            self.print_and_exit("Full trace is not a trace file.")

        try:
            self.sampled_trace_usf = pyusf.Usf()
            self.sampled_trace_usf.open(self.sampled_trace_usf_file_name)
        except IOError, e:
            self.print_and_exit("Failed to load sampled trace USF file " +
                                self.sampled_trace_usf_file_name + ":" +
                                str(e))