def __init__(self, args, tree):
    """Initialize combiner state and ingest any trace files given on the command line.

    Dispatches each ``--trace`` argument to the matching handler by file
    extension (.etl / .ftrace / .dtrace / .perf); unknown extensions and
    missing files are reported and skipped, not fatal.
    """
    TaskCombiner.__init__(self, args, tree)
    self.size_keeper = None
    self.targets = []
    self.trace_number = 0
    self.counters = {}
    self.frames = {}
    self.samples = []
    self.last_task = None
    self.metadata = {}
    self.last_relation_id = 0
    if self.args.trace:
        for trace in self.args.trace:
            if not os.path.exists(trace):
                # single-arg print() form behaves the same on Python 2 and 3
                print("Error: File not found: " + trace)
                continue
            if trace.endswith(".etl"):
                self.handle_etw_trace(trace)
            elif trace.endswith(".ftrace"):
                self.handle_ftrace(trace)
            elif trace.endswith(".dtrace"):
                self.handle_dtrace(trace)
            elif trace.endswith(".perf"):
                self.handle_perf(trace)
            else:
                print("Error: unsupported extension: " + trace)
    self.start_new_trace()
def __init__(self, args, tree):
    """Open the ``<output>.qtd`` file for writing and reset note/time tracking.

    The file handle is kept open for the exporter's lifetime; it is
    presumably closed by a finish/flush method elsewhere in the class —
    TODO confirm.
    """
    TaskCombiner.__init__(self, args, tree)
    self.file_name = self.args.output + ".qtd"
    self.file = open(self.file_name, "w")
    self.notes = []
    # Overall time range of the trace, filled in as tasks are seen.
    self.start_time = None
    self.end_time = None
def __init__(self, args, tree):
    """Load optional memory statistics and declare the CSV output target.

    ``stat.mem`` in the input directory is optional: when absent,
    ``self.mem_stat`` stays ``None`` (no memory stats were collected).
    """
    TaskCombiner.__init__(self, args, tree)
    self.mem = {}
    stat_mem = os.path.join(self.args.input, 'stat.mem')
    self.mem_stat = self.read_mem_stat(stat_mem) if os.path.exists(stat_mem) else None
    self.targets = [self.args.output + ".csv"]
def __init__(self, args, tree):
    """Open the last declared target file for writing and reset note/time tracking.

    The file handle is kept open for the exporter's lifetime; it is
    presumably closed by a finish/flush method elsewhere in the class —
    TODO confirm.
    """
    TaskCombiner.__init__(self, args, tree)
    self.file_name = self.get_targets()[-1]
    self.file = open(self.file_name, "w")
    self.notes = []
    # Overall time range of the trace, filled in as tasks are seen.
    self.start_time = None
    self.end_time = None
def __init__(self, args, tree):
    """Open the .btf file and write its header.

    The header is emitted as one buffered write (byte-identical to four
    separate writes). NOTE(review): writing ``str`` to a file opened in
    binary mode ("w+b") is Python-2 specific — on Python 3 this would
    need ``bytes``; confirm the supported interpreter version.
    """
    TaskCombiner.__init__(self, args, tree)
    self.file = open(self.get_targets()[-1], "w+b")
    self.file.write(
        '#version 2.1.3\n'
        '#creator GDP-SEA\n'
        '#creationDate 2014-02-19T11:39:20Z\n'
        '#timeScale ns\n'
    )
def __init__(self, args, tree):
    """Create a fresh SQLite database with the trace schema.

    Any previous database at the target path is removed so each run
    starts from a clean slate.
    """
    TaskCombiner.__init__(self, args, tree)
    # Renamed from 'file' to avoid shadowing the builtin.
    db_path = self.get_targets()[0]
    if os.path.exists(db_path):
        os.remove(db_path)
    self.conn = sqlite3.connect(db_path)
    self.cursor = self.conn.cursor()
    self.cursor.execute(
        'CREATE TABLE tasks (type TEXT, begin JSON, end JSON)')
    self.cursor.execute('CREATE TABLE meta (data JSON)')
    self.cursor.execute(
        'CREATE TABLE relation (data JSON, head JSON, tail JSON)')
    self.cursor.execute(
        'CREATE TABLE context_switch (time INTEGER, cpu INTEGER, prev JSON, next JSON)'
    )
def __init__(self, args, tree):
    """Initialize the single-trace collector and ingest the --trace file if given.

    NOTE(review): unlike the sibling variants, this base ``__init__`` takes
    only ``tree`` and ``args`` is stored manually — confirm the intended
    ``TaskCombiner`` signature for this class.
    """
    TaskCombiner.__init__(self, tree)
    self.args = args
    self.target_scale_start = self.args.time_shift
    self.source_scale_start = 0
    self.ratio = 1 / 1000.  # nanoseconds to microseconds
    self.size_keeper = None
    self.targets = []
    self.trace_number = 0
    self.counters = {}
    self.frames = {}
    self.samples = []
    self.last_task = None
    if self.args.trace:
        # Single trace path here (not a list as in other variants):
        # .etl goes to the ETW handler, everything else is treated as ftrace.
        if self.args.trace.endswith(".etl"):
            self.handle_etw_trace(self.args.trace)
        else:
            self.args.sync = self.handle_ftrace(self.args.trace)
    self.start_new_trace()
def __init__(self, args, tree):
    """Initialize combiner state and ingest any trace files given on the command line.

    Dispatches each ``--trace`` argument by extension (.etl / .ftrace /
    .dtrace); unknown extensions are reported and skipped.
    NOTE(review): unlike the sibling variant, this one does not check
    ``os.path.exists`` before dispatching — confirm whether missing-file
    handling is intended here.
    """
    TaskCombiner.__init__(self, args, tree)
    self.size_keeper = None
    self.targets = []
    self.trace_number = 0
    self.counters = {}
    self.frames = {}
    self.samples = []
    self.last_task = None
    if self.args.trace:
        for trace in self.args.trace:
            if trace.endswith(".etl"):
                self.handle_etw_trace(trace)
            elif trace.endswith(".ftrace"):
                self.handle_ftrace(trace)
            elif trace.endswith(".dtrace"):
                self.handle_dtrace(trace)
            else:
                # single-arg print() form behaves the same on Python 2 and 3
                print("Error: unsupported extension: " + trace)
    self.start_new_trace()
def __init__(self, args, tree):
    """Load optional memory statistics and declare the CSV output target.

    ``stat.mem`` in the input directory is optional: when absent,
    ``self.mem_stat`` stays ``None`` (no memory stats were collected).
    """
    TaskCombiner.__init__(self, args, tree)
    self.mem = {}
    stat_mem = os.path.join(self.args.input, 'stat.mem')
    self.mem_stat = self.read_mem_stat(stat_mem) if os.path.exists(stat_mem) else None
    self.targets = [self.args.output + ".csv"]