def transform_ftrace(args):
    tree = default_tree()
    tree['ring_buffer'] = True
    TaskCombiner.disable_handling_leftovers = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        with Progress(os.path.getsize(args.input), 50,
                      "Parsing: " + os.path.basename(args.input)) as progress:
            count = 0
            with open(args.input) as file:
                handler = FTrace(args, callbacks)
                for line in file:
                    count += 1
                    if line.startswith('#'):
                        continue
                    regular = line[:46]
                    payload = line[48:]
                    parts = regular.split()
                    if len(parts) != 4:
                        right = parts[-3:]
                        left = parts[:-3]
                        parts = [' '.join(left)] + right
                    (proc, tid) = parts[0].rsplit('-', 1)
                    cpu = int(parts[1][1:4])
                    flags = parts[2]
                    timestamp = float(parts[3])
                    (name, args) = payload.split(':', 1)
                    handler.handle_record(proc, int(tid), cpu, flags, timestamp,
                                          name.strip(), args.strip())
                    if not count % 1000:
                        progress.tick(file.tell())
    return callbacks.get_result()

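# Hedged illustration of the fixed-width split above: the importer assumes the
# "task-pid [cpu] flags timestamp:" prefix ends at a fixed column, with the
# event payload starting at column 48. The line below is synthesized (padded to
# that width) purely to show the slicing; the names and values are not real
# trace data.
def _example_ftrace_split():
    prefix = '%s-%d [%03d] %s %.6f' % ('example_proc', 1234, 1, 'd..3', 1234.567890)
    line = prefix.ljust(46) + ': ' + 'example_event: value=42'
    regular, payload = line[:46], line[48:]
    proc_tid, cpu, flags, timestamp = regular.split()
    cpu = int(cpu[1:4])                      # strips the surrounding brackets: '[001]' -> 1
    name, args = payload.split(':', 1)
    return proc_tid.rsplit('-', 1), cpu, flags, float(timestamp), name.strip(), args.strip()
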
def transform_ftrace(args, preprocess=None):
    tree = default_tree(args)
    tree['ring_buffer'] = True
    args.no_left_overs = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        with Progress(os.path.getsize(args.input), 50,
                      "Parsing: " + os.path.basename(args.input)) as progress:
            count = 0
            with open(args.input) as file:
                handler = FTrace(args, callbacks)
                for line in file:
                    count += 1
                    if line.startswith('#'):
                        continue
                    res = FTraceImporter.parse(line)
                    if preprocess:
                        res = preprocess(res)
                    if not res:
                        continue
                    handler.handle_record(res['name'], res['tgid'], res['pid'], res['cpu'],
                                          res['flags'], res['time'], res['event'], res['args'])
                    if not count % 1000:
                        progress.tick(file.tell())
                handler.finalize()
    return callbacks.get_result()

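# Hedged example of the optional preprocess hook above: it receives the parsed
# record dict ('name', 'tgid', 'pid', 'cpu', 'flags', 'time', 'event', 'args')
# and may mutate it or return a falsy value to drop the record. The 'sched_'
# prefix filter below is purely illustrative.
def example_ftrace_preprocess(record):
    if record and not record['event'].startswith('sched_'):
        return None  # returning a falsy value makes the importer skip this record
    return record

# Illustrative usage: transform_ftrace(args, preprocess=example_ftrace_preprocess)
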
def transform_json(args, preprocess=None):
    file_size = os.path.getsize(args.input)
    if not file_size:
        return []
    tree = default_tree(args)
    tree['ring_buffer'] = True
    args.no_left_overs = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        with Progress(file_size, 50,
                      "Parsing: " + os.path.basename(args.input)) as progress:
            count = 0
            with codecs.open(args.input, 'r', 'utf-8', errors='ignore') as file:
                data = the_json.load(file)
                handler = JsonHandler(args, callbacks)
                for key, val in (enumerate(data) if isinstance(data, list) else data.items()):
                    count += 1
                    if not count % 1000:
                        progress.tick(file.tell())
                    handler.handle_record(key, val)
                handler.finalize()
    return callbacks.get_result()

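# Hedged illustration of the iteration above: a top-level JSON list is walked
# as (index, element) pairs and a top-level JSON object as (key, value) pairs,
# so JsonHandler.handle_record (assumed) sees a uniform key/value stream. The
# sample payloads are fabricated.
def _example_json_pairs(data):
    return list(enumerate(data) if isinstance(data, list) else data.items())

# _example_json_pairs([{'ph': 'X'}, {'ph': 'B'}]) -> [(0, {'ph': 'X'}), (1, {'ph': 'B'})]
# _example_json_pairs({'traceEvents': []})        -> [('traceEvents', [])]
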
def transform_perf(args, preprocess=None):
    tree = default_tree(args)
    tree['ring_buffer'] = True
    args.no_left_overs = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        with Progress(os.path.getsize(args.input), 50,
                      "Parsing: " + os.path.basename(args.input)) as progress:
            count = 0
            with open(args.input) as file:
                handler = PerfHandler(args, callbacks)
                read_stack = None
                for line in file:
                    count += 1
                    if not count % 1000:
                        progress.tick(file.tell())
                    line = line.strip()
                    if read_stack is not None:
                        if not line:
                            handler.handle_stack(read_stack)
                            read_stack = None
                        else:
                            read_stack.append(line)
                    else:
                        fields = parse_event(line)
                        handler.handle_record(fields['name'], int(fields['pid']),
                                              int(fields['tid']), float(fields['time']))
                        if line.endswith(':'):
                            read_stack = []
                handler.finalize()
    return callbacks.get_result()

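# Hedged sketch only: parse_event() used above is defined elsewhere in the
# codebase; this illustrative stand-in handles one common `perf script` layout,
# "comm pid/tid [cpu] time: event:", and returns just the fields the importer
# consumes ('name', 'pid', 'tid', 'time', all as strings). Real layouts vary
# with the perf options used.
def _example_parse_event(line):
    import re
    match = re.match(
        r'^\s*(?P<name>.+?)\s+(?P<pid>\d+)/(?P<tid>\d+)\s+(?:\[\d+\]\s+)?(?P<time>\d+\.\d+):',
        line)
    if not match:
        return None
    return match.groupdict()
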
def transform_csv(args, preprocess=None):
    tree = default_tree(args)
    tree['ring_buffer'] = True
    args.no_left_overs = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        with Progress(os.path.getsize(args.input), 50,
                      "Parsing: " + os.path.basename(args.input)) as progress:
            count = 0
            with open(args.input) as file:
                handler = CSVHandler(args, callbacks)
                header = None
                for line in file:
                    count += 1
                    if not count % 1000:
                        progress.tick(file.tell())
                    if line.startswith('//'):
                        continue
                    parts = [item.strip() for item in line.strip().split(',')]
                    if header:
                        fields = dict(zip(header, parts))
                        if preprocess:
                            fields = preprocess(fields)
                        if not fields:
                            continue
                        handler.handle_record(fields)
                    else:
                        header = parts
                handler.finalize()
    return callbacks.get_result()

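# Hedged example of the CSV preprocess hook above: it receives a dict mapping
# header names to the row's string values and may clean it up or return a
# falsy value to drop the row. The 'time' and 'duration' column names below
# are hypothetical.
def example_csv_preprocess(fields):
    if not fields.get('time'):               # hypothetical column: drop rows without a timestamp
        return None
    fields['time'] = float(fields['time'])
    if fields.get('duration'):                # hypothetical column
        fields['duration'] = float(fields['duration'])
    return fields
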
def transform_dtrace(args):
    tree = default_tree()
    tree['ring_buffer'] = True
    TaskCombiner.disable_handling_leftovers = True
    gt = get_exporters()['gt']
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        dtrace = DTrace(args, gt, callbacks)
        with Progress(os.path.getsize(args.input), 50,
                      "Parsing: " + os.path.basename(args.input)) as progress:
            count = 0
            with open(args.input) as file:
                for line in file:
                    line = line.strip()
                    if not line:
                        continue
                    parts = line.split('\t')
                    dtrace.handle_record(int(parts[0], 16), parts[1], parts[2:])
                    if not count % 1000:
                        progress.tick(file.tell())
                    count += 1
        dtrace.finalize()
    return callbacks.get_result() + dtrace.get_result()

def transform_dtrace(args):
    tree = default_tree(args)
    tree['ring_buffer'] = True
    args.no_left_overs = True
    gt = get_exporters()['gt']
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        dtrace = DTrace(args, gt, callbacks)
        size = os.path.getsize(args.input)
        with Progress(size, 50, "Parsing: %s (%s)" % (os.path.basename(args.input),
                                                      format_bytes(size))) as progress:
            count = 0
            with codecs.open(args.input, 'r', 'utf-8', errors='ignore') as file:
                reading_stack = None
                stack = []
                for line in file:
                    count += 1
                    ends_with_vt = (11 == ord(line[-1])) if len(line) else False
                    line = line.strip('\r\n')
                    if not line:
                        if reading_stack:
                            dtrace.handle_stack(*(reading_stack + [stack]))
                            reading_stack = None
                            stack = []
                        continue
                    if reading_stack:
                        if ends_with_vt:  # Vertical Tab signifies too long stack frame description
                            line += '...'
                            end_of_line = file.readline()  # it is also treated as line end by codecs.open
                            line += end_of_line.strip()
                        stack.append(line.replace('\t', ' '))
                        continue
                    parts = line.split('\t')
                    if len(parts) < 4:
                        print("Warning: weird line:", line)
                        continue
                    if parts[1] in ['ustack', 'kstack', 'jstack']:
                        reading_stack = [parts[1], int(parts[0], 16), parts[2], parts[3].rstrip(':')]
                        continue
                    dtrace.handle_record(int(parts[0], 16), parts[1], parts[2:])
                    if not count % 1000:
                        progress.tick(file.tell())
        dtrace.finalize()
    return callbacks.get_result()

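# Hedged illustration of the Vertical-Tab handling above: overlong stack-frame
# strings are assumed to be cut at a VT character (0x0B), which codecs.open
# also treats as a line break, so the importer appends '...' and stitches the
# next physical line back on. The frame names below are fabricated.
def _example_vt_continuation():
    first = 'very::long::frame::name\x0b'   # truncated frame, ends with VT
    continuation = '  +0x1f\n'               # remainder arrives as the "next line"
    ends_with_vt = (11 == ord(first[-1])) if len(first) else False
    line = first.strip('\r\n')
    if ends_with_vt:
        line += '...'
        line += continuation.strip()
    return line  # 'very::long::frame::name\x0b...+0x1f'
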
def transform_etw_xml(args):
    tree = default_tree()
    tree['ring_buffer'] = True
    TaskCombiner.disable_handling_leftovers = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        handler = ETWXMLHandler(args, callbacks)
        handler.parse()
    TaskCombiner.disable_handling_leftovers = False
    res = callbacks.get_result()
    if handler.ftrace is not None:
        res += [handler.ftrace.name]
    return res

def post_process(args, conn):
    decoders = get_decoders().get('db', [])
    if not decoders:
        return []
    tree = default_tree(args)
    tree['ring_buffer'] = True
    args.no_left_overs = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        for decoder in decoders:
            decoder(args, callbacks).handle_db(conn)
    return callbacks.get_result()

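# Hedged sketch of the decoder interface consumed above: a 'db' decoder is
# (as used here) a callable taking (args, callbacks) whose instance exposes
# handle_db(conn). The table name is hypothetical and conn is assumed to be a
# DB-API style connection.
class ExampleDbDecoder:
    def __init__(self, args, callbacks):
        self.args = args
        self.callbacks = callbacks

    def handle_db(self, conn):
        cursor = conn.cursor()
        cursor.execute('SELECT * FROM example_events')  # hypothetical table
        for row in cursor.fetchall():
            pass  # translate rows into callback events here
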
def transform_pprof(args):
    '''
    Transforms pprof trace to chosen export format.
    :param args: args
    :return: list of callbacks
    '''
    import_profile()
    if not IS_AVAILABLE:
        return []
    tree = default_tree(args)
    tree['ring_buffer'] = True
    args.no_left_overs = True
    prof = profile.Profile()
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        with Progress(os.path.getsize(args.input), 50,
                      "Parsing: " + os.path.basename(args.input)) as progress:
            count = 0
            with gzip.open(args.input) as input_file:
                handler = PprofHandler(args, callbacks)
                prof.ParseFromString(input_file.read())
                handler.preprocess(prof)
                time = prof.TimeNanos
                pid = 0
                tid = 0
                for sample in prof.Sample:
                    duration = sample.Value[1]
                    if len(sample.NumLabel) != 0:
                        for label in sample.NumLabel:
                            if label.key == "pid":
                                pid = label.value[0]
                            elif label.key == "tid":
                                tid = label.value[0]
                            elif label.key == "timestamp":
                                time = label.value[0]
                    handler.handle_record("name", pid, tid, time, duration)
                    time += prof.Period
                    count += 1
                    if not count % 1000:
                        progress.tick(input_file.tell())
                    handler.handle_sample(sample)
                handler.finalize()
    return callbacks.get_result()

def transform_qnx(args):
    tree = default_tree(args)
    tree['ring_buffer'] = True
    args.no_left_overs = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        parser = Parser(args, callbacks)
        count = 0
        with Progress(os.path.getsize(args.input), 50,
                      "Parsing: " + os.path.basename(args.input)) as progress:
            with open(args.input) as file:
                for line in file:
                    parser.on_line(line.strip(), count)
                    count += 1
                    if not count % 1000:
                        progress.tick(file.tell())
        parser.finish()
    return callbacks.get_result()

def transform_log(args):
    from dateutil import parser
    tree = default_tree(args)
    tree['ring_buffer'] = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        log = Log(args, callbacks)
        with open(args.input) as file:
            with Progress(os.path.getsize(args.input), 50,
                          "Parsing: " + os.path.basename(args.input)) as progress:
                count = 0
                header = None
                first_stamp = None
                for line in file:
                    count += 1
                    if not count % 1000:
                        progress.tick(file.tell())
                    parts = line.split()
                    if not header:
                        if len(parts) != 5:
                            continue
                        if not parts[0].startswith('Timestamp'):
                            print("Error: this log format is not supported. Expected log from OSX's 'log stream'")
                        header = parts
                        continue
                    else:
                        time = parser.parse(' '.join(parts[0:2]))
                        if first_stamp:
                            time = int((time - first_stamp).total_seconds() * 10e9)
                        else:
                            first_stamp = time
                            time = 0
                        tid = int(parts[2], 16)
                        type = parts[3]
                        activity = int(parts[4], 16)
                        pid = int(parts[5], 16)
                        msg = parts[6:]
                        log.handle_record(time, pid, tid, type, activity, msg)
    return callbacks.get_result()

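# Hedged illustration of the relative-timestamp computation above: the first
# parsed wall-clock time becomes the zero point and later records are turned
# into an offset via total_seconds(); the 10e9 scale factor is taken verbatim
# from the code above. The timestamps below are fabricated.
def _example_relative_time():
    from dateutil import parser
    first_stamp = parser.parse('2023-01-01 10:00:00.000000')
    later = parser.parse('2023-01-01 10:00:01.500000')
    return int((later - first_stamp).total_seconds() * 10e9)  # 1.5 s -> 15000000000
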
def transform_dtrace(args): tree = default_tree(args) tree['ring_buffer'] = True args.no_left_overs = True gt = get_exporters()['gt'] with Callbacks(args, tree) as callbacks: if callbacks.is_empty(): return callbacks.get_result() dtrace = DTrace(args, gt, callbacks) with Progress(os.path.getsize(args.input), 50, "Parsing: " + os.path.basename(args.input)) as progress: count = 0 with codecs.open(args.input, 'r', 'utf-8', errors='ignore') as file: reading_stack = None stack = [] for line in file: line = line.strip() if not line: if reading_stack: dtrace.handle_stack(*(reading_stack + [stack])) reading_stack = None stack = [] continue if reading_stack: stack.append(line) continue parts = line.split('\t') if parts[1] == 'stack': reading_stack = [ int(parts[0], 16), parts[2], parts[3].rstrip(':') ] continue dtrace.handle_record(int(parts[0], 16), parts[1], parts[2:]) if not count % 1000: progress.tick(file.tell()) count += 1 dtrace.finalize() return callbacks.get_result()
def transform_dtrace(args): tree = default_tree(args) tree['ring_buffer'] = True args.no_left_overs = True gt = get_exporters()['gt'] with Callbacks(args, tree) as callbacks: if callbacks.is_empty(): return callbacks.get_result() dtrace = DTrace(args, gt, callbacks) with Progress(os.path.getsize(args.input), 50, "Parsing: " + os.path.basename(args.input)) as progress: count = 0 with open(args.input) as file: for line in file: line = line.strip() if not line: continue parts = line.split('\t') dtrace.handle_record(int(parts[0], 16), parts[1], parts[2:]) if not count % 1000: progress.tick(file.tell()) count += 1 dtrace.finalize() return callbacks.get_result() + dtrace.get_result()
def transform_ftrace(args):
    tree = default_tree()
    tree['ring_buffer'] = True
    TaskCombiner.disable_handling_leftovers = True
    with Callbacks(args, tree) as callbacks:
        if callbacks.is_empty():
            return callbacks.get_result()
        with Progress(os.path.getsize(args.input), 50,
                      "Parsing: " + os.path.basename(args.input)) as progress:
            count = 0
            with open(args.input) as file:
                handler = FTrace(args, callbacks)
                for line in file:
                    count += 1
                    if line.startswith('#'):
                        continue
                    regular = line[:48].rstrip(' :')
                    payload = line[48:]
                    parts = regular.split()
                    if len(parts) != 4:
                        right = parts[-3:]
                        left = parts[:-3]
                        parts = [' '.join(left)] + right
                    (proc, tid) = parts[0].rsplit('-', 1)
                    cpu = int(parts[1][1:4])
                    flags = parts[2]
                    timestamp = float(parts[3])
                    (name, args) = payload.split(':', 1)
                    handler.handle_record(proc, int(tid), cpu, flags, timestamp,
                                          name.strip(), args.strip())
                    if not count % 1000:
                        progress.tick(file.tell())
                handler.finalize()
    return callbacks.get_result()