def test_simple(self):
    """Looking up (line 1, col 56) in the indexed fixture sourcemap should
    resolve to the ``multiply`` token of ``file2.js``."""
    index = sourcemap_to_index(sourcemap)
    expected = SourceMap(
        dst_line=0,
        dst_col=50,
        src='file2.js',
        src_line=0,
        src_col=9,
        name='multiply',
    )
    assert find_source(index, 1, 56) == expected
def fetch_javascript_source(event, **kwargs):
    """
    Attempt to fetch source code for javascript frames.

    Frames must match the following requirements:

    - lineno >= 0
    - colno >= 0
    - abs_path is the HTTP URI to the source
    - context_line is empty

    Mutates ``event.data`` in place; persists it only when at least one
    frame was actually expanded.
    """
    logger = fetch_javascript_source.get_logger()

    try:
        stacktrace = event.data["sentry.interfaces.Stacktrace"]
    except KeyError:
        logger.debug("No stacktrace for event %r", event.id)
        return

    # build list of frames that we can actually grab source for
    frames = [
        f for f in stacktrace["frames"]
        if f.get("lineno") is not None
        and f.get("context_line") is None
        and f.get("abs_path", "").startswith(("http://", "https://"))
    ]

    if not frames:
        logger.debug("Event %r has no frames with enough context to fetch remote source", event.id)
        return

    file_list = set()
    sourcemap_capable = set()
    source_code = {}
    sourcemaps = {}

    for f in frames:
        file_list.add(f["abs_path"])
        if f.get("colno") is not None:
            sourcemap_capable.add(f["abs_path"])

    while file_list:
        filename = file_list.pop()

        # TODO: respect cache-control/max-age headers to some extent
        logger.debug("Fetching remote source %r", filename)
        result = fetch_url(filename)

        if result == BAD_SOURCE:
            continue

        # If we didn't have a colno, a sourcemap wont do us any good
        if filename not in sourcemap_capable:
            source_code[filename] = (result.body.splitlines(), None)
            continue

        # TODO: we're currently running splitlines twice
        sourcemap = discover_sourcemap(result, logger=logger)
        source_code[filename] = (result.body.splitlines(), sourcemap)
        if sourcemap:
            logger.debug("Found sourcemap %r for minified script %r", sourcemap, result.url)

        # pull down sourcemap
        if sourcemap and sourcemap not in sourcemaps:
            result = fetch_url(sourcemap, logger=logger)
            if result == BAD_SOURCE:
                continue

            index = sourcemap_to_index(result.body)
            sourcemaps[sourcemap] = index

            # queue up additional source files for download
            for source in index.sources:
                if source not in source_code:
                    file_list.add(urljoin(result.url, source))

    has_changes = False
    for frame in frames:
        try:
            source, sourcemap = source_code[frame["abs_path"]]
        except KeyError:
            # we must've failed pulling down the source
            continue

        if frame.get("colno") and sourcemap:
            state = find_source(sourcemaps[sourcemap], frame["lineno"], frame["colno"])
            # TODO: is this urljoin right? (is it relative to the sourcemap or the originating file)
            abs_path = urljoin(sourcemap, state.src)
            logger.debug("Mapping compressed source %r to mapping in %r", frame["abs_path"], abs_path)
            try:
                source, _ = source_code[abs_path]
            except KeyError:
                pass
            else:
                # Store original data in annotation.
                # BUG FIX: use .get() for keys the frame filter above does not
                # guarantee (function/filename may legitimately be absent and
                # previously raised KeyError here).
                frame["data"] = {
                    "orig_lineno": frame["lineno"],
                    "orig_colno": frame["colno"],
                    "orig_function": frame.get("function"),
                    "orig_abs_path": frame["abs_path"],
                    "orig_filename": frame.get("filename"),
                    "sourcemap": sourcemap,
                }

                # SourceMaps return zero-indexed lineno's
                frame["lineno"] = state.src_line + 1
                frame["colno"] = state.src_col
                frame["function"] = state.name
                frame["abs_path"] = abs_path
                frame["filename"] = state.src

        # TODO: theoretically a minified source could point to another mapped, minified source
        frame["pre_context"], frame["context_line"], frame["post_context"] = get_source_context(
            source=source, lineno=int(frame["lineno"])
        )
        # context was (re)computed for this frame, so the event needs saving
        has_changes = True

    # BUG FIX: previously the event was rewritten unconditionally, even when no
    # frame was touched; guard the write like the other variants do.
    if has_changes:
        event.update(data=event.data)
def expand_javascript_source(data, **kwargs):
    """
    Attempt to fetch source code for javascript frames.

    Frames must match the following requirements:

    - lineno >= 0
    - colno >= 0
    - abs_path is the HTTP URI to the source
    - context_line is empty

    Mutates the input ``data`` with expanded context if available.
    """
    from sentry.interfaces.stacktrace import Stacktrace

    try:
        stacktraces = [
            Stacktrace.to_python(e['stacktrace'])
            for e in data['sentry.interfaces.Exception']['values']
            if e.get('stacktrace')
        ]
    except KeyError:
        stacktraces = []

    if not stacktraces:
        logger.debug('No stacktrace for event %r', data['event_id'])
        return

    # build list of frames that we can actually grab source for
    frames = []
    for stacktrace in stacktraces:
        frames.extend([
            f for f in stacktrace.frames
            if f.lineno is not None and f.is_url()
        ])

    if not frames:
        logger.debug('Event %r has no frames with enough context to fetch remote source', data['event_id'])
        return data

    pending_file_list = set()
    done_file_list = set()
    sourcemap_capable = set()
    source_code = {}
    sourcemap_idxs = {}

    for f in frames:
        pending_file_list.add(f.abs_path)
        if f.colno is not None:
            sourcemap_capable.add(f.abs_path)

    while pending_file_list:
        filename = pending_file_list.pop()
        done_file_list.add(filename)

        # TODO: respect cache-control/max-age headers to some extent
        logger.debug('Fetching remote source %r', filename)
        result = fetch_url(filename)
        if result == BAD_SOURCE:
            logger.debug('Bad source file %r', filename)
            continue

        # If we didn't have a colno, a sourcemap wont do us any good
        if filename not in sourcemap_capable:
            logger.debug('Not capable of sourcemap: %r', filename)
            source_code[filename] = (result.body.splitlines(), None, None)
            continue

        sourcemap = discover_sourcemap(result)

        # TODO: we're currently running splitlines twice
        if not sourcemap:
            source_code[filename] = (result.body.splitlines(), None, None)
            for f in frames:
                if not f.module and f.abs_path == filename:
                    f.module = generate_module(filename)
            continue
        else:
            logger.debug('Found sourcemap %r for minified script %r', sourcemap[:256], result.url)

        # NOTE(review): the cache key/base is derived from the *script* URL,
        # not the sourcemap value (presumably so huge data-URI sourcemaps stay
        # out of the key) -- confirm this is intentional.
        sourcemap_url = result.url[:1000]
        sourcemap_key = hashlib.md5(sourcemap_url).hexdigest()

        source_code[filename] = (result.body.splitlines(), sourcemap_url, sourcemap_key)

        # BUG FIX: the index cache is keyed by ``sourcemap_key`` (an md5 hex
        # digest); the old membership test used the raw ``sourcemap`` value,
        # never matched, and re-fetched/re-parsed the sourcemap for every
        # script that referenced it.
        if sourcemap_key in sourcemap_idxs:
            continue

        # pull down sourcemap
        index = fetch_sourcemap(sourcemap)
        if not index:
            logger.debug('Failed parsing sourcemap index: %r', sourcemap[:15])
            continue

        sourcemap_idxs[sourcemap_key] = (index, sourcemap_url)

        # queue up additional source files for download
        for source in index.sources:
            next_filename = urljoin(sourcemap_url, source)
            if next_filename not in done_file_list:
                if index.content:
                    # inline sourcesContent -- no fetch needed
                    source_code[next_filename] = (index.content[source], None, None)
                    done_file_list.add(next_filename)
                else:
                    pending_file_list.add(next_filename)

    last_state = None
    state = None
    has_changes = False

    for frame in frames:
        try:
            source, sourcemap_url, sourcemap_key = source_code[frame.abs_path]
        except KeyError:
            # we must've failed pulling down the source
            continue

        # may have had a failure pulling down the sourcemap previously
        if sourcemap_key in sourcemap_idxs and frame.colno is not None:
            index, relative_to = sourcemap_idxs[sourcemap_key]
            last_state = state
            state = find_source(index, frame.lineno, frame.colno)
            abs_path = urljoin(relative_to, state.src)
            logger.debug('Mapping compressed source %r to mapping in %r', frame.abs_path, abs_path)
            try:
                source, _, _ = source_code[abs_path]
            except KeyError:
                frame.data = {
                    'sourcemap': sourcemap_url,
                }
                logger.debug('Failed mapping path %r', abs_path)
            else:
                # Store original data in annotation
                frame.data = {
                    'orig_lineno': frame.lineno,
                    'orig_colno': frame.colno,
                    'orig_function': frame.function,
                    'orig_abs_path': frame.abs_path,
                    'orig_filename': frame.filename,
                    'sourcemap': sourcemap_url,
                }

                # SourceMaps return zero-indexed lineno's
                frame.lineno = state.src_line + 1
                frame.colno = state.src_col
                # The offending function is always the previous function in the stack
                # Honestly, no idea what the bottom most frame is, so we're ignoring that atm
                if last_state:
                    frame.function = last_state.name or frame.function
                else:
                    frame.function = state.name or frame.function
                frame.abs_path = abs_path
                frame.filename = state.src
                frame.module = generate_module(state.src)
        elif sourcemap_key in sourcemap_idxs:
            frame.data = {
                'sourcemap': sourcemap_url,
            }

        has_changes = True

        # TODO: theoretically a minified source could point to another mapped, minified source
        frame.pre_context, frame.context_line, frame.post_context = get_source_context(
            source=source, lineno=frame.lineno, colno=frame.colno or 0)

    if has_changes:
        logger.debug('Updating stacktraces with expanded source context')
        for exception, stacktrace in itertools.izip(data['sentry.interfaces.Exception']['values'], stacktraces):
            exception['stacktrace'] = stacktrace.to_json()

        # Attempt to fix the culprit now that we have potentially useful information
        culprit_frame = stacktraces[0].frames[-1]
        if culprit_frame.module and culprit_frame.function:
            data['culprit'] = truncatechars(generate_culprit(culprit_frame), MAX_CULPRIT_LENGTH)
def expand_javascript_source(data, **kwargs):
    """
    Attempt to fetch source code for javascript frames.

    Frames must match the following requirements:

    - lineno >= 0
    - colno >= 0
    - abs_path is the HTTP URI to the source
    - context_line is empty

    Mutates the input ``data`` with expanded context if available.
    """
    from sentry.interfaces import Stacktrace

    try:
        stacktraces = [
            Stacktrace(**e['stacktrace'])
            for e in data['sentry.interfaces.Exception']['values']
            if e.get('stacktrace')
        ]
    except KeyError:
        stacktraces = []

    if not stacktraces:
        logger.debug('No stacktrace for event %r', data['event_id'])
        return

    # build list of frames that we can actually grab source for
    frames = []
    for stacktrace in stacktraces:
        frames.extend([
            f for f in stacktrace.frames
            if f.lineno is not None and f.is_url()
        ])

    if not frames:
        logger.debug(
            'Event %r has no frames with enough context to fetch remote source',
            data['event_id'])
        return data

    pending_file_list = set()
    done_file_list = set()
    sourcemap_capable = set()
    source_code = {}
    sourmap_idxs = {}

    for f in frames:
        pending_file_list.add(f.abs_path)
        if f.colno is not None:
            sourcemap_capable.add(f.abs_path)

    while pending_file_list:
        filename = pending_file_list.pop()
        done_file_list.add(filename)

        # TODO: respect cache-control/max-age headers to some extent
        logger.debug('Fetching remote source %r', filename)
        result = fetch_url(filename)
        if result == BAD_SOURCE:
            logger.debug('Bad source file %r', filename)
            continue

        # If we didn't have a colno, a sourcemap wont do us any good
        if filename not in sourcemap_capable:
            logger.debug('Not capable of sourcemap: %r', filename)
            source_code[filename] = (result.body.splitlines(), None)
            continue

        sourcemap = discover_sourcemap(result)
        source_code[filename] = (result.body.splitlines(), sourcemap)

        # TODO: we're currently running splitlines twice
        # BUG FIX: the old ``if sourcemap: log(...)`` / ``elif sourcemap in
        # sourmap_idxs or not sourcemap: continue`` shape made the dedup branch
        # unreachable for any truthy sourcemap, so already-indexed sourcemaps
        # were re-fetched and re-parsed for every script referencing them.
        if not sourcemap:
            continue
        logger.debug('Found sourcemap %r for minified script %r', sourcemap, result.url)
        if sourcemap in sourmap_idxs:
            continue

        # pull down sourcemap
        index = fetch_sourcemap(sourcemap)
        if not index:
            logger.debug('Failed parsing sourcemap index: %r', sourcemap[:15])
            continue

        sourmap_idxs[sourcemap] = index

        # queue up additional source files for download
        for source in index.sources:
            next_filename = urljoin(result.url, source)
            if next_filename not in done_file_list:
                pending_file_list.add(next_filename)

    has_changes = False
    for frame in frames:
        try:
            source, sourcemap = source_code[frame.abs_path]
        except KeyError:
            # we must've failed pulling down the source
            continue

        # may have had a failure pulling down the sourcemap previously
        if sourcemap in sourmap_idxs and frame.colno is not None:
            state = find_source(sourmap_idxs[sourcemap], frame.lineno, frame.colno)
            # TODO: is this urljoin right? (is it relative to the sourcemap or the originating file)
            abs_path = urljoin(sourcemap, state.src)
            logger.debug('Mapping compressed source %r to mapping in %r', frame.abs_path, abs_path)
            try:
                source, _ = source_code[abs_path]
            except KeyError:
                frame.data = {
                    'sourcemap': sourcemap,
                }
                logger.debug('Failed mapping path %r', abs_path)
            else:
                # Store original data in annotation.
                # BUG FIX: frames are interface objects here, not dicts --
                # ``frame['lineno']`` raised TypeError; use attribute access.
                frame.data = {
                    'orig_lineno': frame.lineno,
                    'orig_colno': frame.colno,
                    'orig_function': frame.function,
                    'orig_abs_path': frame.abs_path,
                    'orig_filename': frame.filename,
                    'sourcemap': sourcemap,
                }

                # SourceMaps return zero-indexed lineno's
                frame.lineno = state.src_line + 1
                frame.colno = state.src_col
                frame.function = state.name
                frame.abs_path = abs_path
                frame.filename = state.src
                has_changes = True

        # TODO: theoretically a minified source could point to another mapped, minified source
        frame.pre_context, frame.context_line, frame.post_context = get_source_context(
            source=source, lineno=frame.lineno)

    if has_changes:
        logger.debug('Updating stacktraces with expanded source context')
        for exception, stacktrace in itertools.izip(
                data['sentry.interfaces.Exception']['values'], stacktraces):
            exception['stacktrace'] = stacktrace.serialize()
def fetch_javascript_source(event, **kwargs):
    """
    Attempt to fetch source code for javascript frames.

    Frames must match the following requirements:

    - lineno >= 0
    - colno >= 0
    - abs_path is the HTTP URI to the source
    - context_line is empty
    """
    logger = fetch_javascript_source.get_logger()
    # BUG FIX: this previously called ``logger.setLevel`` and
    # ``logger.addHandler(logging.StreamHandler())`` on every invocation,
    # stacking a new handler per call and duplicating every log line.
    # Handler/level configuration belongs to the application's logging setup,
    # not a task body.

    try:
        stacktrace = event.data['sentry.interfaces.Stacktrace']
    except KeyError:
        logger.info('No stacktrace for event %r', event.id)
        return

    # build list of frames that we can actually grab source for
    frames = [f for f in stacktrace['frames']
              if f.get('lineno') is not None
              and f.get('colno') is not None
              and f.get('abs_path', '').startswith(('http://', 'https://'))
              and f.get('context_line') is None]

    if not frames:
        logger.info('Event %r has no frames with enough context to fetch remote source', event.id)
        return

    file_list = set(f['abs_path'] for f in frames)
    source_code = {}
    sourcemaps = {}

    while file_list:
        filename = file_list.pop()

        # TODO: respect cache-control/max-age headers to some extent
        result = fetch_url(filename)
        if result == BAD_SOURCE:
            continue

        # TODO: we're currently running splitlines twice
        sourcemap = discover_sourcemap(result, logger=logger)
        source_code[filename] = (result.body.splitlines(), sourcemap)
        if sourcemap:
            logger.info('Found sourcemap %r for minified script %r', sourcemap, result.url)

        # pull down sourcemap
        if sourcemap and sourcemap not in sourcemaps:
            result = fetch_url(sourcemap, logger=logger)
            if result == BAD_SOURCE:
                continue

            index = sourcemap_to_index(result.body)
            sourcemaps[sourcemap] = index

            # queue up additional source files for download
            for source in index.sources:
                if source not in source_code:
                    file_list.add(urljoin(result.url, source))

    for frame in frames:
        try:
            source, sourcemap = source_code[frame['abs_path']]
        except KeyError:
            # we must've failed pulling down the source
            continue

        if sourcemap:
            state = find_source(sourcemaps[sourcemap], frame['lineno'], frame['colno'])
            # TODO: is this urljoin right? (is it relative to the sourcemap or the originating file)
            abs_path = urljoin(sourcemap, state.src)
            try:
                source, _ = source_code[abs_path]
            except KeyError:
                pass
            else:
                # SourceMaps return zero-indexed lineno's
                frame['lineno'] = state.src_line + 1
                frame['colno'] = state.src_col
                # BUG FIX: the mapped symbol was stored under ``frame['name']``;
                # stacktrace frames carry it as ``function`` everywhere else in
                # this file, so 'name' was silently ignored downstream.
                frame['function'] = state.name
                frame['abs_path'] = abs_path
                frame['filename'] = state.src

        # TODO: theoretically a minified source could point to another mapped, minified source
        frame['pre_context'], frame['context_line'], frame['post_context'] = get_source_context(
            source=source, lineno=int(frame['lineno']))

    event.save()
def expand_javascript_source(data, **kwargs):
    """
    Attempt to fetch source code for javascript frames.

    Frames must match the following requirements:

    - lineno >= 0
    - colno >= 0
    - abs_path is the HTTP URI to the source
    - context_line is empty

    Mutates the input ``data`` with expanded context if available.
    """
    from sentry.interfaces import Stacktrace

    try:
        stacktraces = [
            Stacktrace(**e['stacktrace'])
            for e in data['sentry.interfaces.Exception']['values']
            if e.get('stacktrace')
        ]
    except KeyError:
        stacktraces = []

    if not stacktraces:
        logger.debug('No stacktrace for event %r', data['event_id'])
        return

    # build list of frames that we can actually grab source for
    frames = []
    for stacktrace in stacktraces:
        frames.extend([
            f for f in stacktrace.frames
            if f.lineno is not None and f.is_url()
        ])

    if not frames:
        logger.debug(
            'Event %r has no frames with enough context to fetch remote source',
            data['event_id'])
        return data

    pending_file_list = set()
    done_file_list = set()
    sourcemap_capable = set()
    source_code = {}
    sourmap_idxs = {}

    for f in frames:
        pending_file_list.add(f.abs_path)
        if f.colno is not None:
            sourcemap_capable.add(f.abs_path)

    while pending_file_list:
        filename = pending_file_list.pop()
        done_file_list.add(filename)

        # TODO: respect cache-control/max-age headers to some extent
        logger.debug('Fetching remote source %r', filename)
        result = fetch_url(filename)
        if result == BAD_SOURCE:
            logger.debug('Bad source file %r', filename)
            continue

        # If we didn't have a colno, a sourcemap wont do us any good
        if filename not in sourcemap_capable:
            logger.debug('Not capable of sourcemap: %r', filename)
            source_code[filename] = (result.body.splitlines(), None)
            continue

        sourcemap = discover_sourcemap(result)

        # TODO: we're currently running splitlines twice
        if not sourcemap:
            source_code[filename] = (result.body.splitlines(), None)
            for f in frames:
                if f.abs_path == filename:
                    f.module = generate_module(filename)
            continue
        else:
            logger.debug('Found sourcemap %r for minified script %r', sourcemap[:256], result.url)

        # the sourcemap value may be a huge data-URI, so index by a digest
        sourcemap_key = hashlib.md5(sourcemap).hexdigest()

        source_code[filename] = (result.body.splitlines(), sourcemap_key)

        # BUG FIX: the index cache is keyed by ``sourcemap_key`` (md5 digest);
        # the old membership test used the raw ``sourcemap`` value, never
        # matched, and re-fetched/re-parsed the sourcemap for every script.
        if sourcemap_key in sourmap_idxs:
            continue

        # pull down sourcemap
        index = fetch_sourcemap(sourcemap)
        if not index:
            logger.debug('Failed parsing sourcemap index: %r', sourcemap[:15])
            continue

        # relative source paths can't be resolved against a data: URI, so fall
        # back to the script URL in that case
        if is_data_uri(sourcemap):
            relative_to = result.url
        else:
            relative_to = sourcemap

        sourmap_idxs[sourcemap_key] = (index, relative_to)

        # queue up additional source files for download
        # BUG FIX: queue with the same base used when mapping frames
        # (``relative_to``), not the raw sourcemap value -- urljoin against a
        # data: URI does not resolve, so queued names never matched the
        # abs_paths computed below and the mapping always failed.
        for source in index.sources:
            next_filename = urljoin(relative_to, source)
            if next_filename not in done_file_list:
                if index.content:
                    source_code[next_filename] = (index.content[source], None)
                    done_file_list.add(next_filename)
                else:
                    pending_file_list.add(next_filename)

    last_state = None
    state = None
    has_changes = False

    for frame in frames:
        try:
            source, sourcemap_key = source_code[frame.abs_path]
        except KeyError:
            # we must've failed pulling down the source
            continue

        # may have had a failure pulling down the sourcemap previously
        if sourcemap_key in sourmap_idxs and frame.colno is not None:
            index, relative_to = sourmap_idxs[sourcemap_key]
            last_state = state
            state = find_source(index, frame.lineno, frame.colno)
            abs_path = urljoin(relative_to, state.src)
            logger.debug('Mapping compressed source %r to mapping in %r', frame.abs_path, abs_path)
            try:
                source, _ = source_code[abs_path]
            except KeyError:
                frame.data = {
                    'sourcemap': sourcemap_key,
                }
                logger.debug('Failed mapping path %r', abs_path)
            else:
                # Store original data in annotation
                frame.data = {
                    'orig_lineno': frame.lineno,
                    'orig_colno': frame.colno,
                    'orig_function': frame.function,
                    'orig_abs_path': frame.abs_path,
                    'orig_filename': frame.filename,
                    'sourcemap': sourcemap_key,
                }

                # SourceMaps return zero-indexed lineno's
                frame.lineno = state.src_line + 1
                frame.colno = state.src_col
                # The offending function is always the previous function in the stack
                # Honestly, no idea what the bottom most frame is, so we're ignoring that atm
                frame.function = last_state.name if last_state else state.name
                frame.abs_path = abs_path
                frame.filename = state.src
                frame.module = generate_module(state.src) or '<unknown module>'
        elif sourcemap_key in sourmap_idxs:
            frame.data = {
                'sourcemap': sourcemap_key,
            }

        has_changes = True

        # TODO: theoretically a minified source could point to another mapped, minified source
        frame.pre_context, frame.context_line, frame.post_context = get_source_context(
            source=source, lineno=frame.lineno)

    if has_changes:
        logger.debug('Updating stacktraces with expanded source context')
        for exception, stacktrace in itertools.izip(
                data['sentry.interfaces.Exception']['values'], stacktraces):
            exception['stacktrace'] = stacktrace.serialize()

        # Attempt to fix the culprit now that we have useful information
        culprit_frame = stacktraces[0].frames[-1]
        if culprit_frame.module and culprit_frame.function:
            data['culprit'] = truncatechars(generate_culprit(culprit_frame), MAX_CULPRIT_LENGTH)
def expand_javascript_source(data, **kwargs):
    """
    Attempt to fetch source code for javascript frames.

    Frames must match the following requirements:

    - lineno >= 0
    - colno >= 0
    - abs_path is the HTTP URI to the source
    - context_line is empty

    Mutates the input ``data`` with expanded context if available.
    """
    from sentry.interfaces import Stacktrace

    try:
        # ROBUSTNESS FIX: skip exception values without a stacktrace instead of
        # crashing on ``Stacktrace(**None)`` or discarding every stacktrace
        # when a single value lacks the key.
        stacktraces = [
            Stacktrace(**e["stacktrace"])
            for e in data["sentry.interfaces.Exception"]["values"]
            if e.get("stacktrace")
        ]
    except KeyError:
        stacktraces = []

    if not stacktraces:
        logger.debug("No stacktrace for event %r", data["event_id"])
        return

    # build list of frames that we can actually grab source for
    frames = []
    for stacktrace in stacktraces:
        frames.extend([f for f in stacktrace.frames if f.lineno is not None and f.is_url()])

    if not frames:
        logger.debug("Event %r has no frames with enough context to fetch remote source", data["event_id"])
        return data

    pending_file_list = set()
    done_file_list = set()
    sourcemap_capable = set()
    source_code = {}
    sourmap_idxs = {}

    for f in frames:
        pending_file_list.add(f.abs_path)
        if f.colno is not None:
            sourcemap_capable.add(f.abs_path)

    while pending_file_list:
        filename = pending_file_list.pop()
        done_file_list.add(filename)

        # TODO: respect cache-control/max-age headers to some extent
        logger.debug("Fetching remote source %r", filename)
        result = fetch_url(filename)
        if result == BAD_SOURCE:
            logger.debug("Bad source file %r", filename)
            continue

        # If we didn't have a colno, a sourcemap wont do us any good
        if filename not in sourcemap_capable:
            logger.debug("Not capable of sourcemap: %r", filename)
            source_code[filename] = (result.body.splitlines(), None)
            continue

        sourcemap = discover_sourcemap(result)
        source_code[filename] = (result.body.splitlines(), sourcemap)

        # TODO: we're currently running splitlines twice
        # BUG FIX: the old ``if sourcemap: log(...)`` / ``elif sourcemap in
        # sourmap_idxs or not sourcemap: continue`` shape made the dedup branch
        # unreachable for any truthy sourcemap, so already-indexed sourcemaps
        # were re-fetched and re-parsed for every script referencing them.
        if not sourcemap:
            continue
        logger.debug("Found sourcemap %r for minified script %r", sourcemap, result.url)
        if sourcemap in sourmap_idxs:
            continue

        # pull down sourcemap
        index = fetch_sourcemap(sourcemap)
        if not index:
            logger.debug("Failed parsing sourcemap index: %r", sourcemap[:15])
            continue

        sourmap_idxs[sourcemap] = index

        # queue up additional source files for download
        for source in index.sources:
            next_filename = urljoin(result.url, source)
            if next_filename not in done_file_list:
                pending_file_list.add(next_filename)

    has_changes = False
    for frame in frames:
        try:
            source, sourcemap = source_code[frame.abs_path]
        except KeyError:
            # we must've failed pulling down the source
            continue

        # may have had a failure pulling down the sourcemap previously
        if sourcemap in sourmap_idxs and frame.colno is not None:
            state = find_source(sourmap_idxs[sourcemap], frame.lineno, frame.colno)
            # TODO: is this urljoin right? (is it relative to the sourcemap or the originating file)
            abs_path = urljoin(sourcemap, state.src)
            logger.debug("Mapping compressed source %r to mapping in %r", frame.abs_path, abs_path)
            try:
                source, _ = source_code[abs_path]
            except KeyError:
                logger.debug("Failed mapping path %r", abs_path)
            else:
                # Store original data in annotation.
                # BUG FIX: frames are interface objects here, not dicts --
                # ``frame["lineno"]`` raised TypeError; use attribute access.
                frame.data = {
                    "orig_lineno": frame.lineno,
                    "orig_colno": frame.colno,
                    "orig_function": frame.function,
                    "orig_abs_path": frame.abs_path,
                    "orig_filename": frame.filename,
                    "sourcemap": sourcemap,
                }

                # SourceMaps return zero-indexed lineno's
                frame.lineno = state.src_line + 1
                frame.colno = state.src_col
                frame.function = state.name
                frame.abs_path = abs_path
                frame.filename = state.src
                has_changes = True

        # TODO: theoretically a minified source could point to another mapped, minified source
        frame.pre_context, frame.context_line, frame.post_context = get_source_context(
            source=source, lineno=frame.lineno
        )

    if has_changes:
        logger.debug("Updating stacktraces with expanded source context")
        for exception, stacktrace in itertools.izip(data["sentry.interfaces.Exception"]["values"], stacktraces):
            exception["stacktrace"] = stacktrace.serialize()
def expand_javascript_source(data, **kwargs):
    """
    Attempt to fetch source code for javascript frames.

    Frames must match the following requirements:

    - lineno >= 0
    - colno >= 0
    - abs_path is the HTTP URI to the source
    - context_line is empty

    Mutates the input ``data`` with expanded context if available.
    """
    from sentry.interfaces import Stacktrace

    try:
        stacktrace = Stacktrace(**data['sentry.interfaces.Stacktrace'])
    except KeyError:
        logger.debug('No stacktrace for event %r', data['event_id'])
        return

    # build list of frames that we can actually grab source for
    frames = [
        f for f in stacktrace.frames
        if f.lineno is not None and f.is_url()
    ]

    if not frames:
        logger.debug('Event %r has no frames with enough context to fetch remote source', data['event_id'])
        return data

    file_list = set()
    sourcemap_capable = set()
    source_code = {}
    sourcemaps = {}

    for f in frames:
        file_list.add(f.abs_path)
        if f.colno is not None:
            sourcemap_capable.add(f.abs_path)

    while file_list:
        filename = file_list.pop()

        # TODO: respect cache-control/max-age headers to some extent
        logger.debug('Fetching remote source %r', filename)
        result = fetch_url(filename)
        if result == BAD_SOURCE:
            continue

        # If we didn't have a colno, a sourcemap wont do us any good
        if filename not in sourcemap_capable:
            source_code[filename] = (result.body.splitlines(), None)
            continue

        # TODO: we're currently running splitlines twice
        sourcemap = discover_sourcemap(result, logger=logger)
        source_code[filename] = (result.body.splitlines(), sourcemap)
        if sourcemap:
            logger.debug('Found sourcemap %r for minified script %r', sourcemap, result.url)

        # pull down sourcemap
        if sourcemap and sourcemap not in sourcemaps:
            index = fetch_sourcemap(sourcemap, logger=logger)
            if not index:
                continue

            sourcemaps[sourcemap] = index

            # queue up additional source files for download
            for source in index.sources:
                if source not in source_code:
                    file_list.add(urljoin(result.url, source))

    has_changes = False
    for frame in frames:
        try:
            source, sourcemap = source_code[frame.abs_path]
        except KeyError:
            # we must've failed pulling down the source
            continue

        # may have had a failure pulling down the sourcemap previously
        if sourcemap in sourcemaps and frame.colno is not None:
            state = find_source(sourcemaps[sourcemap], frame.lineno, frame.colno)
            # TODO: is this urljoin right? (is it relative to the sourcemap or the originating file)
            abs_path = urljoin(sourcemap, state.src)
            logger.debug('Mapping compressed source %r to mapping in %r', frame.abs_path, abs_path)
            try:
                source, _ = source_code[abs_path]
            except KeyError:
                pass
            else:
                # Store original data in annotation.
                # BUG FIX: frames are interface objects here, not dicts --
                # ``frame['lineno']`` raised TypeError; use attribute access.
                frame.data = {
                    'orig_lineno': frame.lineno,
                    'orig_colno': frame.colno,
                    'orig_function': frame.function,
                    'orig_abs_path': frame.abs_path,
                    'orig_filename': frame.filename,
                    'sourcemap': sourcemap,
                }

                # SourceMaps return zero-indexed lineno's
                frame.lineno = state.src_line + 1
                frame.colno = state.src_col
                frame.function = state.name
                frame.abs_path = abs_path
                frame.filename = state.src
                has_changes = True

        # TODO: theoretically a minified source could point to another mapped, minified source
        frame.pre_context, frame.context_line, frame.post_context = get_source_context(
            source=source, lineno=frame.lineno)

    if has_changes:
        data['sentry.interfaces.Stacktrace'] = stacktrace.serialize()
def fetch_javascript_source(event, **kwargs):
    """
    Attempt to fetch source code for javascript frames.

    Frames must match the following requirements:

    - lineno >= 0
    - colno >= 0
    - abs_path is the HTTP URI to the source
    - context_line is empty
    """
    logger = fetch_javascript_source.get_logger()

    try:
        stacktrace = event.data['sentry.interfaces.Stacktrace']
    except KeyError:
        logger.debug('No stacktrace for event %r', event.id)
        return

    # build list of frames that we can actually grab source for
    frames = [f for f in stacktrace['frames']
              if f.get('lineno') is not None
              and f.get('context_line') is None
              and f.get('abs_path', '').startswith(('http://', 'https://'))]

    if not frames:
        logger.debug('Event %r has no frames with enough context to fetch remote source', event.id)
        return

    file_list = set()
    sourcemap_capable = set()
    source_code = {}
    sourcemaps = {}

    for f in frames:
        file_list.add(f['abs_path'])
        if f.get('colno') is not None:
            sourcemap_capable.add(f['abs_path'])

    while file_list:
        filename = file_list.pop()

        # TODO: respect cache-control/max-age headers to some extent
        logger.debug('Fetching remote source %r', filename)
        result = fetch_url(filename)

        if result == BAD_SOURCE:
            continue

        # If we didn't have a colno, a sourcemap wont do us any good
        if filename not in sourcemap_capable:
            source_code[filename] = (result.body.splitlines(), None)
            continue

        # TODO: we're currently running splitlines twice
        sourcemap = discover_sourcemap(result, logger=logger)
        source_code[filename] = (result.body.splitlines(), sourcemap)
        if sourcemap:
            logger.debug('Found sourcemap %r for minified script %r', sourcemap, result.url)

        # pull down sourcemap
        if sourcemap and sourcemap not in sourcemaps:
            result = fetch_url(sourcemap, logger=logger)
            if result == BAD_SOURCE:
                continue

            body = result.body
            # According to the spec (https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.h7yy76c5il9v)
            # a SourceMap may be prepended with ")]}'" to cause a Javascript error.
            # If the file starts with that string, ignore the entire first line.
            # ROBUSTNESS FIX: ``body.split('\n', 1)[1]`` raised IndexError when
            # the body contained no newline; partition() yields '' instead.
            if body.startswith(")]}'"):
                body = body.partition('\n')[2]

            index = sourcemap_to_index(body)
            sourcemaps[sourcemap] = index

            # queue up additional source files for download
            for source in index.sources:
                if source not in source_code:
                    file_list.add(urljoin(result.url, source))

    has_changes = False
    for frame in frames:
        try:
            source, sourcemap = source_code[frame['abs_path']]
        except KeyError:
            # we must've failed pulling down the source
            continue

        if frame.get('colno') and sourcemap:
            state = find_source(sourcemaps[sourcemap], frame['lineno'], frame['colno'])
            # TODO: is this urljoin right? (is it relative to the sourcemap or the originating file)
            abs_path = urljoin(sourcemap, state.src)
            logger.debug('Mapping compressed source %r to mapping in %r', frame['abs_path'], abs_path)
            try:
                source, _ = source_code[abs_path]
            except KeyError:
                pass
            else:
                # Store original data in annotation.
                # BUG FIX: use .get() for keys the frame filter above does not
                # guarantee (function/filename may legitimately be absent and
                # previously raised KeyError here).
                frame['data'] = {
                    'orig_lineno': frame['lineno'],
                    'orig_colno': frame['colno'],
                    'orig_function': frame.get('function'),
                    'orig_abs_path': frame['abs_path'],
                    'orig_filename': frame.get('filename'),
                    'sourcemap': sourcemap,
                }

                # SourceMaps return zero-indexed lineno's
                frame['lineno'] = state.src_line + 1
                frame['colno'] = state.src_col
                frame['function'] = state.name
                frame['abs_path'] = abs_path
                frame['filename'] = state.src
                has_changes = True

        # TODO: theoretically a minified source could point to another mapped, minified source
        frame['pre_context'], frame['context_line'], frame['post_context'] = get_source_context(
            source=source, lineno=int(frame['lineno']))

    if has_changes:
        event.update(data=event.data)
def fetch_javascript_source(event, **kwargs):
    """
    Attempt to fetch source code for javascript frames.

    Frames must match the following requirements:

    - lineno >= 0
    - colno >= 0
    - abs_path is the HTTP URI to the source
    - context_line is empty
    """
    logger = fetch_javascript_source.get_logger()

    try:
        stacktrace = event.data['sentry.interfaces.Stacktrace']
    except KeyError:
        logger.debug('No stacktrace for event %r', event.id)
        return

    # Only frames with a line number, no pre-resolved context, and an http(s)
    # abs_path are candidates for remote expansion.
    frames = [
        frame for frame in stacktrace['frames']
        if frame.get('lineno') is not None
        and frame.get('context_line') is None
        and frame.get('abs_path', '').startswith(('http://', 'https://'))
    ]

    if not frames:
        logger.debug(
            'Event %r has no frames with enough context to fetch remote source',
            event.id)
        return

    fetch_queue = set(frame['abs_path'] for frame in frames)
    # only frames carrying a column can be mapped through a sourcemap
    sourcemap_capable = set(
        frame['abs_path'] for frame in frames
        if frame.get('colno') is not None)

    source_code = {}
    sourcemaps = {}

    while fetch_queue:
        filename = fetch_queue.pop()

        # TODO: respect cache-control/max-age headers to some extent
        logger.debug('Fetching remote source %r', filename)
        result = fetch_url(filename)

        if result == BAD_SOURCE:
            continue

        # Without a column there is nothing a sourcemap could refine, so stash
        # the raw source and move on.
        if filename not in sourcemap_capable:
            source_code[filename] = (result.body.splitlines(), None)
            continue

        # TODO: we're currently running splitlines twice
        sourcemap = discover_sourcemap(result, logger=logger)
        source_code[filename] = (result.body.splitlines(), sourcemap)
        if sourcemap:
            logger.debug('Found sourcemap %r for minified script %r', sourcemap, result.url)

        # nothing to pull down, or already indexed
        if not sourcemap or sourcemap in sourcemaps:
            continue

        result = fetch_url(sourcemap, logger=logger)
        if result == BAD_SOURCE:
            continue

        index = sourcemap_to_index(result.body)
        sourcemaps[sourcemap] = index

        # enqueue the original sources the map refers to
        for source in index.sources:
            if source not in source_code:
                fetch_queue.add(urljoin(result.url, source))

    has_changes = False
    for frame in frames:
        try:
            source, sourcemap = source_code[frame['abs_path']]
        except KeyError:
            # we must've failed pulling down the source
            continue

        if frame.get('colno') and sourcemap:
            state = find_source(
                sourcemaps[sourcemap], frame['lineno'], frame['colno'])
            # TODO: is this urljoin right? (is it relative to the sourcemap or the originating file)
            abs_path = urljoin(sourcemap, state.src)
            logger.debug('Mapping compressed source %r to mapping in %r', frame['abs_path'], abs_path)
            try:
                source, _ = source_code[abs_path]
            except KeyError:
                # the mapped original source never made it down; leave frame as-is
                pass
            else:
                # keep the pre-mapping values around as an annotation
                frame['data'] = {
                    'orig_lineno': frame['lineno'],
                    'orig_colno': frame['colno'],
                    'orig_function': frame['function'],
                    'orig_abs_path': frame['abs_path'],
                    'orig_filename': frame['filename'],
                    'sourcemap': sourcemap,
                }

                # sourcemap line numbers are zero-indexed
                frame['lineno'] = state.src_line + 1
                frame['colno'] = state.src_col
                frame['function'] = state.name
                frame['abs_path'] = abs_path
                frame['filename'] = state.src
                has_changes = True

        # TODO: theoretically a minified source could point to another mapped, minified source
        frame['pre_context'], frame['context_line'], frame['post_context'] = \
            get_source_context(source=source, lineno=int(frame['lineno']))

    if has_changes:
        event.update(data=event.data)