def get_children_variables(self, fmt=None):
    """Return the child _ObjectVariables for this object's value.

    A value without a resolver is not a container and yields an empty list.
    Children come either from the resolver's debug-adapter protocol (which
    must return them already sorted, with evaluate names) or from its plain
    dictionary (sorted here, with no evaluate names).
    """
    _, _, resolver = get_type(self.value)
    result = []
    if resolver is None:
        # Not a container: nothing to expand.
        return result

    if hasattr(resolver, 'get_contents_debug_adapter_protocol'):
        # The get_contents_debug_adapter_protocol needs to return sorted.
        entries = resolver.get_contents_debug_adapter_protocol(self.value, fmt=fmt)
    else:
        # If there's no special implementation, the default is sorting the keys.
        pairs = dict_items(resolver.get_dictionary(self.value))
        pairs.sort(key=lambda item: sorted_attributes_key(item[0]))
        # No evaluate name in this case.
        entries = [(k, v, None) for (k, v) in pairs]

    parent_evaluate_name = self.evaluate_name
    if parent_evaluate_name:
        for child_key, child_val, eval_name in entries:
            if eval_name is not None:
                # An entry carries either a callable or a suffix to append
                # to the parent's evaluate name.
                if callable(eval_name):
                    eval_name = eval_name(parent_evaluate_name)
                else:
                    eval_name = parent_evaluate_name + eval_name
            result.append(_ObjectVariable(
                child_key, child_val, self._register_variable, evaluate_name=eval_name))
    else:
        for child_key, child_val, _ignored in entries:
            # No evaluate name available without a parent name.
            result.append(_ObjectVariable(child_key, child_val, self._register_variable))

    return result
def _schedule_callback(prev, next):
    '''
    Called when a context is stopped or a new context is made runnable.

    :param prev: the tasklet being suspended (may be None/falsy).
    :param next: the tasklet about to run (may be None/falsy).

    Note: `next` shadows the builtin, but the name is fixed by the stackless
    schedule-callback signature.
    '''
    try:
        if not prev and not next:
            return

        if next:
            register_tasklet_info(next)

            # Ok, making next runnable: set the tracing facility in it.
            debugger = get_global_debugger()
            if debugger is not None and next.frame:
                if hasattr(next.frame, 'f_trace'):
                    next.frame.f_trace = debugger.trace_dispatch
            debugger = None

        if prev:
            register_tasklet_info(prev)

        try:
            for tasklet_ref, tasklet_info in dict_items(_weak_tasklet_registered_to_info):  # Make sure it's a copy!
                tasklet = tasklet_ref()
                if tasklet is None or not tasklet.alive:
                    # Garbage-collected already!
                    try:
                        del _weak_tasklet_registered_to_info[tasklet_ref]
                    except KeyError:
                        pass
                    if tasklet_info.frame_id is not None:
                        remove_custom_frame(tasklet_info.frame_id)
                else:
                    if tasklet.paused or tasklet.blocked or tasklet.scheduled:
                        if tasklet.frame and tasklet.frame.f_back:
                            f_back = tasklet.frame.f_back
                            base = get_abs_path_real_path_and_base_from_frame(f_back)[-1]
                            # Consistency fix: use the plain `in` membership test
                            # (as the sibling version of this callback does) instead
                            # of the dict_contains compatibility helper.
                            is_file_to_ignore = base in DONT_TRACE
                            if not is_file_to_ignore:
                                if tasklet_info.frame_id is None:
                                    tasklet_info.frame_id = add_custom_frame(f_back, tasklet_info.tasklet_name, tasklet.thread_id)
                                else:
                                    update_custom_frame(tasklet_info.frame_id, f_back, tasklet.thread_id)

                    elif tasklet.is_current:
                        if tasklet_info.frame_id is not None:
                            # Remove info about stackless suspended when it starts to run.
                            remove_custom_frame(tasklet_info.frame_id)
                            tasklet_info.frame_id = None

        finally:
            # Drop strong references so tasklets/frames can be collected.
            tasklet = None
            tasklet_info = None
            f_back = None

    except:
        # Deliberate best-effort: a scheduler callback must never propagate.
        import traceback;traceback.print_exc()

    if _application_set_schedule_callback is not None:
        # Chain to the callback the application itself installed.
        return _application_set_schedule_callback(prev, next)
def _schedule_callback(prev, next):
    '''
    Called when a context is stopped or a new context is made runnable.
    '''
    # Note: `next` shadows the builtin, but the name is fixed by the stackless
    # schedule-callback signature.
    try:
        if not prev and not next:
            return

        if next:
            register_tasklet_info(next)

            # Ok, making next runnable: set the tracing facility in it.
            debugger = get_global_debugger()
            if debugger is not None and next.frame:
                if hasattr(next.frame, 'f_trace'):
                    next.frame.f_trace = debugger.trace_dispatch
            debugger = None

        if prev:
            register_tasklet_info(prev)

        try:
            # Sync the custom-frames registry with every tasklet we know about.
            for tasklet_ref, tasklet_info in dict_items(_weak_tasklet_registered_to_info):  # Make sure it's a copy!
                tasklet = tasklet_ref()
                if tasklet is None or not tasklet.alive:
                    # Garbage-collected already!
                    try:
                        del _weak_tasklet_registered_to_info[tasklet_ref]
                    except KeyError:
                        pass
                    if tasklet_info.frame_id is not None:
                        remove_custom_frame(tasklet_info.frame_id)
                else:
                    if tasklet.paused or tasklet.blocked or tasklet.scheduled:
                        # Suspended tasklet: show (or refresh) a custom frame for it,
                        # unless its top frame belongs to a file we don't trace.
                        if tasklet.frame and tasklet.frame.f_back:
                            f_back = tasklet.frame.f_back
                            base = get_abs_path_real_path_and_base_from_frame(f_back)[-1]
                            is_file_to_ignore = base in DONT_TRACE
                            if not is_file_to_ignore:
                                if tasklet_info.frame_id is None:
                                    tasklet_info.frame_id = add_custom_frame(f_back, tasklet_info.tasklet_name, tasklet.thread_id)
                                else:
                                    update_custom_frame(tasklet_info.frame_id, f_back, tasklet.thread_id)

                    elif tasklet.is_current:
                        if tasklet_info.frame_id is not None:
                            # Remove info about stackless suspended when it starts to run.
                            remove_custom_frame(tasklet_info.frame_id)
                            tasklet_info.frame_id = None

        finally:
            # Drop strong references so tasklets/frames can be collected.
            tasklet = None
            tasklet_info = None
            f_back = None

    except:
        # Deliberate best-effort: a scheduler callback must never propagate.
        import traceback;traceback.print_exc()

    if _application_set_schedule_callback is not None:
        # Chain to the callback the application itself installed.
        return _application_set_schedule_callback(prev, next)
def get_children_variables(self, fmt=None):
    """Build the list of child variables for this container value.

    Returns an empty list when the value has no resolver (i.e.: it's not a
    container). Each child is an _ObjectVariable bound to this frame; when a
    parent evaluate name exists, each child's evaluate name is derived from it.
    """
    _, _, resolver = get_type(self.value)
    children = []
    if resolver is None:
        # i.e.: it's not a container.
        return children

    if hasattr(resolver, 'get_contents_debug_adapter_protocol'):
        # The get_contents_debug_adapter_protocol needs to return sorted.
        entries = resolver.get_contents_debug_adapter_protocol(self.value, fmt=fmt)
    else:
        # If there's no special implementation, the default is sorting the keys.
        pairs = dict_items(resolver.get_dictionary(self.value))
        pairs.sort(key=lambda item: sorted_attributes_key(item[0]))
        # No evaluate name in this case.
        entries = [(k, v, None) for (k, v) in pairs]

    parent_evaluate_name = self.evaluate_name
    if parent_evaluate_name:
        for child_key, child_val, eval_name in entries:
            if eval_name is not None:
                # The entry may carry either a callable or a suffix to append.
                if callable(eval_name):
                    eval_name = eval_name(parent_evaluate_name)
                else:
                    eval_name = parent_evaluate_name + eval_name
            children.append(_ObjectVariable(
                child_key, child_val, self._register_variable,
                evaluate_name=eval_name, frame=self.frame))
    else:
        for child_key, child_val, _ignored in entries:
            # No evaluate name without a parent name.
            children.append(_ObjectVariable(
                child_key, child_val, self._register_variable, frame=self.frame))

    return children
def get_children_variables(self, fmt=None):
    """Return the frame's local variables as sorted _ObjectVariables.

    Locals stored under RETURN_VALUES_DICT are expanded into one variable
    per returned value, named '<key>[<return_key>!r]'.
    """
    result = []
    append = result.append
    for name, value in dict_items(self.frame.f_locals):
        if name == RETURN_VALUES_DICT:
            # Expand the return-values dict into individual pseudo-variables.
            for ret_name, ret_value in dict_iter_items(value):
                append(_ObjectVariable(
                    ret_name, ret_value, self._register_variable, True,
                    '%s[%r]' % (name, ret_name), frame=self.frame))
        else:
            append(_ObjectVariable(
                name, value, self._register_variable, False, name, frame=self.frame))

    # Frame variables always sorted.
    result.sort(key=sorted_variables_key)
    return result
def remove_breakpoint(self, py_db, received_filename, breakpoint_type, breakpoint_id):
    '''
    Removes a single breakpoint, both from the API-received registry and from
    the per-file breakpoint structures, then notifies the debugger.

    :param str received_filename:
        Note: must be sent as it was received in the protocol. It may be translated in this
        function.

    :param str breakpoint_type:
        One of: 'python-line', 'django-line', 'jinja2-line'.

    :param int breakpoint_id:
    '''
    # First drop the entry from the raw API-received registry (keyed by the
    # filename exactly as received plus the breakpoint id).
    for key, val in dict_items(py_db.api_received_breakpoints):
        original_filename, existing_breakpoint_id = key
        _new_filename, _api_add_breakpoint_params = val
        if received_filename == original_filename and existing_breakpoint_id == breakpoint_id:
            del py_db.api_received_breakpoints[key]
            break
    else:
        pydev_log.info(
            'Did not find breakpoint to remove: %s (breakpoint id: %s)', received_filename, breakpoint_id)

    file_to_id_to_breakpoint = None
    # Translate the protocol filename to the server-side canonical form.
    received_filename = self.filename_to_server(received_filename)
    canonical_normalized_filename = pydevd_file_utils.canonical_normalized_path(received_filename)

    if breakpoint_type == 'python-line':
        breakpoints = py_db.breakpoints
        file_to_id_to_breakpoint = py_db.file_to_id_to_line_breakpoint

    elif py_db.plugin is not None:
        # Template breakpoints (django/jinja2) are managed by the plugin.
        result = py_db.plugin.get_breakpoints(py_db, breakpoint_type)
        if result is not None:
            file_to_id_to_breakpoint = py_db.file_to_id_to_plugin_breakpoint
            breakpoints = result

    if file_to_id_to_breakpoint is None:
        pydev_log.critical('Error removing breakpoint. Cannot handle breakpoint of type %s', breakpoint_type)
    else:
        try:
            id_to_pybreakpoint = file_to_id_to_breakpoint.get(canonical_normalized_filename, {})
            if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0:
                existing = id_to_pybreakpoint[breakpoint_id]
                pydev_log.info('Removed breakpoint:%s - line:%s - func_name:%s (id: %s)\n' % (
                    canonical_normalized_filename, existing.line, existing.func_name.encode('utf-8'), breakpoint_id))
            del id_to_pybreakpoint[breakpoint_id]
            # Rebuild the consolidated per-file structures after the removal.
            py_db.consolidate_breakpoints(canonical_normalized_filename, id_to_pybreakpoint, breakpoints)
            if py_db.plugin is not None:
                py_db.has_plugin_line_breaks = py_db.plugin.has_line_breaks()

        except KeyError:
            # Missing id: log (don't raise) so a stale remove request is harmless.
            pydev_log.info("Error removing breakpoint: Breakpoint id not found: %s id: %s. Available ids: %s\n",
                canonical_normalized_filename, breakpoint_id, dict_keys(id_to_pybreakpoint))

    py_db.on_breakpoints_changed(removed=True)
def reapply_breakpoints(self, py_db):
    '''
    Reapplies all the received breakpoints as they were received by the API
    (so, new translations are applied).
    '''
    # Snapshot first: remove_all_breakpoints clears api_received_breakpoints.
    pending = dict_items(py_db.api_received_breakpoints)
    self.remove_all_breakpoints(py_db, '*')
    for _key, (_new_filename, api_add_breakpoint_params) in pending:
        self.add_breakpoint(py_db, *api_add_breakpoint_params)
def _verify_breakpoints_with_lines_collected(self, py_db, canonical_normalized_filename, template_breakpoints_for_file, valid_lines_frozenset, sorted_lines):
    '''
    Re-validates template breakpoints for a file against the set of lines that
    are actually valid in the rendered template, moving or invalidating
    breakpoints as needed and notifying clients of any state change.

    :param canonical_normalized_filename: file the breakpoints belong to.
    :param template_breakpoints_for_file: dict line -> template breakpoint (mutated here).
    :param valid_lines_frozenset: frozenset of valid line numbers.
    :param sorted_lines: the same valid lines, sorted ascending (for bisect).
    '''
    for line, template_bp in dict_items(template_breakpoints_for_file):  # Note: iterate in a copy (we may mutate it).
        # Skip breakpoints already verified against this exact set of lines.
        if template_bp.verified_cache_key != valid_lines_frozenset:
            template_bp.verified_cache_key = valid_lines_frozenset
            valid = line in valid_lines_frozenset

            if not valid:
                new_line = -1
                if sorted_lines:
                    # Adjust to the first preceding valid line.
                    idx = bisect.bisect_left(sorted_lines, line)
                    if idx > 0:
                        new_line = sorted_lines[idx - 1]

                if new_line >= 0 and new_line not in template_breakpoints_for_file:
                    # We just add it if found and if there's no existing breakpoint at that
                    # location.
                    if template_bp.add_breakpoint_result.error_code != PyDevdAPI.ADD_BREAKPOINT_NO_ERROR and template_bp.add_breakpoint_result.translated_line != new_line:
                        pydev_log.debug('Template breakpoint in %s in line: %s moved to line: %s', canonical_normalized_filename, line, new_line)
                        template_bp.add_breakpoint_result.error_code = PyDevdAPI.ADD_BREAKPOINT_NO_ERROR
                        template_bp.add_breakpoint_result.translated_line = new_line

                        # Add it to a new line.
                        template_breakpoints_for_file.pop(line, None)
                        template_breakpoints_for_file[new_line] = template_bp
                        template_bp.on_changed_breakpoint_state(template_bp.breakpoint_id, template_bp.add_breakpoint_result)
                else:
                    # No preceding valid line (or it's already taken): mark invalid
                    # once (only notify when the state actually changes).
                    if template_bp.add_breakpoint_result.error_code != PyDevdAPI.ADD_BREAKPOINT_INVALID_LINE:
                        pydev_log.debug('Template breakpoint in %s in line: %s invalid (valid lines: %s)', canonical_normalized_filename, line, valid_lines_frozenset)
                        template_bp.add_breakpoint_result.error_code = PyDevdAPI.ADD_BREAKPOINT_INVALID_LINE
                        template_bp.on_changed_breakpoint_state(template_bp.breakpoint_id, template_bp.add_breakpoint_result)
            else:
                # Line is valid now: clear a previous error state if there was one.
                if template_bp.add_breakpoint_result.error_code != PyDevdAPI.ADD_BREAKPOINT_NO_ERROR:
                    template_bp.add_breakpoint_result.error_code = PyDevdAPI.ADD_BREAKPOINT_NO_ERROR
                    template_bp.on_changed_breakpoint_state(template_bp.breakpoint_id, template_bp.add_breakpoint_result)
def remove_all_breakpoints(self, py_db, filename):
    '''
    Removes all the breakpoints from a given file or from all files if filename == '*'.

    :param str filename:
        Note: must be sent as it was received in the protocol. It may be translated
        in this function.
    '''
    assert filename.__class__ == str  # i.e.: bytes on py2 and str on py3
    changed = False

    registries = [
        py_db.file_to_id_to_line_breakpoint,
        py_db.file_to_id_to_plugin_breakpoint,
        py_db.breakpoints,
    ]
    # Template breakpoint registries only exist when the related plugin is active.
    for attr in ('django_breakpoints', 'jinja2_breakpoints'):
        if hasattr(py_db, attr):
            registries.append(getattr(py_db, attr))

    if filename == '*':
        py_db.api_received_breakpoints.clear()
        for registry in registries:
            if registry:
                registry.clear()
                changed = True
    else:
        # Iterate a copy: we delete entries from api_received_breakpoints below.
        translated_filenames = []
        for key, val in dict_items(py_db.api_received_breakpoints):
            original_filename, _breakpoint_id = key
            if original_filename == filename:
                canonical_normalized_filename, _api_add_breakpoint_params = val
                # Note: there can be actually 1:N mappings due to source mapping (i.e.: ipython).
                translated_filenames.append(canonical_normalized_filename)
                del py_db.api_received_breakpoints[key]

        for canonical_normalized_filename in translated_filenames:
            for registry in registries:
                if canonical_normalized_filename in registry:
                    registry.pop(canonical_normalized_filename, None)
                    changed = True

    if changed:
        py_db.on_breakpoints_changed(removed=True)
def has_mapping_entry(self, filename):
    """Return True if any mapping entry has *filename* as its runtime source.

    Only the filename matters here (not the line). Results are memoized in
    self._cache under a ('has_entry', filename) key.
    """
    cache_key = ('has_entry', filename)
    if cache_key in self._cache:
        return self._cache[cache_key]

    found = False
    for _source_filename, mapping in dict_items(self._mappings_to_server):
        if any(entry.runtime_source == filename for entry in mapping):
            found = True
            break

    self._cache[cache_key] = found
    return found
def map_to_client(self, filename, lineno):
    """Map a runtime (filename, lineno) to the client (source) location.

    :return: (client_filename, client_lineno, translated) where *translated*
        is True only if a mapping entry matched.

    Fix: the original computed a cache key and probed ``self._cache`` but never
    stored any result, so every call re-scanned all mappings (the cached
    sibling implementation shows the intended pattern); both the translated
    and the not-translated outcomes are now memoized.
    """
    # Note: the filename must be normalized to the client after this point.
    key = (filename, lineno, 'client')
    try:
        return self._cache[key]
    except KeyError:
        for source_filename, mapping in self._mappings_to_server.items():
            for map_entry in mapping:
                if map_entry.runtime_source == filename and map_entry.contains_runtime_line(lineno):
                    # Translate the line relative to the start of the entry.
                    self._cache[key] = (
                        source_filename,
                        map_entry.line + (lineno - map_entry.runtime_line),
                        True)
                    return self._cache[key]

        # Mark in the cache that no translation happened.
        self._cache[key] = (filename, lineno, False)
        return self._cache[key]
def get_children_variables(self, fmt=None):
    """Return this frame's locals as a sorted list of _ObjectVariable.

    The special RETURN_VALUES_DICT entry is flattened: each returned value
    becomes its own variable, labelled '<key>[<return_key>!r]'.
    """
    result = []
    for name, value in dict_items(self.frame.f_locals):
        is_return_value = name == RETURN_VALUES_DICT
        if not is_return_value:
            result.append(_ObjectVariable(
                name, value, self._register_variable, is_return_value, name))
            continue
        # Flatten the return-values dict into one variable per entry.
        for ret_name, ret_value in dict_iter_items(value):
            result.append(_ObjectVariable(
                ret_name, ret_value, self._register_variable, is_return_value,
                '%s[%r]' % (name, ret_name)))

    # Frame variables always sorted.
    result.sort(key=sorted_variables_key)
    return result
def map_to_client(self, runtime_source_filename, lineno):
    """Translate a runtime location (e.g.: <ipython-cell-xxx>, line) to the
    client source location.

    Returns (source_filename, source_lineno, translated); when no mapping
    matches, the input is returned unchanged with translated == False.
    Results (including misses) are memoized in self._cache.
    """
    key = (lineno, 'client', runtime_source_filename)
    try:
        return self._cache[key]
    except KeyError:
        pass

    for _, mapping in dict_items(self._mappings_to_server):
        for map_entry in mapping:
            # Must match both the runtime source name (e.g.: <cell1>)...
            if map_entry.runtime_source != runtime_source_filename:
                continue
            # ...and the entry's runtime line range.
            if map_entry.contains_runtime_line(lineno):
                translated = (
                    map_entry.source_filename,
                    map_entry.line + (lineno - map_entry.runtime_line),
                    True)
                self._cache[key] = translated
                return translated

    # Mark that no translation happened in the cache.
    untranslated = (runtime_source_filename, lineno, False)
    self._cache[key] = untranslated
    return untranslated
def has_mapping_entry(self, runtime_source_filename):
    '''
    :param runtime_source_filename:
        Something as <ipython-cell-xxx>
    '''
    # Only the filename matters (not the line): we just want to know whether
    # any mapping targets this runtime source. Memoized in self._cache.
    key = ('has_entry', runtime_source_filename)
    try:
        return self._cache[key]
    except KeyError:
        result = any(
            map_entry.runtime_source == runtime_source_filename
            for _absolute_normalized_filename, mapping in dict_items(self._mappings_to_server)
            for map_entry in mapping
        )
        self._cache[key] = result
        return result
def get_children_variables(self, fmt=None, scope=None):
    '''
    Returns the variables of the requested frame scope as _ObjectVariable
    instances, with grouped entries first and the rest sorted.

    :param fmt: formatting options (unused in this implementation; kept for
        API symmetry with the other get_children_variables variants).
    :param scope: optional ScopeRequest; 'locals' (or None) reads f_locals,
        'globals' reads f_globals.
    '''
    children_variables = []
    if scope is not None:
        assert isinstance(scope, ScopeRequest)
        scope = scope.scope

    if scope in ('locals', None):
        dct = self.frame.f_locals
    elif scope == 'globals':
        dct = self.frame.f_globals
    else:
        raise AssertionError('Unexpected scope: %s' % (scope,))

    # Split entries into plain variables and group entries; the internal
    # '_pydev_stop_at_break' helper is never shown to the client.
    lst, group_entries = self._group_entries([(x[0], x[1], None) for x in dict_items(dct) if x[0] != '_pydev_stop_at_break'], handle_return_values=True)
    group_variables = []

    for key, val, _ in group_entries:
        # Make sure that the contents in the group are also sorted.
        val.contents_debug_adapter_protocol.sort(key=lambda v:sorted_attributes_key(v[0]))
        variable = _ObjectVariable(self.py_db, key, val, self._register_variable, False, key, frame=self.frame)
        group_variables.append(variable)

    for key, val, _ in lst:
        is_return_value = key == RETURN_VALUES_DICT
        if is_return_value:
            # Expand the return-values dict into one variable per entry.
            for return_key, return_value in dict_iter_items(val):
                variable = _ObjectVariable(self.py_db, return_key, return_value, self._register_variable, is_return_value, '%s[%r]' % (key, return_key), frame=self.frame)
                children_variables.append(variable)
        else:
            variable = _ObjectVariable(self.py_db, key, val, self._register_variable, is_return_value, key, frame=self.frame)
            children_variables.append(variable)

    # Frame variables always sorted.
    children_variables.sort(key=sorted_variables_key)
    if group_variables:
        # Groups have priority over other variables.
        children_variables = group_variables + children_variables

    return children_variables
def _schedule_callback(prev, next):
    '''
    Called when a context is stopped or a new context is made runnable.
    '''
    # Note: `next` shadows the builtin, but the name is fixed by the stackless
    # schedule-callback signature.
    try:
        if not prev and not next:
            return

        current_frame = sys._getframe()

        if next:
            register_tasklet_info(next)

            # Ok, making next runnable: set the tracing facility in it.
            debugger = get_global_debugger()
            if debugger is not None:
                next.trace_function = debugger.get_thread_local_trace_func()
                frame = next.frame
                if frame is current_frame:
                    # Skip this callback's own frame.
                    frame = frame.f_back
                if hasattr(frame, 'f_trace'):  # Note: can be None (but hasattr should cover for that too).
                    frame.f_trace = debugger.get_thread_local_trace_func()

            debugger = None

        if prev:
            register_tasklet_info(prev)

        try:
            # Sync the custom-frames registry with every tasklet we know about.
            # NOTE(review): `debugger` is only bound inside the `if next:` block
            # above, so the `debugger.get_file_type(...)` call below can raise
            # (swallowed by the bare except) when `next` is falsy — confirm.
            for tasklet_ref, tasklet_info in dict_items(_weak_tasklet_registered_to_info):  # Make sure it's a copy!
                tasklet = tasklet_ref()
                if tasklet is None or not tasklet.alive:
                    # Garbage-collected already!
                    try:
                        del _weak_tasklet_registered_to_info[tasklet_ref]
                    except KeyError:
                        pass
                    if tasklet_info.frame_id is not None:
                        remove_custom_frame(tasklet_info.frame_id)
                else:
                    is_running = stackless.get_thread_info(tasklet.thread_id)[1] is tasklet
                    if tasklet is prev or (tasklet is not next and not is_running):
                        # the tasklet won't run after this scheduler action:
                        # - the tasklet is the previous tasklet
                        # - it is not the next tasklet and it is not an already running tasklet
                        frame = tasklet.frame
                        if frame is current_frame:
                            frame = frame.f_back
                        if frame is not None:
                            abs_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame)
                            # print >>sys.stderr, "SchedCB: %r, %d, '%s', '%s'" % (tasklet, frame.f_lineno, _filename, base)
                            if debugger.get_file_type(abs_real_path_and_base) is None:
                                tasklet_info.update_name()
                                if tasklet_info.frame_id is None:
                                    tasklet_info.frame_id = add_custom_frame(frame, tasklet_info.tasklet_name, tasklet.thread_id)
                                else:
                                    update_custom_frame(tasklet_info.frame_id, frame, tasklet.thread_id, name=tasklet_info.tasklet_name)

                    elif tasklet is next or is_running:
                        if tasklet_info.frame_id is not None:
                            # Remove info about stackless suspended when it starts to run.
                            remove_custom_frame(tasklet_info.frame_id)
                            tasklet_info.frame_id = None

        finally:
            # Drop strong references so tasklets/frames can be collected.
            tasklet = None
            tasklet_info = None
            frame = None

    except:
        # Deliberate best-effort: a scheduler callback must never propagate.
        import traceback;traceback.print_exc()

    if _application_set_schedule_callback is not None:
        # Chain to the callback the application itself installed.
        return _application_set_schedule_callback(prev, next)