def __init__(self):
    Repr.__init__(self)
    # Note: These levels can get adjusted dynamically! My goal is to get more
    # info when printing important debug stuff like exceptions and stack
    # traces and less info when logging normal events. --Zooko 2000-10-14
    self.maxlevel = 6
    self.maxdict = 6
    self.maxlist = 6
    self.maxtuple = 6
    self.maxstring = 300
    self.maxother = 300
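# A minimal standalone sketch (stdlib only) of what the limits above change:
# the nested data below is made up purely for illustration.
from reprlib import Repr

tuned = Repr()
tuned.maxlevel = 6      # nesting depth before values collapse to '...'
tuned.maxdict = 6       # show up to 6 dict items per level
tuned.maxstring = 300   # keep long strings mostly intact

data = {'trace': 'x' * 400, 'frames': list(range(50))}
print(Repr().repr(data))   # default limits: heavily truncated
print(tuned.repr(data))    # tuned limits: more detail preserved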
def get_prices_from_api(request_uri: str) -> dict:
    """Request data from the Octopus API at the provided URI and return the
    JSON object. Handle errors gracefully with retries when appropriate."""
    # Try to handle issues with the API - rare but they do happen - using an
    # exponential sleep time up to 2**14 (16384) seconds, approx 4.5 hours.
    # We will keep trying for over 9 hours in total and then give up.
    print('Requesting Agile prices from Octopus API...')
    retry_count = 0
    my_repr = Repr()
    my_repr.maxstring = 80  # avoid truncating our error messages too much
    while retry_count <= MAX_RETRIES:
        if retry_count == MAX_RETRIES:
            raise SystemExit('API retry limit exceeded.')
        try:
            success = False
            response = requests.get(request_uri, timeout=5)
            response.raise_for_status()
            if response.status_code // 100 == 2:
                success = True
                return response.json()
        except requests.exceptions.HTTPError as error:
            print('API HTTP error ' + str(response.status_code) +
                  ', retrying in ' + str(2**retry_count) + 's')
            time.sleep(2**retry_count)
            retry_count += 1
        except requests.exceptions.ConnectionError as error:
            print('API connection error: ' + my_repr.repr(str(error)) +
                  ', retrying in ' + str(2**retry_count) + 's')
            time.sleep(2**retry_count)
            retry_count += 1
        except requests.exceptions.Timeout:
            print('API request timeout, retrying in ' +
                  str(2**retry_count) + 's')
            time.sleep(2**retry_count)
            retry_count += 1
        except requests.exceptions.RequestException as error:
            raise SystemExit('API Request error: ' + str(error)) from error
        if success:
            print('API request successful, status ' +
                  str(response.status_code) + '.')
            break
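# Why bump maxstring to 80 before logging request errors: a requests
# ConnectionError often embeds the full URL plus the wrapped urllib3
# exception, which can run to several hundred characters. A minimal sketch
# with a made-up error string (no network call involved):
from reprlib import Repr

my_repr = Repr()
my_repr.maxstring = 80

fake_error = ("HTTPSConnectionPool(host='api.octopus.energy', port=443): "
              "Max retries exceeded with url: /v1/products/..." + "x" * 300)
print(my_repr.repr(fake_error))   # middle elided, roughly 80 chars kept
print(Repr().repr(fake_error))    # default maxstring=30 keeps far less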
def __init__(self, subsequentIndent=""): Repr.__init__(self) self.maxtuple = 20 self.maxset = 160 self.maxlist = 20 self.maxdict = 20 self.maxstring = 1600 self.maxother = 160 self.maxLineLen = 160 self.subsequentIndent = subsequentIndent # Pretty-print? self._pretty = True
def repr_instance(self, obj, level):
    """
    If it is an instance of Exception, format it nicely (trying to emulate
    the format that you see when an exception is actually raised, plus
    bracketing '<''s). If it is an instance of dict call self.repr_dict()
    on it. If it is an instance of list call self.repr_list() on it. Else
    call Repr.repr_instance().
    """
    if isinstance(obj, Exception):
        # Don't cut down exception strings so much.
        tms = self.maxstring
        self.maxstring = max(512, tms * 4)
        tml = self.maxlist
        self.maxlist = max(12, tml * 4)
        try:
            if hasattr(obj, 'args'):
                if len(obj.args) == 1:
                    return '<' + obj.__class__.__name__ + ': ' + self.repr1(
                        obj.args[0], level - 1) + '>'
                else:
                    return '<' + obj.__class__.__name__ + ': ' + self.repr1(
                        obj.args, level - 1) + '>'
            else:
                return '<' + obj.__class__.__name__ + '>'
        finally:
            self.maxstring = tms
            self.maxlist = tml
    if isinstance(obj, dict):
        return self.repr_dict(obj, level)
    if isinstance(obj, list):
        return self.repr_list(obj, level)
    return Repr.repr_instance(self, obj, level)
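# Rough usage sketch: overriding repr_instance as above makes exception
# objects render like '<ValueError: ...>'. The class name ExcRepr and the
# sample exception below are assumptions for illustration only, not part of
# the original project.
from reprlib import Repr

class ExcRepr(Repr):
    def repr_instance(self, obj, level):
        if isinstance(obj, Exception):
            return '<' + obj.__class__.__name__ + ': ' + self.repr1(
                obj.args[0] if len(obj.args) == 1 else obj.args,
                level - 1) + '>'
        return Repr.repr_instance(self, obj, level)

print(ExcRepr().repr(ValueError('bad input: ' + 'x' * 50)))
# -> something like "<ValueError: 'bad input: xxxxxxxxxxxxx...xxxxxxxxxxxxx'>"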
def repr(self, obj):
    if isinstance(obj.__class__, DeclarativeMeta):
        return self.repr_Base(obj, self.maxlevel)
    if sys.version_info < (3,):
        return _Repr.repr(self, obj)
    else:
        return super(Repr, self).repr(obj)
def test_init_kwargs(self):
    example_kwargs = {
        "maxlevel": 101,
        "maxtuple": 102,
        "maxlist": 103,
        "maxarray": 104,
        "maxdict": 105,
        "maxset": 106,
        "maxfrozenset": 107,
        "maxdeque": 108,
        "maxstring": 109,
        "maxlong": 110,
        "maxother": 111,
        "fillvalue": "x" * 112,
    }
    r1 = Repr()
    for attr, val in example_kwargs.items():
        setattr(r1, attr, val)
    r2 = Repr(**example_kwargs)
    for attr in example_kwargs:
        self.assertEqual(getattr(r1, attr), getattr(r2, attr), msg=attr)
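# The two Repr instances compared above should be interchangeable. A hedged
# sketch of the same idea outside a TestCase: keyword-argument construction
# (and the fillvalue attribute) only exist on newer Python versions, so the
# fallback sets attributes the old way.
from reprlib import Repr

r_attr = Repr()
r_attr.maxlist = 2

try:
    r_kw = Repr(maxlist=2)   # newer interpreters only
except TypeError:
    r_kw = Repr()
    r_kw.maxlist = 2         # older interpreters: set attributes after construction

assert r_attr.repr(list(range(10))) == r_kw.repr(list(range(10)))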
def test_tuple(self):
    eq = self.assertEqual
    eq(r((1,)), "(1,)")
    t3 = (1, 2, 3)
    eq(r(t3), "(1, 2, 3)")

    r2 = Repr()
    r2.maxtuple = 2
    expected = repr(t3)[:-2] + "...)"
    eq(r2.repr(t3), expected)

    # modified fillvalue:
    r3 = Repr()
    r3.fillvalue = '+++'
    r3.maxtuple = 2
    expected = repr(t3)[:-2] + "+++)"
    eq(r3.repr(t3), expected)
def __init__(self, core_obj, opts=None):
    Mprocessor.Processor.__init__(self, core_obj)
    self.response = {"errs": [], "msg": []}
    self.continue_running = False  # True if we should leave command loop
    self.cmd_instances = self._populate_commands()
    # command name before alias or macro resolution
    self.cmd_name = ""
    # Current command getting run
    self.current_command = ""
    self.debug_nest = 1
    self.display_mgr = Mdisplay.DisplayMgr()
    self.intf = core_obj.debugger.intf
    self.last_command = None  # Initially a no-op
    self.precmd_hooks = []
    # If not:
    #   self.location = lambda: print_location(self)
    self.preloop_hooks = []
    self.postcmd_hooks = []
    self._populate_cmd_lists()
    # Stop only if line/file is different from last time
    self.different_line = None
    # These values updated on entry. Set initial values.
    self.curframe = None
    self.event = None
    self.event_arg = None
    self.frame = None
    self.list_lineno = 0
    # Create a custom safe Repr instance and increase its maxstring.
    # The default of 30 truncates error messages too easily.
    self._repr = Repr()
    self._repr.maxstring = 100
    self._repr.maxother = 60
    self._repr.maxset = 10
    self._repr.maxfrozenset = 10  # the Repr attribute is maxfrozenset, not maxfrozen
    self._repr.maxarray = 10      # the Repr attribute is maxarray, not array
    self._saferepr = self._repr.repr
    self.stack = []
    self.thread_name = None
    self.frame_thread_name = None
    return
def repr_values(condition: Callable[..., bool],
                lambda_inspection: Optional[ConditionLambdaInspection],
                condition_kwargs: Mapping[str, Any],
                a_repr: reprlib.Repr) -> List[str]:  # pylint: disable=too-many-locals
    """
    Represent function arguments and frame values in the error message on contract breach.

    :param condition: condition function of the contract
    :param lambda_inspection:
        inspected lambda AST node corresponding to the condition function
        (None if the condition was not given as a lambda function)
    :param condition_kwargs: condition arguments
    :param a_repr: representation instance that defines how the values are represented.
    :return: list of value representations
    """
    if is_lambda(a_function=condition):
        assert lambda_inspection is not None, \
            "Expected a lambda inspection when given a condition as a lambda function"
    else:
        assert lambda_inspection is None, \
            "Expected no lambda inspection in a condition given as a non-lambda function"

    reprs = dict()  # type: MutableMapping[str, Any]

    if lambda_inspection is not None:
        variable_lookup = collect_variable_lookup(
            condition=condition, condition_kwargs=condition_kwargs)

        # pylint: disable=protected-access
        recompute_visitor = icontract._recompute.Visitor(
            variable_lookup=variable_lookup)

        recompute_visitor.visit(node=lambda_inspection.node.body)
        recomputed_values = recompute_visitor.recomputed_values

        repr_visitor = Visitor(recomputed_values=recomputed_values,
                               variable_lookup=variable_lookup,
                               atok=lambda_inspection.atok)
        repr_visitor.visit(node=lambda_inspection.node.body)

        reprs = repr_visitor.reprs
    else:
        for key, val in condition_kwargs.items():
            if _representable(value=val):
                reprs[key] = val

    parts = []  # type: List[str]
    for key in sorted(reprs.keys()):
        parts.append('{} was {}'.format(key, a_repr.repr(reprs[key])))

    return parts
def print_kwargs(self, kwargs, show_size=False):
    repr_instance = Repr()
    print()
    for arg_name, arg_val in kwargs.items():
        print(f"\x1b[1m\x1b[34m{arg_name}\x1b[0m", end="")
        if isinstance(arg_val, (list, tuple, set, dict)):
            print(f"({len(arg_val)})", end="")
        if show_size:
            size = round(get_size(arg_val) / 1048576, 4)
            print(f" {size} MB", end="")
        print(f": {repr_instance.repr(arg_val)}")
    print()
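# A standalone sketch of the same idea: color the argument name, show the
# container length, and let reprlib.Repr abbreviate the value itself. The
# helper name print_kwargs_demo and the sample kwargs are illustrative only
# (the original relies on a get_size helper that is not shown here).
from reprlib import Repr

def print_kwargs_demo(**kwargs):
    shorten = Repr().repr
    for name, value in kwargs.items():
        length = f"({len(value)})" if isinstance(value, (list, tuple, set, dict)) else ""
        print(f"\x1b[1m\x1b[34m{name}\x1b[0m{length}: {shorten(value)}")

print_kwargs_demo(ids=list(range(1000)), label="x" * 200)
# ids(1000): [0, 1, 2, 3, 4, 5, ...]
# label: 'xxxxxxxxxxxxx...xxxxxxxxxxxxx'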
import linecache
import bdb
import re

try:
    from reprlib import Repr
except ImportError:
    # Python 2 fallback: the module was named "repr" before Python 3
    from repr import Repr

from pycopia import IO
from pycopia import UI
from pycopia import CLI

# Create a custom safe Repr instance and increase its maxstring.
# The default of 30 truncates error messages too easily.
_repr = Repr()
_repr.maxstring = 200
_repr.maxother = 50
_saferepr = _repr.repr

DebuggerQuit = bdb.BdbQuit


def find_function(funcname, filename):
    cre = re.compile(r'def\s+%s\s*[(]' % funcname)
    try:
        fp = open(filename)
    except IOError:
        return None
    # consumer of this info expects the first line to be 1
    lineno = 1
    answer = None
from functools import wraps
from reprlib import Repr
import inspect

from flask import request, g

from flask_logger_decorator.config import config
from flask_logger_decorator.logger import debug

__r = Repr()
__r.maxarray = __r.maxarray * 10
__r.maxdict = __r.maxdict * 10
__r.maxstring = __r.maxstring * 10


def request_tracing(fn):
    """A decorator for tracing requests."""
    @wraps(fn)
    def wrapper(*args, **kwargs):
        tracing_request(fn, *args, **kwargs)
        return fn(*args, **kwargs)
    return wrapper


def tracing_request(fn, *args, **kwargs):
    function_args = ' '.join(_get_fn_args(fn, *args, **kwargs))
    trace_info = ' '.join(_get_fn_extra_info(fn))
    func_msg = 'func_name:{} func_args:{} trace_info:{}'.format(
        fn.__name__, function_args, trace_info)
    request_msg = get_request_trace_info()
from collections import deque
from itertools import chain
from reprlib import Repr
from sys import getsizeof, stderr

_repr = Repr()
_repr.maxdict = 1000
repr = _repr.repr


def total_size(o, handlers={}, verbose=False):
    """Returns the approximate memory footprint of an object and all of its contents.

    Automatically finds the contents of the following builtin containers and
    their subclasses: tuple, list, deque, dict, set and frozenset.
    To search other containers, add handlers to iterate over their contents:

        handlers = {SomeContainerClass: iter,
                    OtherContainerClass: OtherContainerClass.get_elements}
    """
    dict_handler = lambda d: chain.from_iterable(
        (k, v) for k, v in d.items()
        if k not in {
            "sys",
            "__builtins__",
        })
    all_handlers = {
        tuple: iter,
        list: iter,
        deque: iter,
        dict: dict_handler,
import cmd
import bdb
from reprlib import Repr
import os
import re
import pprint
import traceback


class Restart(Exception):
    """Causes a debugger to be restarted for the debugged python program."""
    pass

# Create a custom safe Repr instance and increase its maxstring.
# The default of 30 truncates error messages too easily.
_repr = Repr()
_repr.maxstring = 200
_saferepr = _repr.repr

__all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace",
           "post_mortem", "help"]


def find_function(funcname, filename):
    cre = re.compile(r'def\s+%s\s*[(]' % re.escape(funcname))
    try:
        fp = open(filename)
    except IOError:
        return None
    # consumer of this info expects the first line to be 1
    lineno = 1
    answer = None
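# What a module-level _saferepr buys a debugger: when echoing local
# variables or exception values, output stays bounded no matter how large
# the object is. A minimal sketch (the sample objects are made up):
from reprlib import Repr

_repr = Repr()
_repr.maxstring = 200
_saferepr = _repr.repr

huge_blob = {'payload': 'A' * 10_000, 'rows': [list(range(100))] * 100}
print(_saferepr(huge_blob))               # a few hundred characters at most
print(len(_saferepr(huge_blob)) < 1_000)  # True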
class MetaProduct(Mapping):
    """
    Exposes a Product-like API to allow Tasks to create more than one
    Product. It is automatically instantiated when a Task is initialized
    with a sequence or a mapping object in the product parameter. While it
    is recommended for Tasks to only have one Product (to keep them simple),
    in some cases it makes sense. For example, a Jupyter notebook (executed
    via NotebookRunner) that fits a model might as well serialize things
    such as the model and any data preprocessors.
    """
    def __init__(self, products):
        container = ProductsContainer(products)

        self.products = container
        self.metadata = MetadataCollection(container)
        self.clients = ClientContainer(container)

        self._repr = Repr()

    @property
    def task(self):
        # TODO: validate same task
        return self.products[0].task

    @property
    def client(self):
        return self.clients

    @task.setter
    def task(self, value):
        for p in self.products:
            try:
                p.task = value
            except AttributeError as e:
                raise AttributeError(
                    "Expected MetaProduct to initialize with Product "
                    "instances (which have a 'task' attribute), but "
                    f"got {p!r}, an object of type {type(p)}. Replace it "
                    "with a valid Product object. If this is a file, use "
                    f"File({p!r})") from e

    def exists(self):
        return all([p.exists() for p in self.products])

    def delete(self, force=False):
        for product in self.products:
            product.delete(force)

    def download(self):
        for product in self.products:
            product.download()

    def upload(self):
        for product in self.products:
            product.upload()

    def _is_outdated(self, outdated_by_code=True):
        is_outdated = [
            p._is_outdated(outdated_by_code=outdated_by_code)
            for p in self.products
        ]

        if set(is_outdated) == {False}:
            return False

        if set(is_outdated) <= {TaskStatus.WaitingDownload, False}:
            return TaskStatus.WaitingDownload

        return any(is_outdated)

    def _outdated_data_dependencies(self):
        return any([p._outdated_data_dependencies() for p in self.products])

    def _outdated_code_dependency(self):
        return any([p._outdated_code_dependency() for p in self.products])

    def to_json_serializable(self):
        """Returns a JSON serializable version of this product
        """
        # NOTE: this is used in tasks where only JSON serializable parameters
        # are supported such as NotebookRunner that depends on papermill
        return self.products.to_json_serializable()

    def render(self, params, **kwargs):
        for p in self.products:
            p.render(params, **kwargs)

    def __repr__(self):
        content = self._repr.repr1(self.products.products, level=2)
        return f'{type(self).__name__}({content})'

    def __str__(self):
        return str(self.products)

    def __iter__(self):
        for product in self.products:
            yield product

    def __getitem__(self, key):
        return self.products[key]

    def __len__(self):
        return len(self.products)

    @property
    def _remote(self):
        return self.products.first._remote

    def _is_remote_outdated(self, outdated_by_code):
        return self.products.first._is_remote_outdated(outdated_by_code)
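# Why __repr__ above calls Repr.repr1(..., level=2) instead of Repr.repr():
# repr1 is the recursive entry point, so passing level=2 caps how deep the
# nested products get expanded. A sketch with plain lists standing in for
# the ProductsContainer (the data is illustrative only):
from reprlib import Repr

_r = Repr()
nested = [['a.csv', 'b.csv', ['deep', 'deeper']], ['model.pkl']]
print(_r.repr1(nested, level=2))
# -> [['a.csv', 'b.csv', ...], ['model.pkl']]   (containers two levels down collapse)
print(_r.repr(nested))
# -> full nesting shown; the public entry point uses level = maxlevel (6 by default)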
class Placeholder(abc.AbstractPlaceholder): """ Placeholder powers all the objects that use placeholder variables ( between curly brackets). It uses a jinja2.Template object under the hood but adds a few important things: * Keeps the raw (undendered) value: template.raw * Keeps path to raw value: template.location * Strict: will not render if missing or extra parameters * Upon calling .render, saves the rendered value for later access End users should not manipulate Placeholder objects, they should be automatically created from strings, pathlib.Path or jinja2.Template objects. Placeholder is mostly used by sources whose source code are parametrized strings (e.g. SQL scripts) Parameters ---------- hot_reload : bool, optional Makes the placeholder always read the template from the file before rendering required Set of keys required for rendering Attributes ---------- variables : set Returns the set of variables in the template (values sourrounded by {{ and }}) path : pathlib.Path The location of the raw object. None if initialized with a str or with a jinja2.Template created from a str Notes ----- You can use "raise" in a placeholder to raise exceptions, useful for validating input parameters: "{% raise 'some error message' %}" """ def __init__(self, primitive, hot_reload=False, required=None): self._logger = logging.getLogger('{}.{}'.format( __name__, type(self).__name__)) self._hot_reload = hot_reload self._variables = None self.__template = None # we have to take care of 4 possible cases and make sure we have # all we need to initialize the template, this includes having # access to the raw template (str) and a way to re-initialize # the jinja.environment.loader object (to make sure copies and # pickles work) if isinstance(primitive, Path): self._path = primitive self.__raw = primitive.read_text() self._loader_init = None elif isinstance(primitive, str): self._path = None self.__raw = primitive self._loader_init = None elif isinstance(primitive, Template): # NOTE: primitive.filename will be '<template>' if Template was # loaded from a string path = Path(primitive.filename) if primitive.environment.undefined != StrictUndefined: raise ValueError('Placeholder can only be initialized ' 'from jinja2.Templates whose undefined ' 'parameter is set to ' 'jinja2.StrictUndefined, set it explicitely ' 'either in the Template or Environment ' 'constructors') # we cannot get the raw template on this case, raise error # check '<template>' first, because Path('<template>').exists() # breaks on windows if primitive.filename == '<template>' or not path.exists(): raise ValueError( 'Could not load raw source from ' 'jinja2.Template. This usually happens ' 'when the placeholder is initialised with a ' 'jinja.Template which was initialized with ' 'a string. Only jinja2.Templates loaded from ' 'the filesystem are supported. 
Use ' 'ploomber.SourceLoader or jinja\'s ' 'FileSystemLoader/PackageLoader to fix ' 'this issue, if you want to create a template from ' 'a string pass it directly ' 'Placeholder("some {{placeholder}}")') self._path = path self.__raw = path.read_text() self._loader_init = _make_loader_init(primitive.environment.loader) # SourceLoader returns Placeholder objects, which could inadvertedly # be passed to another Placeholder constructor when instantiating # a source object, since they sometimes use placeholders # make sure this case is covered elif isinstance(primitive, Placeholder): self._path = primitive.path self.__raw = primitive._raw self._loader_init = _make_loader_init( primitive._template.environment.loader) else: raise TypeError('{} must be initialized with a Template, ' 'Placeholder, pathlib.Path or str, ' 'got {} instead'.format( type(self).__name__, type(primitive).__name__)) if self._path is None and hot_reload: raise ValueError('hot_reload only works when Placeholder is ' 'initialized from a file') # TODO: remove self.needs_render = self._needs_render() self._str = None if self.needs_render else self._raw self._repr = Repr() self._repr.maxstring = 40 if required: self._validate_required(required) def _validate_required(self, required): missing_required = set(required) - self.variables if missing_required: msg = ('The following tags are required. ' + display_error(missing_required, required)) raise SourceInitializationError(msg) @property def _template(self): if self.__template is None or self._hot_reload: self.__template = _init_template(self._raw, self._loader_init) return self.__template @property def _raw(self): """A string with the raw jinja2.Template contents """ if self._hot_reload: self.__raw = self._path.read_text() return self.__raw @_raw.setter def _raw(self, value): self.__raw = value @property def path(self): return self._path def _needs_render(self): """ Returns true if the template is a literal and does not need any parameters to render """ env = self._template.environment # check if the template has the variable or block start string # is there any better way of checking this? needs_variables = (env.variable_start_string in self._raw and env.variable_end_string in self._raw) needs_blocks = (env.block_start_string in self._raw and env.block_end_string in self._raw) return needs_variables or needs_blocks def __str__(self): if self._str is None: raise RuntimeError('Tried to read {} {} without ' 'rendering first'.format( type(self).__name__, repr(self))) return self._str def render(self, params, optional=None, required=None): """ """ optional = optional or set() optional = set(optional) passed = set(params.keys()) available = passed | set(self._template.environment.globals) missing = self.variables - available extra = passed - self.variables - optional # FIXME: self.variables should also be updated on hot_reload if missing: raise RenderError('in {}, missing required ' 'parameters: {}, params passed: {}'.format( repr(self), missing, params)) if extra: raise RenderError('in {}, unused parameters: {}, params ' 'declared: {}'.format(repr(self), extra, self.variables)) try: self._str = self._template.render(**params) except UndefinedError as e: # TODO: we can use e.message to see which param caused the # error raise RenderError('in {}, jinja2 raised an UndefinedError, this ' 'means the template is using an attribute ' 'or item that does not exist, the original ' 'traceback is shown above. 
For jinja2 ' 'implementation details see: ' 'http://jinja.pocoo.org/docs/latest' '/templates/#variables'.format( repr(self))) from e return str(self) def best_repr(self, shorten): """ Returns the rendered version (if available), otherwise the raw version """ best = self._raw if self._str is None else self._str if shorten: best = self._repr.repr(best) return best @property def variables(self): """Returns declared variables in the template """ # this requires parsing the raw template, do lazy load, but override # it if hot_reload is True if self._variables is None or self._hot_reload: self._variables = util.get_tags_in_str(self._raw) return self._variables def __repr__(self): content = self.best_repr(shorten=True) return f'{type(self).__name__}({content})' def __getstate__(self): state = self.__dict__.copy() del state['_logger'] del state['_Placeholder__template'] return state def __setstate__(self, state): self.__dict__.update(state) self._logger = logging.getLogger('{}.{}'.format( __name__, type(self).__name__)) self.__template = None
def __init__(self):
    Repr.__init__(self)
    self.maxlist = self.maxtuple = 20
    self.maxdict = 10
    self.maxstring = self.maxother = 100
# XXX TO DO:
# - popup menu
# - support partial or total redisplay
# - more doc strings
# - tooltips

# object browser

# XXX TO DO:
# - for classes/modules, add "open source" to object browser

from .TreeWidget import TreeItem, TreeNode, ScrolledCanvas

from reprlib import Repr

myrepr = Repr()
myrepr.maxstring = 100
myrepr.maxother = 100


class ObjectTreeItem(TreeItem):
    def __init__(self, labeltext, object, setfunction=None):
        self.labeltext = labeltext
        self.object = object
        self.setfunction = setfunction

    def GetLabelText(self):
        return self.labeltext

    def GetText(self):
        return myrepr.repr(self.object)
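# The tree labels above stay on one bounded line because GetText() routes
# every object through the module-level Repr. A quick sketch of the effect
# (the sample objects are made up):
from reprlib import Repr

myrepr = Repr()
myrepr.maxstring = 100
myrepr.maxother = 100

print(myrepr.repr('line\n' * 200))   # long string: middle elided, ~100 chars
print(myrepr.repr(object()))         # arbitrary instance: bounded by maxother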
import logging
from pprintpp import pformat as pf, pprint as pp
from reprlib import Repr
import shutil
import sys
from textwrap import indent, wrap

import sqlparse as sp

from tlbx.object_utils import get_class_vars, get_caller

logging.basicConfig(format="%(message)s")

_repr = Repr()
_repr.maxstring = 60
_repr.maxother = 20
_repr.maxlist = 5
_repr.maxtuple = 5
_repr.maxset = 5
repr = _repr.repr

DEFAULT_TERM_WIDTH = 80


class FontSpecialChars:
    ENDC = "\033[0m"


class FontColors:
# XXX TO DO:
# - popup menu
# - support partial or total redisplay
# - more doc strings
# - tooltips

# object browser

# XXX TO DO:
# - for classes/modules, add "open source" to object browser

from reprlib import Repr

from idlelib.tree import TreeItem, TreeNode, ScrolledCanvas

myrepr = Repr()
myrepr.maxstring = 100
myrepr.maxother = 100


class ObjectTreeItem(TreeItem):
    def __init__(self, labeltext, object, setfunction=None):
        self.labeltext = labeltext
        self.object = object
        self.setfunction = setfunction

    def GetLabelText(self):
        return self.labeltext

    def GetText(self):
        return myrepr.repr(self.object)
class EnvDict(Mapping): """ Implements the initialization functionality for Env, except it allows to more than one instance to exist, this is used internally to allow factory functions introspection without having to create an actual Env Parameters ---------- source : dict or str If str, it will be interpreted as a path to a YAML file path_to_here : str or pathlib.Path Value used to expand the {{here}} placeholder. If None, it uses the location of the YAML spec. If initialized with a dict and None, the {{here}} placeholder is not available. defaults : dict, default=None Default values to use. If not None, it uses these as defaults and overwrites keys using values in source Notes ----- By default, it includes the following placeholders (unless the passed dictionary already contains those keys): {{user}} (current user) {{cwd}} (working directory), {{here}} (env.yaml location, if any), {{root}} (project's root folder, if any) """ def __init__(self, source, path_to_here=None, defaults=None): # if initialized from another EnvDict, copy the attributes to # initialize # this happens in the CLI parser, which instanttiates the env # because it needs to create one and then replace cli args, then # passes this modified object to DAGSpec if isinstance(source, EnvDict): for attr in ( '_path_to_env', '_preprocessed', '_expander', '_data', '_repr', '_default_keys', ): original = getattr(source, attr) setattr(self, attr, deepcopy(original)) else: ( # load data raw_data, # this will be None if source is a dict self._path_to_env) = load_from_source(source) if defaults: raw_data = {**defaults, **raw_data} # add default placeholders but override them if they are defined # in the raw data default = self._default_dict(include_here=path_to_here is not None) self._default_keys = set(default) - set(raw_data) raw_data = {**default, **raw_data} # check raw data is ok validate.raw_data_keys(raw_data) # expand _module special key, return its expanded value self._preprocessed = raw_preprocess(raw_data, self._path_to_env) # initialize expander, which converts placeholders to their values # we need to pass path_to_env since the {{here}} placeholder # resolves to its parent if path_to_here is None: # if no pat_to_here, use path_to_end path_to_here = (None if self._path_to_env is None else Path( self._path_to_env).parent) else: path_to_here = Path(path_to_here).resolve() self._expander = EnvironmentExpander(self._preprocessed, path_to_here=path_to_here) # now expand all values self._data = self._expander.expand_raw_dictionary(raw_data) self._repr = Repr() @classmethod def find(cls, source): """ Find env file recursively, currently only used by the @with_env decorator """ if not Path(source).exists(): source_found, _ = default.find_file_recursively(source) if source_found is None: raise FileNotFoundError('Could not find file "{}" in the ' 'current working directory nor ' '6 levels up'.format(source)) else: source = source_found return cls(source, path_to_here=Path(source).parent) @property def default_keys(self): """Returns keys whose default value is used (i.e., if the user overrides them, they won't appear) """ return self._default_keys @staticmethod def _default_dict(include_here): placeholders = { 'user': '******', 'cwd': '{{cwd}}', 'now': '{{now}}', } if default.try_to_find_root_recursively() is not None: placeholders['root'] = '{{root}}' if include_here: placeholders['here'] = '{{here}}' return placeholders @property def path_to_env(self): return self._path_to_env def __getattr__(self, key): error = 
AttributeError("'{}' object has no attribute '{}'".format( type(self).__name__, key)) # do not look up special atttributes this way! if key.startswith('__') and key.endswith('__'): raise error if key in self: return self[key] else: raise AttributeError("{} object has no atttribute '{}'".format( repr(self), key)) def __getitem__(self, key): try: return self._getitem(key) except KeyError as e: # custom error will be displayed around quotes, but it's fine. # this is due to the KeyError.__str__ implementation msg = "{} object has no key '{}'".format(repr(self), key) e.args = (msg, ) raise def _getitem(self, key): if key in self._preprocessed: return FrozenJSON(self._preprocessed[key]) else: return FrozenJSON(self._data[key]) def __setitem__(self, key, value): self._data[key] = value def __iter__(self): for k in self._data: yield k def __len__(self): return len(self._data) def __str__(self): return str(self._data) def __repr__(self): content = self._repr.repr_dict(self._data, level=2) return f'{type(self).__name__}({content})' def _replace_value(self, value, keys_all): """ Replace a value in the underlying dictionary, by passing a value and a list of keys e.g. given {'a': {'b': 1}}, we can replace 1 by doing _replace_value(2, ['a', 'b']) """ keys_to_final_dict = keys_all[:-1] key_to_edit = keys_all[-1] dict_to_edit = self._data for e in keys_to_final_dict: dict_to_edit = dict_to_edit[e] if dict_to_edit.get(key_to_edit) is None: dotted_path = '.'.join(keys_all) raise KeyError('Trying to replace key "{}" in env, ' 'but it does not exist'.format(dotted_path)) dict_to_edit[key_to_edit] = (self._expander.expand_raw_value( value, keys_all)) def _inplace_replace_flatten_key(self, value, key_flatten): """ Replace a value in the underlying dictionary, by passing a value and a list of keys e.g. given {'a': {'b': 1}}, we can replace 1 by doing _replace_flatten_keys(2, 'env__a__b'). This function is used internally to overrive env values when calling factories (functions decorated with @with_env or when doing so via the command line interface - ploomber build pipeline.yaml --env--a--b 2) Returns a copy """ # convert env__a__b__c -> ['a', 'b', 'c'] parts = key_flatten.split('__') if parts[0] != 'env': raise ValueError('keys_flatten must start with env__') keys_all = parts[1:] self._replace_value(value, keys_all) def _replace_flatten_key(self, value, key_flatten): obj = copy(self) obj._inplace_replace_flatten_key(value, key_flatten) return obj def _inplace_replace_flatten_keys(self, to_replace): """Replace multiple keys at once Returns a copy """ for key, value in to_replace.items(): self._inplace_replace_flatten_key(value, key) def _replace_flatten_keys(self, to_replace): obj = copy(self) obj._inplace_replace_flatten_keys(to_replace) return obj
class File(ProductWithClientMixin, os.PathLike, Product): """A file (or directory) in the local filesystem Parameters ---------- identifier: str or pathlib.Path The path to the file (or directory), can contain placeholders (e.g. {{placeholder}}) """ def __init__(self, identifier, client=None): super().__init__(identifier) self._client = client self._repr = Repr() self._repr.maxstring = 40 self._remote_ = _RemoteFile(self) def _init_identifier(self, identifier): if not isinstance(identifier, (str, Path)): raise TypeError('File must be initialized with a str or a ' 'pathlib.Path') return Placeholder(str(identifier)) @property def _path_to_file(self): return Path(str(self._identifier)) @property def _path_to_metadata(self): name = f'.{self._path_to_file.name}.metadata' return self._path_to_file.with_name(name) @property def _remote(self): """ RemoteFile for this File. Returns None if a File.client doesn't exist, remote file doesn't exist or remote metadata doesn't exist """ return self._remote_ @property def _remote_path_to_metadata(self): return self._remote._path_to_metadata def fetch_metadata(self): # migrate metadata file to keep compatibility with ploomber<0.10 old_name = Path(str(self._path_to_file) + '.source') if old_name.is_file(): shutil.move(old_name, self._path_to_metadata) return _fetch_metadata_from_file_product(self, check_file_exists=True) def save_metadata(self, metadata): self._path_to_metadata.write_text(json.dumps(metadata)) def _delete_metadata(self): if self._path_to_metadata.exists(): os.remove(str(self._path_to_metadata)) def exists(self): return self._path_to_file.exists() def delete(self, force=False): # force is not used for this product but it is left for API # compatibility if self.exists(): self.logger.debug('Deleting %s', self._path_to_file) if self._path_to_file.is_dir(): shutil.rmtree(str(self._path_to_file)) else: os.remove(str(self._path_to_file)) else: self.logger.debug('%s does not exist ignoring...', self._path_to_file) def __repr__(self): # do not shorten, we need to process the actual path path = Path(self._identifier.best_repr(shorten=False)) # if absolute, try to show a shorter version, if possible if path.is_absolute(): try: path = path.relative_to(Path('.').resolve()) except ValueError: # happens if the path is not a file/folder within the current # working directory pass content = self._repr.repr(str(path)) return f'{type(self).__name__}({content})' def _check_is_outdated(self, outdated_by_code): """ Unlike other Product implementation that only have to check the current metadata, File has to check if there is a metadata remote copy and download it to decide outdated status, which yield to task execution or product downloading """ should_download = False if self._remote.exists(): if self._remote._is_equal_to_local_copy(): return self._remote._is_outdated(with_respect_to_local=True) else: # download when doing so will bring the product # up-to-date (this takes into account upstream # timestamps) should_download = not self._remote._is_outdated( with_respect_to_local=True, outdated_by_code=outdated_by_code) if should_download: return TaskStatus.WaitingDownload # no need to download, check status using local metadata return super()._check_is_outdated(outdated_by_code=outdated_by_code) def _is_remote_outdated(self, outdated_by_code): """ Check status using remote metadata, if no remote is available (or remote metadata is corrupted) returns True """ if self._remote.exists(): return self._remote._is_outdated(with_respect_to_local=False, 
outdated_by_code=outdated_by_code) else: # if no remote, return True. This is the least destructive option # since we don't know what will be available and what not when this # executes return True @property def client(self): try: client = super().client except MissingClientError: return None else: return client def download(self): self.logger.info('Downloading %s...', self._path_to_file) if self.client: self.client.download(str(self._path_to_file)) self.client.download(str(self._path_to_metadata)) def upload(self): if self.client: if not self._path_to_metadata.exists(): raise RuntimeError( f'Error uploading product {self!r}. ' f'Metadata {str(self._path_to_metadata)!r} does ' 'not exist') if not self._path_to_file.exists(): raise RuntimeError(f'Error uploading product {self!r}. ' f'Product {str(self._path_to_file)!r} does ' 'not exist') self.logger.info('Uploading %s...', self._path_to_file) self.client.upload(self._path_to_metadata) self.client.upload(self._path_to_file) def __fspath__(self): """ Abstract method defined in the os.PathLike interface, enables this to work: ``import pandas as pd; pd.read_csv(File('file.csv'))`` """ return str(self) def __eq__(self, other): return Path(str(self)).resolve() == Path(str(other)).resolve() def __hash__(self): return hash(Path(str(self)).resolve())
def repr(self, object):
    return Repr.repr(self, object)
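# Repr dispatches to repr_<typename> hooks, so a subclass only needs to
# override the hooks for the types it cares about; the public repr() entry
# point can simply delegate to the base class as above. A small sketch
# (the class name RoundingRepr and the sample data are illustrative only):
from reprlib import Repr

class RoundingRepr(Repr):
    def repr_float(self, obj, level):
        # hook name must be 'repr_' + the type's __name__
        return format(obj, '.2f')

print(RoundingRepr().repr({'pi': 3.14159265, 'e': 2.71828182}))
# -> {'e': 2.72, 'pi': 3.14}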
class CommandProcessor(Mprocessor.Processor): def __init__(self, core_obj, opts=None): get_option = lambda key: \ Mmisc.option_set(opts, key, DEFAULT_PROC_OPTS) Mprocessor.Processor.__init__(self, core_obj) self.continue_running = False # True if we should leave command loop self.event2short = dict(EVENT2SHORT) self.event2short['signal'] = '?!' self.event2short['brkpt'] = 'xx' self.optional_modules = ('ipython', 'bpy') self.cmd_instances = self._populate_commands() # command argument string. Is like current_command, but the part # after cmd_name has been removed. self.cmd_argstr = '' # command name before alias or macro resolution self.cmd_name = '' self.cmd_queue = [] # Queued debugger commands self.completer = lambda text, state: \ Mcomplete.completer(self, text, state) self.current_command = '' # Current command getting run self.debug_nest = 1 self.display_mgr = Mdisplay.DisplayMgr() self.intf = core_obj.debugger.intf self.last_command = None # Initially a no-op self.precmd_hooks = [] self.location = lambda: print_location(self) self.preloop_hooks = [] self.postcmd_hooks = [] self._populate_cmd_lists() self.prompt_str = '(trepan3k) ' # Stop only if line/file is different from last time self.different_line = None # These values updated on entry. Set initial values. self.curframe = None self.event = None self.event_arg = None self.frame = None self.list_lineno = 0 # last list number used in "list" self.list_filename = None # last filename used in list self.macros = {} # Debugger Macros # Create a custom safe Repr instance and increase its maxstring. # The default of 30 truncates error messages too easily. self._repr = Repr() self._repr.maxstring = 100 self._repr.maxother = 60 self._repr.maxset = 10 self._repr.maxfrozen = 10 self._repr.array = 10 self.stack = [] self.thread_name = None self.frame_thread_name = None initfile_list = get_option('initfile_list') for init_cmdfile in initfile_list: self.queue_startfile(init_cmdfile) return def _saferepr(self, str, maxwidth=None): if maxwidth is None: maxwidth = self.debugger.settings['width'] return self._repr.repr(str)[:maxwidth] def add_preloop_hook(self, hook, position=-1, nodups=True): if hook in self.preloop_hooks: return False self.preloop_hooks.insert(position, hook) return True # To be overridden in derived debuggers def defaultFile(self): """Produce a reasonable default.""" filename = self.curframe.f_code.co_filename # Consider using is_exec_stmt(). I just don't understand # the conditions under which the below test is true. if filename == '<string>' and self.debugger.mainpyfile: filename = self.debugger.mainpyfile pass return filename def set_prompt(self, prompt='trepan3k'): if self.thread_name and self.thread_name != 'MainThread': prompt += ':' + self.thread_name pass self.prompt_str = '%s%s%s' % ('(' * self.debug_nest, prompt, ')' * self.debug_nest) highlight = self.debugger.settings['highlight'] if highlight and highlight in ('light', 'dark'): self.prompt_str = colorize('underline', self.prompt_str) self.prompt_str += ' ' def event_processor(self, frame, event, event_arg, prompt='trepan3k'): 'command event processor: reading a commands do something with them.' 
self.frame = frame self.event = event self.event_arg = event_arg filename = frame.f_code.co_filename lineno = frame.f_lineno line = linecache.getline(filename, lineno, frame.f_globals) if not line: opts = { 'output': 'plain', 'reload_on_change': self.settings('reload'), 'strip_nl': False } m = re.search('^<frozen (.*)>', filename) if m and m.group(1): filename = pyficache.unmap_file(m.group(1)) line = pyficache.getline(filename, lineno, opts) self.current_source_text = line if self.settings('skip') is not None: if Mbytecode.is_def_stmt(line, frame): return True if Mbytecode.is_class_def(line, frame): return True pass self.thread_name = Mthread.current_thread_name() self.frame_thread_name = self.thread_name self.set_prompt(prompt) self.process_commands() if filename == '<string>': pyficache.remove_remap_file('<string>') return True def forget(self): """ Remove memory of state variables set in the command processor """ self.stack = [] self.curindex = 0 self.curframe = None self.thread_name = None self.frame_thread_name = None return def eval(self, arg): """Eval string arg in the current frame context.""" try: return eval(arg, self.curframe.f_globals, self.curframe.f_locals) except: t, v = sys.exc_info()[:2] if isinstance(t, str): exc_type_name = t pass else: exc_type_name = t.__name__ self.errmsg(str("%s: %s" % (exc_type_name, arg))) raise return None # Not reached def exec_line(self, line): if self.curframe: local_vars = self.curframe.f_locals global_vars = self.curframe.f_globals else: local_vars = None # FIXME: should probably have place where the # user can store variables inside the debug session. # The setup for this should be elsewhere. Possibly # in interaction. global_vars = None try: code = compile(line + '\n', '"%s"' % line, 'single') exec(code, global_vars, local_vars) except: t, v = sys.exc_info()[:2] if type(t) == bytes: exc_type_name = t else: exc_type_name = t.__name__ self.errmsg('%s: %s' % (str(exc_type_name), str(v))) pass return def parse_position(self, arg, old_mod=None): """parse_position(self, arg)->(fn, name, lineno) Parse arg as [filename:]lineno | function | module Make sure it works for C:\foo\bar.py:12 """ colon = arg.rfind(':') if colon >= 0: # First handle part before the colon arg1 = arg[:colon].rstrip() lineno_str = arg[colon + 1:].lstrip() (mf, filename, lineno) = self.parse_position_one_arg(arg1, old_mod, False) if filename is None: filename = self.core.canonic(arg1) # Next handle part after the colon val = self.get_an_int(lineno_str, "Bad line number: %s" % lineno_str) if val is not None: lineno = val else: (mf, filename, lineno) = self.parse_position_one_arg(arg, old_mod) pass return mf, filename, lineno def parse_position_one_arg(self, arg, old_mod=None, show_errmsg=True): """parse_position_one_arg(self, arg, show_errmsg) -> (module/function, file, lineno) See if arg is a line number, function name, or module name. Return what we've found. None can be returned as a value in the triple. 
""" modfunc, filename, lineno = (None, None, None) if self.curframe: g = self.curframe.f_globals l = self.curframe.f_locals else: g = globals() l = locals() pass try: # First see if argument is an integer lineno = int(eval(arg, g, l)) if old_mod is None: filename = self.curframe.f_code.co_filename pass except: try: modfunc = eval(arg, g, l) except: modfunc = arg pass msg = ('Object %s is not known yet as a function, module, ' 'or is not found along sys.path, ' 'and not a line number.') % str(repr(arg)) try: # See if argument is a module or function if inspect.isfunction(modfunc): pass elif inspect.ismodule(modfunc): filename = pyficache.pyc2py(modfunc.__file__) filename = self.core.canonic(filename) return (modfunc, filename, None) elif hasattr(modfunc, 'im_func'): modfunc = modfunc.__func__ pass else: if show_errmsg: self.errmsg(msg) return (None, None, None) code = modfunc.__code__ lineno = code.co_firstlineno filename = code.co_filename except: if show_errmsg: self.errmsg(msg) return (None, None, None) pass return (modfunc, self.core.canonic(filename), lineno) def get_an_int(self, arg, msg_on_error, min_value=None, max_value=None): """Like cmdfns.get_an_int(), but if there's a stack frame use that in evaluation.""" ret_value = self.get_int_noerr(arg) if ret_value is None: if msg_on_error: self.errmsg(msg_on_error) else: self.errmsg('Expecting an integer, got: %s.' % str(arg)) pass return None if min_value and ret_value < min_value: self.errmsg('Expecting integer value to be at least %d, got: %d.' % (min_value, ret_value)) return None elif max_value and ret_value > max_value: self.errmsg('Expecting integer value to be at most %d, got: %d.' % (max_value, ret_value)) return None return ret_value def get_int_noerr(self, arg): """Eval arg and it is an integer return the value. Otherwise return None""" if self.curframe: g = self.curframe.f_globals l = self.curframe.f_locals else: g = globals() l = locals() pass try: val = int(eval(arg, g, l)) except (SyntaxError, NameError, ValueError, TypeError): return None return val def get_int(self, arg, min_value=0, default=1, cmdname=None, at_most=None): """If no argument use the default. If arg is a an integer between least min_value and at_most, use that. Otherwise report an error. 
If there's a stack frame use that in evaluation.""" if arg is None: return default default = self.get_int_noerr(arg) if default is None: if cmdname: self.errmsg( ("Command '%s' expects an integer; " + "got: %s.") % (cmdname, str(arg))) else: self.errmsg('Expecting a positive integer, got: %s' % str(arg)) pass return None pass if default < min_value: if cmdname: self.errmsg(("Command '%s' expects an integer at least" + ' %d; got: %d.') % (cmdname, min_value, default)) else: self.errmsg( ("Expecting a positive integer at least" + ' %d; got: %d') % (min_value, default)) pass return None elif at_most and default > at_most: if cmdname: self.errmsg(("Command '%s' expects an integer at most" + ' %d; got: %d.') % (cmdname, at_most, default)) else: self.errmsg(("Expecting an integer at most %d; got: %d") % (at_most, default)) pass pass return default def getval(self, arg): try: return eval(arg, self.curframe.f_globals, self.curframe.f_locals) except: t, v = sys.exc_info()[:2] if isinstance(t, str): exc_type_name = t else: exc_type_name = t.__name__ self.errmsg(str("%s: %s" % (exc_type_name, arg))) raise return def ok_for_running(self, cmd_obj, name, nargs): '''We separate some of the common debugger command checks here: whether it makes sense to run the command in this execution state, if the command has the right number of arguments and so on. ''' if hasattr(cmd_obj, 'execution_set'): if not (self.core.execution_status in cmd_obj.execution_set): part1 = ( "Command '%s' is not available for execution status:" % name) mess = Mmisc.wrapped_lines(part1, self.core.execution_status, self.debugger.settings['width']) self.errmsg(mess) return False pass if self.frame is None and cmd_obj.need_stack: self.intf[-1].errmsg("Command '%s' needs an execution stack." % name) return False if nargs < cmd_obj.min_args: self.errmsg( ("Command '%s' needs at least %d argument(s); " + "got %d.") % (name, cmd_obj.min_args, nargs)) return False elif cmd_obj.max_args is not None and nargs > cmd_obj.max_args: self.errmsg( ("Command '%s' can take at most %d argument(s);" + " got %d.") % (name, cmd_obj.max_args, nargs)) return False return True def process_commands(self): """Handle debugger commands.""" if self.core.execution_status != 'No program': self.setup() self.location() pass leave_loop = run_hooks(self, self.preloop_hooks) self.continue_running = False while not leave_loop: try: run_hooks(self, self.precmd_hooks) # bdb had a True return to leave loop. # A more straight-forward way is to set # instance variable self.continue_running. leave_loop = self.process_command() if leave_loop or self.continue_running: break except EOFError: # If we have stacked interfaces, pop to the next # one. If this is the last one however, we'll # just stick with that. FIXME: Possibly we should # check to see if we are interactive. and not # leave if that's the case. Is this the right # thing? investigate and fix. 
                if len(self.debugger.intf) > 1:
                    del self.debugger.intf[-1]
                    self.last_command = ''
                else:
                    if self.debugger.intf[-1].output:
                        self.debugger.intf[-1].output.writeline('Leaving')
                        raise Mexcept.DebuggerQuit
                        pass
                    break
                    pass
                pass
        return run_hooks(self, self.postcmd_hooks)

    def process_command(self):
        # process command
        if len(self.cmd_queue) > 0:
            current_command = self.cmd_queue[0].strip()
            del self.cmd_queue[0]
        else:
            current_command = (self.intf[-1].read_command(
                self.prompt_str).strip())
            if '' == current_command and self.intf[-1].interactive:
                current_command = self.last_command
                pass
            pass
        # Look for comments
        if '' == current_command:
            if self.intf[-1].interactive:
                self.errmsg("No previous command registered, " +
                            "so this is a no-op.")
                pass
            return False
        if current_command is None or current_command[0] == '#':
            return False
        try:
            args_list = arg_split(current_command)
        except:
            self.errmsg("bad parse %s: %s" % sys.exc_info()[0:2])
            import traceback
            for s in traceback.format_tb(sys.exc_info()[2], limit=None):
                self.errmsg(s.strip())
            return False
        for args in args_list:
            if len(args):
                while True:
                    if len(args) == 0:
                        return False
                    macro_cmd_name = args[0]
                    if macro_cmd_name not in self.macros:
                        break
                    try:
                        current_command = \
                            self.macros[macro_cmd_name][0](*args[1:])
                    except TypeError:
                        t, v = sys.exc_info()[:2]
                        self.errmsg("Error expanding macro %s" %
                                    macro_cmd_name)
                        return False
                    if self.settings('debugmacro'):
                        print(current_command)
                        pass
                    if type(current_command) == list:
                        for x in current_command:
                            if str != type(x):
                                self.errmsg(("macro %s should return a List " +
                                             "of Strings. Has %s of type %s")
                                            % (macro_cmd_name, x, type(x)))
                                return False
                            pass
                        first = current_command[0]
                        args = first.split()
                        self.cmd_queue += current_command[1:]
                        current_command = first
                    elif type(current_command) == str:
                        args = current_command.split()
                    else:
                        self.errmsg(("macro %s should return a List " +
                                     "of Strings or a String. Got %s")
                                    % (macro_cmd_name, repr(current_command)))
                        return False
                    pass
                self.cmd_name = args[0]
                cmd_name = resolve_name(self, self.cmd_name)
                self.cmd_argstr = current_command[len(self.cmd_name):].lstrip()
                if cmd_name:
                    self.last_command = current_command
                    cmd_obj = self.commands[cmd_name]
                    if self.ok_for_running(cmd_obj, cmd_name, len(args) - 1):
                        try:
                            self.current_command = current_command
                            result = cmd_obj.run(args)
                            if result:
                                return result
                        except (Mexcept.DebuggerQuit,
                                Mexcept.DebuggerRestart, SystemExit):
                            # Let these exceptions propagate through
                            raise
                        except:
                            self.errmsg("INTERNAL ERROR: " +
                                        traceback.format_exc())
                            pass
                        pass
                    pass
                elif not self.settings('autoeval'):
                    self.undefined_cmd(current_command)
                else:
                    self.exec_line(current_command)
                    pass
                pass
            pass
        return False

    def remove_preloop_hook(self, hook):
        try:
            position = self.preloop_hooks.index(hook)
        except ValueError:
            return False
        del self.preloop_hooks[position]
        return True

    def setup(self):
        """Initialization done before entering the debugger-command
        loop. In particular we set up the call stack used for local
        variable lookup and frame/up/down commands.

        We return True if we should NOT enter the debugger-command
        loop."""
        self.forget()
        if self.settings('dbg_trepan'):
            self.frame = inspect.currentframe()
            pass
        if self.event in ['exception', 'c_exception']:
            exc_type, exc_value, exc_traceback = self.event_arg
        else:
            _, _, exc_traceback = (None, None, None,)  # NOQA
            pass
        if self.frame or exc_traceback:
            self.stack, self.curindex = \
                get_stack(self.frame, exc_traceback, None, self)
            self.curframe = self.stack[self.curindex][0]
            self.thread_name = Mthread.current_thread_name()
            if exc_traceback:
                self.list_lineno = traceback.extract_tb(exc_traceback, 1)[0][1]
        else:
            self.stack = self.curframe = \
                self.botframe = None
            pass
        if self.curframe:
            self.list_lineno = \
                max(1, inspect.getlineno(self.curframe) -
                    int(self.settings('listsize') / 2)) - 1
            self.list_filename = self.curframe.f_code.co_filename
        else:
            if not exc_traceback:
                self.list_lineno = None
            pass
        # if self.execRcLines()==1: return True
        return False

    def queue_startfile(self, cmdfile):
        '''Arrange for a file of debugger commands to get read in the
        process-command loop.'''
        expanded_cmdfile = os.path.expanduser(cmdfile)
        is_readable = Mfile.readable(expanded_cmdfile)
        if is_readable:
            self.cmd_queue.append('source ' + expanded_cmdfile)
        elif is_readable is None:
            self.errmsg("source file '%s' doesn't exist" % expanded_cmdfile)
        else:
            self.errmsg("source file '%s' is not readable" % expanded_cmdfile)
            pass
        return

    def undefined_cmd(self, cmd):
        """Error message when a command doesn't exist"""
        self.errmsg('Undefined command: "%s". Try "help".' % cmd)
        return

    def read_history_file(self):
        """Read the command history file -- possibly."""
        histfile = self.debugger.intf[-1].histfile
        try:
            import readline
            readline.read_history_file(histfile)
        except IOError:
            pass
        except ImportError:
            pass
        return

    def write_history_file(self):
        """Write the command history file -- possibly."""
        settings = self.debugger.settings
        histfile = self.debugger.intf[-1].histfile
        if settings['hist_save']:
            try:
                import readline
                try:
                    readline.write_history_file(histfile)
                except IOError:
                    pass
            except ImportError:
                pass
            pass
        return

    def _populate_commands(self):
        """Create an instance of each of the debugger commands. Commands
        are found by importing files in the directory 'command'. Some
        files are excluded via an array set in __init__.

        For each of the remaining files, we import them and scan for
        class names inside those files, and for each class name we
        create an instance of that class. The set of DebuggerCommand
        class instances forms the set of possible debugger commands."""
        from trepan.processor import command as Mcommand
        if hasattr(Mcommand, '__modules__'):
            return self.populate_commands_easy_install(Mcommand)
        else:
            return self.populate_commands_pip(Mcommand)

    def populate_commands_pip(self, Mcommand):
        cmd_instances = []
        eval_cmd_template = 'command_mod.%s(self)'
        for mod_name in Mcommand.__dict__.keys():
            if mod_name.startswith('__'):
                continue
            import_name = "trepan.processor.command." + mod_name
            imp = __import__(import_name)
            if imp.__name__ == 'trepan':
                command_mod = imp.processor.command
            else:
                if mod_name in ('info_sub', 'set_sub', 'show_sub',):
                    pass
                try:
                    command_mod = getattr(__import__(import_name), mod_name)
                except:
                    # Don't need to warn about optional modules
                    if mod_name not in self.optional_modules:
                        print('Error importing %s: %s' %
                              (mod_name, sys.exc_info()[0]))
                        pass
                    continue
                pass
            classnames = [tup[0] for tup in
                          inspect.getmembers(command_mod, inspect.isclass)
                          if ('DebuggerCommand' != tup[0]
                              and tup[0].endswith('Command'))]
            for classname in classnames:
                eval_cmd = eval_cmd_template % classname
                if False:
                    instance = eval(eval_cmd)
                    cmd_instances.append(instance)
                else:
                    try:
                        instance = eval(eval_cmd)
                        cmd_instances.append(instance)
                    except:
                        print('Error loading %s from %s: %s' %
                              (classname, mod_name, sys.exc_info()[0]))
                        pass
                    pass
                pass
            pass
        return cmd_instances

    def populate_commands_easy_install(self, Mcommand):
        cmd_instances = []
        srcdir = get_srcdir()
        sys.path.insert(0, srcdir)
        for mod_name in Mcommand.__modules__:
            if mod_name in ('info_sub', 'set_sub', 'show_sub',):
                pass
            import_name = "command." + mod_name
            try:
                command_mod = getattr(__import__(import_name), mod_name)
            except:
                if mod_name not in self.optional_modules:
                    print('Error importing %s: %s' %
                          (mod_name, sys.exc_info()[0]))
                    pass
                continue
            classnames = [tup[0] for tup in
                          inspect.getmembers(command_mod, inspect.isclass)
                          if ('DebuggerCommand' != tup[0]
                              and tup[0].endswith('Command'))]
            for classname in classnames:
                if False:
                    instance = getattr(command_mod, classname)(self)
                    cmd_instances.append(instance)
                else:
                    try:
                        instance = getattr(command_mod, classname)(self)
                        cmd_instances.append(instance)
                    except:
                        print('Error loading %s from %s: %s' %
                              (classname, mod_name, sys.exc_info()[0]))
                        pass
                    pass
                pass
            pass
        return cmd_instances

    def _populate_cmd_lists(self):
        """Populate the command lists and hashes: self.commands,
        self.aliases, and self.category."""
        self.commands = {}
        self.aliases = {}
        self.category = {}
        # self.short_help = {}
        for cmd_instance in self.cmd_instances:
            if not hasattr(cmd_instance, 'aliases'):
                continue
            alias_names = cmd_instance.aliases
            cmd_name = cmd_instance.name
            self.commands[cmd_name] = cmd_instance
            for alias_name in alias_names:
                self.aliases[alias_name] = cmd_name
                pass
            cat = getattr(cmd_instance, 'category')
            if cat and self.category.get(cat):
                self.category[cat].append(cmd_name)
            else:
                self.category[cat] = [cmd_name]
                pass
            # sh = getattr(cmd_instance, 'short_help')
            # if sh:
            #     self.short_help[cmd_name] = getattr(c, 'short_help')
            #     pass
            pass
        for k in list(self.category.keys()):
            self.category[k].sort()
            pass
        return
    pass
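# A minimal, standalone sketch (not from trepan) of the discovery pattern the
# populate_commands_* methods above use: import a module, collect every class
# whose name ends in "Command" (excluding the DebuggerCommand base), and
# instantiate each one. The module name "example_commands" is hypothetical.
import importlib
import inspect


def discover_command_classes(module_name="example_commands"):
    mod = importlib.import_module(module_name)
    return [cls for name, cls in inspect.getmembers(mod, inspect.isclass)
            if name != 'DebuggerCommand' and name.endswith('Command')]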
def repr_values(condition: Callable[..., bool],
                lambda_inspection: Optional[ConditionLambdaInspection],
                resolved_kwargs: Mapping[str, Any],
                a_repr: reprlib.Repr) -> List[str]:
    """
    Represent function arguments and frame values in the error message on contract breach.

    :param condition: condition function of the contract
    :param lambda_inspection:
        inspected lambda AST node corresponding to the condition function
        (None if the condition was not given as a lambda function)
    :param resolved_kwargs: arguments put in the function call
    :param a_repr: representation instance that defines how the values are represented.
    :return: list of value representations
    """
    # Hide _ARGS and _KWARGS if they are not part of the condition for better readability
    if '_ARGS' in resolved_kwargs or '_KWARGS' in resolved_kwargs:
        parameters = inspect.signature(condition).parameters

        malleable_kwargs = cast(
            MutableMapping[str, Any],
            resolved_kwargs.copy()  # type: ignore
        )

        if '_ARGS' not in parameters:
            malleable_kwargs.pop('_ARGS', None)

        if '_KWARGS' not in parameters:
            malleable_kwargs.pop('_KWARGS', None)

        selected_kwargs = cast(Mapping[str, Any], malleable_kwargs)
    else:
        selected_kwargs = resolved_kwargs

    # Don't use ``resolved_kwargs`` from this point on.
    # ``selected_kwargs`` is meant to be used instead for better readability of error messages.

    if is_lambda(a_function=condition):
        assert lambda_inspection is not None, \
            "Expected a lambda inspection when given a condition as a lambda function"
    else:
        assert lambda_inspection is None, \
            "Expected no lambda inspection in a condition given as a non-lambda function"

    reprs = None  # type: Optional[MutableMapping[str, Any]]

    if lambda_inspection is not None:
        variable_lookup = collect_variable_lookup(
            condition=condition, resolved_kwargs=selected_kwargs)

        recompute_visitor = icontract._recompute.Visitor(
            variable_lookup=variable_lookup)

        recompute_visitor.visit(node=lambda_inspection.node.body)
        recomputed_values = recompute_visitor.recomputed_values

        repr_visitor = Visitor(recomputed_values=recomputed_values,
                               variable_lookup=variable_lookup,
                               atok=lambda_inspection.atok)
        repr_visitor.visit(node=lambda_inspection.node.body)

        reprs = repr_visitor.reprs

    # Add original arguments from the call unless they shadow a variable in the re-computation.
    #
    # The condition arguments are often not sufficient to figure out the error. The user usually
    # needs more context which is captured in the remainder of the call arguments.
    if reprs is None:
        reprs = dict()

    for key in sorted(selected_kwargs.keys()):
        val = selected_kwargs[key]
        if key not in reprs and _representable(value=val):
            reprs[key] = val

    parts = []  # type: List[str]

    # We need to sort in order to present the same violation error on repeated violations.
    # Otherwise, the order of the reported arguments may be arbitrary.
    for key in sorted(reprs.keys()):
        value = reprs[key]
        if isinstance(value, icontract._recompute.FirstExceptionInAll):
            writing = ['{} was False, e.g., with'.format(key)]
            for input_name, input_value in value.inputs:
                writing.append('\n')
                writing.append('  {} = {}'.format(input_name,
                                                  a_repr.repr(input_value)))
            parts.append(''.join(writing))
        else:
            parts.append('{} was {}'.format(key, a_repr.repr(value)))

    return parts
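# A short, standalone illustration (not icontract code) of how the ``a_repr``
# argument above shapes the reported values: a ``reprlib.Repr`` instance
# truncates long containers and strings, so violation messages stay readable.
import reprlib

a_repr = reprlib.Repr()
a_repr.maxlist = 3        # show at most three list elements
a_repr.maxstring = 40     # clip long string representations

print(a_repr.repr(list(range(100))))   # [0, 1, 2, ...]
print(a_repr.repr('x' * 200))          # middle of the string elided with '...'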
def safe_max(*args):
    iterable = args[0] if len(args) == 1 else args
    return max((value for value in iterable if value is not None),
               default=None)


def is_ipython():
    try:
        __IPYTHON__
        return True
    except NameError:
        return False


compact_repr = Repr()
compact_repr.maxstring = 80
compact_repr.maxother = 80
compact_repr = compact_repr.repr


class OmittedType:
    __slots__ = ()

    def __repr__(self):
        return "<...>"


Omitted = OmittedType()
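# Hypothetical usage of the helpers defined above (assumes it runs in the same
# module): ``safe_max`` ignores None values, ``compact_repr`` clips long value
# representations to roughly 80 characters, and ``Omitted`` is a sentinel that
# prints as "<...>".
print(safe_max(3, None, 7))        # 7
print(safe_max([None, None]))      # None (empty after filtering)
print(compact_repr('y' * 500))     # truncated repr, middle replaced by '...'
print(repr(Omitted))               # <...>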
standard_library.install_aliases()

from builtins import open

import inspect
import smtplib
import sys
import string
import tempfile
import traceback
import xmlrpc.client

from reprlib import Repr

_repr = Repr()
_repr.maxstring = 3000
_saferepr = _repr.repr


def printTraceBack(tb=None, output=sys.stderr, exc_type=None, exc_msg=None):
    if isinstance(output, str):
        output = open(output, 'w')
    exc_info = sys.exc_info()
    if tb is None:
        tb = exc_info[2]
    if exc_type is None:
        exc_type = exc_info[0]
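# A small sketch (assumed to run in the same module as the definitions above)
# of what ``_saferepr`` does: representations longer than ``maxstring`` (3000
# here) have their middle elided, which keeps logged or e-mailed tracebacks
# from ballooning when a frame holds a huge value.
short = _saferepr('abc')           # "'abc'" -- unchanged
long_ = _saferepr('z' * 100000)    # clipped to roughly 3000 characters
print(len(short), len(long_))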
def repr_values(condition: Callable[..., bool],
                lambda_inspection: Optional[ConditionLambdaInspection],
                condition_kwargs: Mapping[str, Any],
                a_repr: reprlib.Repr) -> List[str]:
    # pylint: disable=too-many-locals
    """
    Represent function arguments and frame values in the error message on contract breach.

    :param condition: condition function of the contract
    :param lambda_inspection:
        inspected lambda AST node corresponding to the condition function
        (None if the condition was not given as a lambda function)
    :param condition_kwargs: condition arguments
    :param a_repr: representation instance that defines how the values are represented.
    :return: list of value representations
    """
    if _is_lambda(a_function=condition):
        assert lambda_inspection is not None, \
            "Expected a lambda inspection when given a condition as a lambda function"
    else:
        assert lambda_inspection is None, \
            "Expected no lambda inspection in a condition given as a non-lambda function"

    reprs = dict()  # type: MutableMapping[str, Any]

    if lambda_inspection is not None:
        # Collect the variable lookup of the condition function:
        variable_lookup = []  # type: List[Mapping[str, Any]]

        # Add condition arguments to the lookup
        variable_lookup.append(condition_kwargs)

        # Add closure to the lookup
        closure_dict = dict()  # type: Dict[str, Any]

        if condition.__closure__ is not None:  # type: ignore
            closure_cells = condition.__closure__  # type: ignore
            freevars = condition.__code__.co_freevars

            assert len(closure_cells) == len(freevars), \
                "Number of closure cells of a condition function ({}) == number of free vars ({})".format(
                    len(closure_cells), len(freevars))

            for cell, freevar in zip(closure_cells, freevars):
                closure_dict[freevar] = cell.cell_contents

        variable_lookup.append(closure_dict)

        # Add globals to the lookup
        if condition.__globals__ is not None:  # type: ignore
            variable_lookup.append(condition.__globals__)  # type: ignore

        # pylint: disable=protected-access
        recompute_visitor = icontract._recompute.Visitor(
            variable_lookup=variable_lookup)
        recompute_visitor.visit(node=lambda_inspection.node.body)
        recomputed_values = recompute_visitor.recomputed_values

        repr_visitor = Visitor(recomputed_values=recomputed_values,
                               variable_lookup=variable_lookup,
                               atok=lambda_inspection.atok)
        repr_visitor.visit(node=lambda_inspection.node.body)

        reprs = repr_visitor.reprs
    else:
        for key, val in condition_kwargs.items():
            if _representable(value=val):
                reprs[key] = val

    parts = []  # type: List[str]
    for key in sorted(reprs.keys()):
        parts.append('{} was {}'.format(key, a_repr.repr(reprs[key])))

    return parts
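# A tiny illustration (independent of icontract) of the closure inspection the
# function above performs: free variables of a lambda live in ``__closure__``
# cells, aligned one-to-one with ``__code__.co_freevars``.
def make_condition(threshold):
    return lambda x: x > threshold


condition = make_condition(10)
freevars = condition.__code__.co_freevars      # ('threshold',)
cells = condition.__closure__                  # one cell per free variable
closure_dict = {name: cell.cell_contents
                for name, cell in zip(freevars, cells)}
print(closure_dict)                            # {'threshold': 10}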