def repr(self, obj):
    """Return a size-limited representation of *obj*.

    SQLAlchemy declarative instances are routed to the custom
    ``repr_Base`` handler; everything else is delegated to the base
    ``Repr`` implementation appropriate for the running Python.
    """
    if isinstance(obj.__class__, DeclarativeMeta):
        # Declarative models get the dedicated, depth-limited handler.
        return self.repr_Base(obj, self.maxlevel)
    # Fall back to the stock implementation: old-style base-class call
    # on Python 2, cooperative super() on Python 3.
    if sys.version_info >= (3,):
        return super(Repr, self).repr(obj)
    return _Repr.repr(self, obj)
def test_tuple(self): eq = self.assertEqual eq(r((1,)), "(1,)") t3 = (1, 2, 3) eq(r(t3), "(1, 2, 3)") r2 = Repr() r2.maxtuple = 2 expected = repr(t3)[:-2] + "...)" eq(r2.repr(t3), expected) # modified fillvalue: r3 = Repr() r3.fillvalue = '+++' r3.maxtuple = 2 expected = repr(t3)[:-2] + "+++)" eq(r3.repr(t3), expected)
def test_tuple(self): eq = self.assertEqual eq(r((1,)), "(1,)") t3 = (1, 2, 3) eq(r(t3), "(1, 2, 3)") r2 = Repr() r2.maxtuple = 2 expected = repr(t3)[:-2] + "...)" eq(r2.repr(t3), expected)
def get_prices_from_api(request_uri: str) -> dict:
    """Using the provided URI, request data from the Octopus API and return
    a JSON object. Try to handle errors gracefully with retries when
    appropriate.

    :param request_uri: full URL of the Octopus Agile price endpoint
    :return: decoded JSON payload from the API
    :raises SystemExit: when the retry budget is exhausted or a
        non-retryable request error occurs
    """
    # Try to handle issues with the API - rare but do happen, using an
    # exponential sleep time up to 2**14 (16384) seconds, approx 4.5 hours.
    # We will keep trying for over 9 hours and then give up.
    print('Requesting Agile prices from Octopus API...')
    retry_count = 0
    my_repr = Repr()
    my_repr.maxstring = 80  # let's avoid truncating our error messages too much
    while retry_count <= MAX_RETRIES:
        if retry_count == MAX_RETRIES:
            raise SystemExit('API retry limit exceeded.')
        delay = 2 ** retry_count  # exponential backoff interval in seconds
        try:
            response = requests.get(request_uri, timeout=5)
            response.raise_for_status()
            if response.status_code // 100 == 2:
                print('API request successful, status '
                      + str(response.status_code) + '.')
                return response.json()
            # Non-2xx that raise_for_status() did not flag (e.g. 3xx with
            # redirects disabled): fall through to the backoff below rather
            # than spinning on the API.
        except requests.exceptions.HTTPError:
            # response is always bound here: raise_for_status() can only
            # raise after requests.get() succeeded.
            print('API HTTP error ' + str(response.status_code)
                  + ', retrying in ' + str(delay) + 's')
        except requests.exceptions.ConnectionError as error:
            print('API connection error: ' + my_repr.repr(str(error))
                  + ', retrying in ' + str(delay) + 's')
        except requests.exceptions.Timeout:
            print('API request timeout, retrying in ' + str(delay) + 's')
        except requests.exceptions.RequestException as error:
            # Anything else from requests is treated as fatal.
            raise SystemExit('API Request error: ' + str(error)) from error
        time.sleep(delay)
        retry_count += 1
def repr_values(condition: Callable[..., bool], lambda_inspection: Optional[ConditionLambdaInspection],
                condition_kwargs: Mapping[str, Any], a_repr: reprlib.Repr) -> List[str]:
    # pylint: disable=too-many-locals
    """
    Represent function arguments and frame values in the error message on contract breach.

    :param condition: condition function of the contract
    :param lambda_inspection:
        inspected lambda AST node corresponding to the condition function (None if the condition was not given as a
        lambda function)
    :param condition_kwargs: condition arguments
    :param a_repr: representation instance that defines how the values are represented.
    :return: list of value representations
    """
    if is_lambda(a_function=condition):
        assert lambda_inspection is not None, "Expected a lambda inspection when given a condition as a lambda function"
    else:
        assert lambda_inspection is None, "Expected no lambda inspection in a condition given as a non-lambda function"

    reprs = dict()  # type: MutableMapping[str, Any]

    if lambda_inspection is None:
        # No AST available: fall back to representing the raw arguments.
        reprs = {name: value for name, value in condition_kwargs.items() if _representable(value=value)}
    else:
        # Re-evaluate the sub-expressions of the lambda body so that each
        # can be reported alongside its value.
        variable_lookup = collect_variable_lookup(condition=condition, condition_kwargs=condition_kwargs)

        # pylint: disable=protected-access
        recompute_visitor = icontract._recompute.Visitor(variable_lookup=variable_lookup)
        recompute_visitor.visit(node=lambda_inspection.node.body)

        repr_visitor = Visitor(recomputed_values=recompute_visitor.recomputed_values,
                               variable_lookup=variable_lookup,
                               atok=lambda_inspection.atok)
        repr_visitor.visit(node=lambda_inspection.node.body)
        reprs = repr_visitor.reprs

    return ['{} was {}'.format(name, a_repr.repr(reprs[name])) for name in sorted(reprs.keys())]
class CommandProcessor(Mprocessor.Processor):
    """Debugger command processor: reads debugger commands and runs them.

    Holds the command loop state (current frame, event, queued commands,
    macros, hooks) and dispatches parsed command lines to DebuggerCommand
    instances collected from the ``command`` package.
    """

    def __init__(self, core_obj, opts=None):
        # Helper to read an option from `opts` with the processor defaults.
        get_option = lambda key: \
            Mmisc.option_set(opts, key, DEFAULT_PROC_OPTS)
        Mprocessor.Processor.__init__(self, core_obj)
        self.continue_running = False  # True if we should leave command loop

        self.event2short = dict(EVENT2SHORT)
        self.event2short['signal'] = '?!'
        self.event2short['brkpt'] = 'xx'

        self.optional_modules = ('ipython', 'bpy')
        self.cmd_instances = self._populate_commands()

        # command argument string. Is like current_command, but the part
        # after cmd_name has been removed.
        self.cmd_argstr = ''

        # command name before alias or macro resolution
        self.cmd_name = ''
        self.cmd_queue = []  # Queued debugger commands
        self.completer = lambda text, state: \
            Mcomplete.completer(self, text, state)
        self.current_command = ''  # Current command getting run
        self.debug_nest = 1
        self.display_mgr = Mdisplay.DisplayMgr()
        self.intf = core_obj.debugger.intf
        self.last_command = None  # Initially a no-op
        self.precmd_hooks = []

        self.location = lambda: print_location(self)

        self.preloop_hooks = []
        self.postcmd_hooks = []

        self._populate_cmd_lists()
        self.prompt_str = '(trepan3k) '

        # Stop only if line/file is different from last time
        self.different_line = None

        # These values updated on entry. Set initial values.
        self.curframe = None
        self.event = None
        self.event_arg = None
        self.frame = None
        self.list_lineno = 0  # last list number used in "list"
        self.list_filename = None  # last filename used in list
        self.macros = {}  # Debugger Macros

        # Create a custom safe Repr instance and increase its maxstring.
        # The default of 30 truncates error messages too easily.
        self._repr = Repr()
        self._repr.maxstring = 100
        self._repr.maxother = 60
        self._repr.maxset = 10
        self._repr.maxfrozen = 10
        self._repr.array = 10

        self.stack = []
        self.thread_name = None
        self.frame_thread_name = None

        initfile_list = get_option('initfile_list')
        for init_cmdfile in initfile_list:
            self.queue_startfile(init_cmdfile)
        return

    def _saferepr(self, str, maxwidth=None):
        # Truncated, recursion-safe repr of `str`, clipped to the display
        # width. NOTE: the parameter name shadows the builtin `str`.
        if maxwidth is None:
            maxwidth = self.debugger.settings['width']
        return self._repr.repr(str)[:maxwidth]

    def add_preloop_hook(self, hook, position=-1, nodups=True):
        """Add `hook` to run before the command loop; returns False if it
        was already registered."""
        if hook in self.preloop_hooks:
            return False
        self.preloop_hooks.insert(position, hook)
        return True

    # To be overridden in derived debuggers
    def defaultFile(self):
        """Produce a reasonable default."""
        filename = self.curframe.f_code.co_filename
        # Consider using is_exec_stmt(). I just don't understand
        # the conditions under which the below test is true.
        if filename == '<string>' and self.debugger.mainpyfile:
            filename = self.debugger.mainpyfile
            pass
        return filename

    def set_prompt(self, prompt='trepan3k'):
        """Build the prompt string, reflecting thread name, debugger
        nesting depth, and optional highlighting."""
        if self.thread_name and self.thread_name != 'MainThread':
            prompt += ':' + self.thread_name
            pass
        self.prompt_str = '%s%s%s' % ('(' * self.debug_nest, prompt,
                                      ')' * self.debug_nest)
        highlight = self.debugger.settings['highlight']
        if highlight and highlight in ('light', 'dark'):
            self.prompt_str = colorize('underline', self.prompt_str)
        self.prompt_str += ' '

    def event_processor(self, frame, event, event_arg, prompt='trepan3k'):
        'Command event processor: read commands and do something with them.'
        self.frame = frame
        self.event = event
        self.event_arg = event_arg

        filename = frame.f_code.co_filename
        lineno = frame.f_lineno
        line = linecache.getline(filename, lineno, frame.f_globals)
        if not line:
            # linecache came up empty; try pyficache, remapping
            # "<frozen ...>" pseudo-filenames first.
            opts = {
                'output': 'plain',
                'reload_on_change': self.settings('reload'),
                'strip_nl': False
            }
            m = re.search('^<frozen (.*)>', filename)
            if m and m.group(1):
                filename = pyficache.unmap_file(m.group(1))
            line = pyficache.getline(filename, lineno, opts)
        self.current_source_text = line
        if self.settings('skip') is not None:
            # Skip over "def" and "class" statement events when requested.
            if Mbytecode.is_def_stmt(line, frame):
                return True
            if Mbytecode.is_class_def(line, frame):
                return True
            pass
        self.thread_name = Mthread.current_thread_name()
        self.frame_thread_name = self.thread_name
        self.set_prompt(prompt)
        self.process_commands()
        if filename == '<string>':
            pyficache.remove_remap_file('<string>')
        return True

    def forget(self):
        """ Remove memory of state variables set in the command processor """
        self.stack = []
        self.curindex = 0
        self.curframe = None
        self.thread_name = None
        self.frame_thread_name = None
        return

    def eval(self, arg):
        """Eval string arg in the current frame context."""
        try:
            return eval(arg, self.curframe.f_globals,
                        self.curframe.f_locals)
        except:
            t, v = sys.exc_info()[:2]
            if isinstance(t, str):
                exc_type_name = t
                pass
            else:
                exc_type_name = t.__name__
            self.errmsg(str("%s: %s" % (exc_type_name, arg)))
            raise
        return None  # Not reached

    def exec_line(self, line):
        """Compile and exec `line` in the current frame's namespaces
        (or with no namespaces when there is no frame)."""
        if self.curframe:
            local_vars = self.curframe.f_locals
            global_vars = self.curframe.f_globals
        else:
            local_vars = None
            # FIXME: should probably have place where the
            # user can store variables inside the debug session.
            # The setup for this should be elsewhere. Possibly
            # in interaction.
            global_vars = None
        try:
            code = compile(line + '\n', '"%s"' % line, 'single')
            exec(code, global_vars, local_vars)
        except:
            t, v = sys.exc_info()[:2]
            # NOTE(review): comparing an exception *type* to `bytes` looks
            # wrong (likely a Python-2 `str` leftover) — confirm intent.
            if type(t) == bytes:
                exc_type_name = t
            else:
                exc_type_name = t.__name__
            self.errmsg('%s: %s' % (str(exc_type_name), str(v)))
            pass
        return

    def parse_position(self, arg, old_mod=None):
        """parse_position(self, arg)->(fn, name, lineno)
        Parse arg as [filename:]lineno | function | module
        Make sure it works for C:\foo\bar.py:12
        """
        colon = arg.rfind(':')
        if colon >= 0:
            # First handle part before the colon
            arg1 = arg[:colon].rstrip()
            lineno_str = arg[colon + 1:].lstrip()
            (mf, filename, lineno) = self.parse_position_one_arg(arg1,
                                                                 old_mod,
                                                                 False)
            if filename is None:
                filename = self.core.canonic(arg1)
            # Next handle part after the colon
            val = self.get_an_int(lineno_str,
                                  "Bad line number: %s" % lineno_str)
            if val is not None:
                lineno = val
        else:
            (mf, filename, lineno) = self.parse_position_one_arg(arg, old_mod)
            pass
        return mf, filename, lineno

    def parse_position_one_arg(self, arg, old_mod=None, show_errmsg=True):
        """parse_position_one_arg(self, arg, show_errmsg) ->
        (module/function, file, lineno)

        See if arg is a line number, function name, or module name.
        Return what we've found. None can be returned as a value in
        the triple.
        """
        modfunc, filename, lineno = (None, None, None)
        if self.curframe:
            g = self.curframe.f_globals
            l = self.curframe.f_locals
        else:
            g = globals()
            l = locals()
            pass
        try:
            # First see if argument is an integer
            lineno = int(eval(arg, g, l))
            if old_mod is None:
                filename = self.curframe.f_code.co_filename
                pass
        except:
            try:
                modfunc = eval(arg, g, l)
            except:
                modfunc = arg
                pass
            msg = ('Object %s is not known yet as a function, module, '
                   'or is not found along sys.path, '
                   'and not a line number.') % str(repr(arg))
            try:
                # See if argument is a module or function
                if inspect.isfunction(modfunc):
                    pass
                elif inspect.ismodule(modfunc):
                    filename = pyficache.pyc2py(modfunc.__file__)
                    filename = self.core.canonic(filename)
                    return (modfunc, filename, None)
                elif hasattr(modfunc, 'im_func'):
                    # Bound method: unwrap to the underlying function.
                    modfunc = modfunc.__func__
                    pass
                else:
                    if show_errmsg:
                        self.errmsg(msg)
                    return (None, None, None)
                code = modfunc.__code__
                lineno = code.co_firstlineno
                filename = code.co_filename
            except:
                if show_errmsg:
                    self.errmsg(msg)
                return (None, None, None)
            pass
        return (modfunc, self.core.canonic(filename), lineno)

    def get_an_int(self, arg, msg_on_error, min_value=None, max_value=None):
        """Like cmdfns.get_an_int(), but if there's a stack frame use that
        in evaluation."""
        ret_value = self.get_int_noerr(arg)
        if ret_value is None:
            if msg_on_error:
                self.errmsg(msg_on_error)
            else:
                self.errmsg('Expecting an integer, got: %s.' % str(arg))
                pass
            return None
        if min_value and ret_value < min_value:
            self.errmsg('Expecting integer value to be at least %d, got: %d.'
                        % (min_value, ret_value))
            return None
        elif max_value and ret_value > max_value:
            self.errmsg('Expecting integer value to be at most %d, got: %d.'
                        % (max_value, ret_value))
            return None
        return ret_value

    def get_int_noerr(self, arg):
        """Eval arg and it is an integer return the value.
        Otherwise return None"""
        if self.curframe:
            g = self.curframe.f_globals
            l = self.curframe.f_locals
        else:
            g = globals()
            l = locals()
            pass
        try:
            val = int(eval(arg, g, l))
        except (SyntaxError, NameError, ValueError, TypeError):
            return None
        return val

    def get_int(self, arg, min_value=0, default=1, cmdname=None,
                at_most=None):
        """If no argument use the default. If arg is a an integer between
        least min_value and at_most, use that. Otherwise report an error.
        If there's a stack frame use that in evaluation."""
        if arg is None:
            return default
        default = self.get_int_noerr(arg)
        if default is None:
            if cmdname:
                self.errmsg(("Command '%s' expects an integer; " +
                             "got: %s.") % (cmdname, str(arg)))
            else:
                self.errmsg('Expecting a positive integer, got: %s'
                            % str(arg))
                pass
            return None
            pass
        if default < min_value:
            if cmdname:
                self.errmsg(("Command '%s' expects an integer at least" +
                             ' %d; got: %d.')
                            % (cmdname, min_value, default))
            else:
                self.errmsg(("Expecting a positive integer at least" +
                             ' %d; got: %d') % (min_value, default))
                pass
            return None
        elif at_most and default > at_most:
            if cmdname:
                self.errmsg(("Command '%s' expects an integer at most" +
                             ' %d; got: %d.') % (cmdname, at_most, default))
            else:
                self.errmsg(("Expecting an integer at most %d; got: %d")
                            % (at_most, default))
                pass
            pass
        return default

    def getval(self, arg):
        """Eval `arg` in the current frame; report and re-raise on error."""
        try:
            return eval(arg, self.curframe.f_globals,
                        self.curframe.f_locals)
        except:
            t, v = sys.exc_info()[:2]
            if isinstance(t, str):
                exc_type_name = t
            else:
                exc_type_name = t.__name__
            self.errmsg(str("%s: %s" % (exc_type_name, arg)))
            raise
        return

    def ok_for_running(self, cmd_obj, name, nargs):
        '''We separate some of the common debugger command checks here:
        whether it makes sense to run the command in this execution state,
        if the command has the right number of arguments and so on.
        '''
        if hasattr(cmd_obj, 'execution_set'):
            if not (self.core.execution_status in cmd_obj.execution_set):
                part1 = ("Command '%s' is not available for execution "
                         "status:" % name)
                mess = Mmisc.wrapped_lines(part1,
                                           self.core.execution_status,
                                           self.debugger.settings['width'])
                self.errmsg(mess)
                return False
            pass
        if self.frame is None and cmd_obj.need_stack:
            self.intf[-1].errmsg("Command '%s' needs an execution stack."
                                 % name)
            return False
        if nargs < cmd_obj.min_args:
            self.errmsg(("Command '%s' needs at least %d argument(s); " +
                         "got %d.") % (name, cmd_obj.min_args, nargs))
            return False
        elif cmd_obj.max_args is not None and nargs > cmd_obj.max_args:
            self.errmsg(("Command '%s' can take at most %d argument(s);" +
                         " got %d.") % (name, cmd_obj.max_args, nargs))
            return False
        return True

    def process_commands(self):
        """Handle debugger commands."""
        if self.core.execution_status != 'No program':
            self.setup()
            self.location()
            pass
        leave_loop = run_hooks(self, self.preloop_hooks)
        self.continue_running = False

        while not leave_loop:
            try:
                run_hooks(self, self.precmd_hooks)
                # bdb had a True return to leave loop.
                # A more straight-forward way is to set
                # instance variable self.continue_running.
                leave_loop = self.process_command()
                if leave_loop or self.continue_running:
                    break
            except EOFError:
                # If we have stacked interfaces, pop to the next
                # one.  If this is the last one however, we'll
                # just stick with that.  FIXME: Possibly we should
                # check to see if we are interactive. and not
                # leave if that's the case. Is this the right
                # thing?  investigate and fix.
                if len(self.debugger.intf) > 1:
                    del self.debugger.intf[-1]
                    self.last_command = ''
                else:
                    if self.debugger.intf[-1].output:
                        self.debugger.intf[-1].output.writeline('Leaving')
                        raise Mexcept.DebuggerQuit
                    pass
                break
            pass
        pass
        return run_hooks(self, self.postcmd_hooks)

    def process_command(self):
        # process command: take from the queue if non-empty, otherwise
        # read from the current interface; expand macros and aliases;
        # then run the resolved command object.
        if len(self.cmd_queue) > 0:
            current_command = self.cmd_queue[0].strip()
            del self.cmd_queue[0]
        else:
            current_command = (self.intf[-1].read_command(
                self.prompt_str).strip())
            if '' == current_command and self.intf[-1].interactive:
                # An empty line repeats the last command interactively.
                current_command = self.last_command
                pass
            pass
        # Look for comments
        if '' == current_command:
            if self.intf[-1].interactive:
                self.errmsg("No previous command registered, " +
                            "so this is a no-op.")
                pass
            return False
        if current_command is None or current_command[0] == '#':
            return False
        try:
            args_list = arg_split(current_command)
        except:
            self.errmsg("bad parse %s: %s" % sys.exc_info()[0:2])
            import traceback
            for s in traceback.format_tb(sys.exc_info()[2], limit=None):
                self.errmsg(s.strip())
            return False
        for args in args_list:
            if len(args):
                # Repeatedly expand macros until the head word is not one.
                while True:
                    if len(args) == 0:
                        return False
                    macro_cmd_name = args[0]
                    if macro_cmd_name not in self.macros:
                        break
                    try:
                        current_command = \
                            self.macros[macro_cmd_name][0](*args[1:])
                    except TypeError:
                        t, v = sys.exc_info()[:2]
                        self.errmsg("Error expanding macro %s"
                                    % macro_cmd_name)
                        return False
                    if self.settings('debugmacro'):
                        print(current_command)
                        pass
                    if type(current_command) == list:
                        for x in current_command:
                            if str != type(x):
                                # NOTE(review): this format string has three
                                # %s placeholders but four arguments — will
                                # raise TypeError if ever hit; confirm.
                                self.errmsg(("macro %s should return a List "
                                             + "of Strings. Has %s of type %s")
                                            % (macro_cmd_name, x,
                                               repr(current_command),
                                               type(x)))
                                return False
                            pass
                        first = current_command[0]
                        args = first.split()
                        # NOTE(review): this expression discards its result;
                        # probably `self.cmd_queue += [...]` was intended —
                        # as written the remaining macro commands are lost.
                        self.cmd_queue + [current_command[1:]]
                        current_command = first
                    elif type(current_command) == str:
                        args = current_command.split()
                    else:
                        self.errmsg(("macro %s should return a List " +
                                     "of Strings or a String. Got %s")
                                    % (macro_cmd_name,
                                       repr(current_command)))
                        return False
                    pass

                self.cmd_name = args[0]
                cmd_name = resolve_name(self, self.cmd_name)
                self.cmd_argstr = current_command[len(self.cmd_name):].lstrip()
                if cmd_name:
                    self.last_command = current_command
                    cmd_obj = self.commands[cmd_name]
                    if self.ok_for_running(cmd_obj, cmd_name, len(args) - 1):
                        try:
                            self.current_command = current_command
                            result = cmd_obj.run(args)
                            if result:
                                return result
                        except (Mexcept.DebuggerQuit,
                                Mexcept.DebuggerRestart, SystemExit):
                            # Let these exceptions propagate through
                            raise
                        except:
                            self.errmsg("INTERNAL ERROR: " +
                                        traceback.format_exc())
                            pass
                        pass
                    pass
                elif not self.settings('autoeval'):
                    self.undefined_cmd(current_command)
                else:
                    self.exec_line(current_command)
                    pass
                pass
            pass
        return False

    def remove_preloop_hook(self, hook):
        """Unregister a pre-loop hook; returns False if it wasn't there."""
        try:
            position = self.preloop_hooks.index(hook)
        except ValueError:
            return False
        del self.preloop_hooks[position]
        return True

    def setup(self):
        """Initialization done before entering the debugger-command
        loop. In particular we set up the call stack used for local
        variable lookup and frame/up/down commands.

        We return True if we should NOT enter the debugger-command
        loop."""
        self.forget()
        if self.settings('dbg_trepan'):
            self.frame = inspect.currentframe()
            pass
        if self.event in ['exception', 'c_exception']:
            exc_type, exc_value, exc_traceback = self.event_arg
        else:
            _, _, exc_traceback = (
                None,
                None,
                None,
            )  # NOQA
            pass
        if self.frame or exc_traceback:
            self.stack, self.curindex = \
                get_stack(self.frame, exc_traceback, None, self)
            self.curframe = self.stack[self.curindex][0]
            self.thread_name = Mthread.current_thread_name()
            if exc_traceback:
                self.list_lineno = traceback.extract_tb(exc_traceback,
                                                        1)[0][1]
        else:
            self.stack = self.curframe = \
                self.botframe = None
            pass
        if self.curframe:
            # Center the initial "list" window around the current line.
            self.list_lineno = \
                max(1,
                    inspect.getlineno(self.curframe)
                    - int(self.settings('listsize') / 2)) - 1
            self.list_filename = self.curframe.f_code.co_filename
        else:
            if not exc_traceback:
                self.list_lineno = None
            pass
        # if self.execRcLines()==1: return True
        return False

    def queue_startfile(self, cmdfile):
        '''Arrange for file of debugger commands to get read in the
        process-command loop.'''
        expanded_cmdfile = os.path.expanduser(cmdfile)
        is_readable = Mfile.readable(expanded_cmdfile)
        if is_readable:
            self.cmd_queue.append('source ' + expanded_cmdfile)
        elif is_readable is None:
            self.errmsg("source file '%s' doesn't exist" % expanded_cmdfile)
        else:
            self.errmsg("source file '%s' is not readable"
                        % expanded_cmdfile)
            pass
        return

    def undefined_cmd(self, cmd):
        """Error message when a command doesn't exist"""
        self.errmsg('Undefined command: "%s". Try "help".' % cmd)
        return

    def read_history_file(self):
        """Read the command history file -- possibly."""
        histfile = self.debugger.intf[-1].histfile
        try:
            import readline
            readline.read_history_file(histfile)
        except IOError:
            pass
        except ImportError:
            pass
        return

    def write_history_file(self):
        """Write the command history file -- possibly."""
        settings = self.debugger.settings
        histfile = self.debugger.intf[-1].histfile
        if settings['hist_save']:
            try:
                import readline
                try:
                    readline.write_history_file(histfile)
                except IOError:
                    pass
            except ImportError:
                pass
            pass
        return

    def _populate_commands(self):
        """ Create an instance of each of the debugger
        commands. Commands are found by importing files in the
        directory 'command'. Some files are excluded via an array set
        in __init__.  For each of the remaining files, we import them
        and scan for class names inside those files and for each class
        name, we will create an instance of that class. The set of
        DebuggerCommand class instances form set of possible debugger
        commands."""
        from trepan.processor import command as Mcommand
        if hasattr(Mcommand, '__modules__'):
            return self.populate_commands_easy_install(Mcommand)
        else:
            return self.populate_commands_pip(Mcommand)

    def populate_commands_pip(self, Mcommand):
        """Instantiate command classes found under the fully-qualified
        trepan.processor.command package (pip-style install layout)."""
        cmd_instances = []
        eval_cmd_template = 'command_mod.%s(self)'
        for mod_name in Mcommand.__dict__.keys():
            if mod_name.startswith('__'):
                continue
            import_name = "trepan.processor.command." + mod_name
            imp = __import__(import_name)
            if imp.__name__ == 'trepan':
                command_mod = imp.processor.command
            else:
                if mod_name in (
                        'info_sub',
                        'set_sub',
                        'show_sub',
                ):
                    pass
                try:
                    command_mod = getattr(__import__(import_name), mod_name)
                except:
                    # Don't need to warn about optional modules
                    if mod_name not in self.optional_modules:
                        print('Error importing %s: %s'
                              % (mod_name, sys.exc_info()[0]))
                        pass
                    continue
                pass
            # All classes named "*Command" (but not the base class itself)
            # are treated as debugger commands.
            classnames = [
                tup[0] for tup in
                inspect.getmembers(command_mod, inspect.isclass)
                if ('DebuggerCommand' != tup[0]
                    and tup[0].endswith('Command'))
            ]
            for classname in classnames:
                eval_cmd = eval_cmd_template % classname
                if False:
                    # Debug aid: let instantiation errors propagate.
                    instance = eval(eval_cmd)
                    cmd_instances.append(instance)
                else:
                    try:
                        instance = eval(eval_cmd)
                        cmd_instances.append(instance)
                    except:
                        print('Error loading %s from %s: %s'
                              % (classname, mod_name, sys.exc_info()[0]))
                        pass
                    pass
                pass
            pass
        return cmd_instances

    def populate_commands_easy_install(self, Mcommand):
        """Instantiate command classes using Mcommand.__modules__
        (easy-install layout); mirrors populate_commands_pip."""
        cmd_instances = []
        srcdir = get_srcdir()
        sys.path.insert(0, srcdir)
        for mod_name in Mcommand.__modules__:
            if mod_name in (
                    'info_sub',
                    'set_sub',
                    'show_sub',
            ):
                pass
            import_name = "command." + mod_name
            try:
                command_mod = getattr(__import__(import_name), mod_name)
            except:
                if mod_name not in self.optional_modules:
                    print('Error importing %s: %s'
                          % (mod_name, sys.exc_info()[0]))
                    pass
                continue
            classnames = [
                tup[0] for tup in
                inspect.getmembers(command_mod, inspect.isclass)
                if ('DebuggerCommand' != tup[0]
                    and tup[0].endswith('Command'))
            ]
            for classname in classnames:
                if False:
                    # Debug aid: let instantiation errors propagate.
                    instance = getattr(command_mod, classname)(self)
                    cmd_instances.append(instance)
                else:
                    try:
                        instance = getattr(command_mod, classname)(self)
                        cmd_instances.append(instance)
                    except:
                        print('Error loading %s from %s: %s'
                              % (classname, mod_name, sys.exc_info()[0]))
                        pass
                    pass
                pass
            pass
        return cmd_instances

    def _populate_cmd_lists(self):
        """ Populate self.lists and hashes:
        self.commands, and self.aliases, self.category """
        self.commands = {}
        self.aliases = {}
        self.category = {}
        # self.short_help = {}
        for cmd_instance in self.cmd_instances:
            if not hasattr(cmd_instance, 'aliases'):
                continue
            alias_names = cmd_instance.aliases
            cmd_name = cmd_instance.name
            self.commands[cmd_name] = cmd_instance
            for alias_name in alias_names:
                self.aliases[alias_name] = cmd_name
                pass
            cat = getattr(cmd_instance, 'category')
            if cat and self.category.get(cat):
                self.category[cat].append(cmd_name)
            else:
                self.category[cat] = [cmd_name]
                pass
            # sh = getattr(cmd_instance, 'short_help')
            # if sh:
            #     self.short_help[cmd_name] = getattr(c, 'short_help')
            #     pass
            pass
        for k in list(self.category.keys()):
            self.category[k].sort()
            pass
        return
    pass
def repr(self, object):
    """Return a truncated representation of *object*.

    Routes through the base ``Repr`` implementation so that the
    per-instance size limits (maxstring, maxlist, ...) are honored.
    """
    result = Repr.repr(self, object)
    return result
def repr_values(condition: Callable[..., bool], lambda_inspection: Optional[ConditionLambdaInspection],
                condition_kwargs: Mapping[str, Any], a_repr: reprlib.Repr) -> List[str]:
    # pylint: disable=too-many-locals
    """
    Represent function arguments and frame values in the error message on contract breach.

    :param condition: condition function of the contract
    :param lambda_inspection:
        inspected lambda AST node corresponding to the condition function (None if the condition was not given as a
        lambda function)
    :param condition_kwargs: condition arguments
    :param a_repr: representation instance that defines how the values are represented.
    :return: list of value representations
    """
    if _is_lambda(a_function=condition):
        assert lambda_inspection is not None, "Expected a lambda inspection when given a condition as a lambda function"
    else:
        assert lambda_inspection is None, "Expected no lambda inspection in a condition given as a non-lambda function"

    reprs = dict()  # type: MutableMapping[str, Any]

    if lambda_inspection is not None:
        # Collect the variable lookup of the condition function:
        # arguments shadow the closure, which shadows the globals.
        variable_lookup = []  # type: List[Mapping[str, Any]]

        # Add condition arguments to the lookup
        variable_lookup.append(condition_kwargs)

        # Add closure to the lookup
        closure_dict = dict()  # type: Dict[str, Any]

        if condition.__closure__ is not None:  # type: ignore
            closure_cells = condition.__closure__  # type: ignore
            freevars = condition.__code__.co_freevars

            # Cells and free variable names are parallel sequences.
            assert len(closure_cells) == len(freevars), \
                "Number of closure cells of a condition function ({}) == number of free vars ({})".format(
                    len(closure_cells), len(freevars))

            for cell, freevar in zip(closure_cells, freevars):
                closure_dict[freevar] = cell.cell_contents

        variable_lookup.append(closure_dict)

        # Add globals to the lookup
        if condition.__globals__ is not None:  # type: ignore
            variable_lookup.append(condition.__globals__)  # type: ignore

        # Re-evaluate the sub-expressions of the lambda body so their
        # values can be reported alongside the source text.
        # pylint: disable=protected-access
        recompute_visitor = icontract._recompute.Visitor(
            variable_lookup=variable_lookup)

        recompute_visitor.visit(node=lambda_inspection.node.body)
        recomputed_values = recompute_visitor.recomputed_values

        repr_visitor = Visitor(recomputed_values=recomputed_values,
                               variable_lookup=variable_lookup,
                               atok=lambda_inspection.atok)
        repr_visitor.visit(node=lambda_inspection.node.body)
        reprs = repr_visitor.reprs
    else:
        # No AST available: fall back to the raw condition arguments.
        for key, val in condition_kwargs.items():
            if _representable(value=val):
                reprs[key] = val

    parts = []  # type: List[str]
    for key in sorted(reprs.keys()):
        parts.append('{} was {}'.format(key, a_repr.repr(reprs[key])))

    return parts
class Placeholder(abc.AbstractPlaceholder):
    """
    Placeholder powers all the objects that use placeholder variables (
    between curly brackets). It uses a jinja2.Template object under the hood
    but adds a few important things:

    * Keeps the raw (undendered) value: template.raw
    * Keeps path to raw value: template.location
    * Strict: will not render if missing or extra parameters
    * Upon calling .render, saves the rendered value for later access

    End users should not manipulate Placeholder objects, they should be
    automatically created from strings, pathlib.Path or jinja2.Template
    objects.

    Placeholder is mostly used by sources whose source code are parametrized
    strings (e.g. SQL scripts)

    Parameters
    ----------
    hot_reload : bool, optional
        Makes the placeholder always read the template from the file before
        rendering

    required
        Set of keys required for rendering

    Attributes
    ----------
    variables : set
        Returns the set of variables in the template (values sourrounded by
        {{ and }})

    path : pathlib.Path
        The location of the raw object. None if initialized with a str or
        with a jinja2.Template created from a str

    Notes
    -----
    You can use "raise" in a placeholder to raise exceptions, useful for
    validating input parameters: "{% raise 'some error message' %}"
    """
    def __init__(self, primitive, hot_reload=False, required=None):
        self._logger = logging.getLogger('{}.{}'.format(
            __name__,
            type(self).__name__))
        self._hot_reload = hot_reload
        self._variables = None
        self.__template = None

        # we have to take care of 4 possible cases and make sure we have
        # all we need to initialize the template, this includes having
        # access to the raw template (str) and a way to re-initialize
        # the jinja.environment.loader object (to make sure copies and
        # pickles work)
        if isinstance(primitive, Path):
            self._path = primitive
            self.__raw = primitive.read_text()
            self._loader_init = None
        elif isinstance(primitive, str):
            self._path = None
            self.__raw = primitive
            self._loader_init = None
        elif isinstance(primitive, Template):
            # NOTE: primitive.filename will be '<template>' if Template was
            # loaded from a string
            path = Path(primitive.filename)

            if primitive.environment.undefined != StrictUndefined:
                raise ValueError('Placeholder can only be initialized '
                                 'from jinja2.Templates whose undefined '
                                 'parameter is set to '
                                 'jinja2.StrictUndefined, set it explicitely '
                                 'either in the Template or Environment '
                                 'constructors')

            # we cannot get the raw template on this case, raise error
            # check '<template>' first, because Path('<template>').exists()
            # breaks on windows
            if primitive.filename == '<template>' or not path.exists():
                raise ValueError(
                    'Could not load raw source from '
                    'jinja2.Template. This usually happens '
                    'when the placeholder is initialised with a '
                    'jinja.Template which was initialized with '
                    'a string. Only jinja2.Templates loaded from '
                    'the filesystem are supported. Use '
                    'ploomber.SourceLoader or jinja\'s '
                    'FileSystemLoader/PackageLoader to fix '
                    'this issue, if you want to create a template from '
                    'a string pass it directly '
                    'Placeholder("some {{placeholder}}")')

            self._path = path
            self.__raw = path.read_text()
            self._loader_init = _make_loader_init(primitive.environment.loader)
        # SourceLoader returns Placeholder objects, which could inadvertedly
        # be passed to another Placeholder constructor when instantiating
        # a source object, since they sometimes use placeholders
        # make sure this case is covered
        elif isinstance(primitive, Placeholder):
            self._path = primitive.path
            self.__raw = primitive._raw
            self._loader_init = _make_loader_init(
                primitive._template.environment.loader)
        else:
            raise TypeError('{} must be initialized with a Template, '
                            'Placeholder, pathlib.Path or str, '
                            'got {} instead'.format(
                                type(self).__name__,
                                type(primitive).__name__))

        if self._path is None and hot_reload:
            raise ValueError('hot_reload only works when Placeholder is '
                             'initialized from a file')

        # TODO: remove
        self.needs_render = self._needs_render()

        # Literal templates (no tags) can be "rendered" immediately.
        self._str = None if self.needs_render else self._raw

        # Size-limited repr used by best_repr/__repr__.
        self._repr = Repr()
        self._repr.maxstring = 40

        if required:
            self._validate_required(required)

    def _validate_required(self, required):
        # Raise if any of the caller-required tags is absent from the
        # template's declared variables.
        missing_required = set(required) - self.variables

        if missing_required:
            msg = ('The following tags are required. ' +
                   display_error(missing_required, required))
            raise SourceInitializationError(msg)

    @property
    def _template(self):
        # Lazily (re-)build the jinja2.Template; rebuilt on every access
        # when hot_reload is on.
        if self.__template is None or self._hot_reload:
            self.__template = _init_template(self._raw, self._loader_init)

        return self.__template

    @property
    def _raw(self):
        """A string with the raw jinja2.Template contents
        """
        if self._hot_reload:
            self.__raw = self._path.read_text()

        return self.__raw

    @_raw.setter
    def _raw(self, value):
        self.__raw = value

    @property
    def path(self):
        # pathlib.Path the template was loaded from (None for strings).
        return self._path

    def _needs_render(self):
        """
        Returns true if the template is a literal and does not need any
        parameters to render
        """
        env = self._template.environment

        # check if the template has the variable or block start string
        # is there any better way of checking this?
        needs_variables = (env.variable_start_string in self._raw
                           and env.variable_end_string in self._raw)
        needs_blocks = (env.block_start_string in self._raw
                        and env.block_end_string in self._raw)

        return needs_variables or needs_blocks

    def __str__(self):
        # Only valid after render() (or for literal templates).
        if self._str is None:
            raise RuntimeError('Tried to read {} {} without '
                               'rendering first'.format(
                                   type(self).__name__,
                                   repr(self)))

        return self._str

    def render(self, params, optional=None, required=None):
        """Render the template with `params`, validating that no declared
        variable is missing and no unused parameter is passed.
        """
        optional = optional or set()
        optional = set(optional)

        passed = set(params.keys())

        available = passed | set(self._template.environment.globals)

        missing = self.variables - available
        extra = passed - self.variables - optional

        # FIXME: self.variables should also be updated on hot_reload
        if missing:
            raise RenderError('in {}, missing required '
                              'parameters: {}, params passed: {}'.format(
                                  repr(self), missing, params))

        if extra:
            raise RenderError('in {}, unused parameters: {}, params '
                              'declared: {}'.format(repr(self), extra,
                                                    self.variables))

        try:
            self._str = self._template.render(**params)
        except UndefinedError as e:
            # TODO: we can use e.message to see which param caused the
            # error
            raise RenderError('in {}, jinja2 raised an UndefinedError, this '
                              'means the template is using an attribute '
                              'or item that does not exist, the original '
                              'traceback is shown above. For jinja2 '
                              'implementation details see: '
                              'http://jinja.pocoo.org/docs/latest'
                              '/templates/#variables'.format(
                                  repr(self))) from e

        return str(self)

    def best_repr(self, shorten):
        """
        Returns the rendered version (if available), otherwise the raw
        version
        """
        best = self._raw if self._str is None else self._str

        if shorten:
            # Clip with the reprlib.Repr configured in __init__ (40 chars).
            best = self._repr.repr(best)

        return best

    @property
    def variables(self):
        """Returns declared variables in the template
        """
        # this requires parsing the raw template, do lazy load, but override
        # it if hot_reload is True
        if self._variables is None or self._hot_reload:
            self._variables = util.get_tags_in_str(self._raw)

        return self._variables

    def __repr__(self):
        # Short, shortened form, e.g. Placeholder('SELECT * FROM...').
        content = self.best_repr(shorten=True)
        return f'{type(self).__name__}({content})'

    def __getstate__(self):
        # Loggers and jinja2 templates are not picklable; drop them and
        # rebuild on unpickle (see __setstate__ and _loader_init).
        state = self.__dict__.copy()
        del state['_logger']
        del state['_Placeholder__template']
        return state

    def __setstate__(self, state):
        # Restore state and recreate the non-picklable members lazily.
        self.__dict__.update(state)
        self._logger = logging.getLogger('{}.{}'.format(
            __name__,
            type(self).__name__))
        self.__template = None
def repr_values(condition: Callable[..., bool], lambda_inspection: Optional[ConditionLambdaInspection],
                resolved_kwargs: Mapping[str, Any], a_repr: reprlib.Repr) -> List[str]:
    """
    Represent function arguments and frame values in the error message on contract breach.

    :param condition: condition function of the contract
    :param lambda_inspection:
        inspected lambda AST node corresponding to the condition function (None if the condition was not given
        as a lambda function)
    :param resolved_kwargs: arguments put in the function call
    :param a_repr: representation instance that defines how the values are represented.
    :return: list of value representations
    """
    # Hide _ARGS and _KWARGS if they are not part of the condition for better readability
    if '_ARGS' not in resolved_kwargs and '_KWARGS' not in resolved_kwargs:
        selected_kwargs = resolved_kwargs
    else:
        condition_params = inspect.signature(condition).parameters

        trimmed_kwargs = cast(
            MutableMapping[str, Any], resolved_kwargs.copy()  # type: ignore
        )
        for special_arg in ('_ARGS', '_KWARGS'):
            if special_arg not in condition_params:
                trimmed_kwargs.pop(special_arg, None)

        selected_kwargs = cast(Mapping[str, Any], trimmed_kwargs)

    # Don't use ``resolved_kwargs`` from this point on.
    # ``selected_kwargs`` is meant to be used instead for better readability of error messages.

    if is_lambda(a_function=condition):
        assert lambda_inspection is not None, "Expected a lambda inspection when given a condition as a lambda function"
    else:
        assert lambda_inspection is None, "Expected no lambda inspection in a condition given as a non-lambda function"

    reprs = None  # type: Optional[MutableMapping[str, Any]]

    if lambda_inspection is not None:
        variable_lookup = collect_variable_lookup(
            condition=condition, resolved_kwargs=selected_kwargs)

        recomputer = icontract._recompute.Visitor(
            variable_lookup=variable_lookup)
        recomputer.visit(node=lambda_inspection.node.body)

        representer = Visitor(recomputed_values=recomputer.recomputed_values,
                              variable_lookup=variable_lookup,
                              atok=lambda_inspection.atok)
        representer.visit(node=lambda_inspection.node.body)

        reprs = representer.reprs

    if reprs is None:
        reprs = dict()

    # Add original arguments from the call unless they shadow a variable in the re-computation.
    #
    # The condition arguments are often not sufficient to figure out the error. The user usually needs
    # more context which is captured in the remainder of the call arguments.
    for arg_name in sorted(selected_kwargs.keys()):
        arg_value = selected_kwargs[arg_name]
        if arg_name not in reprs and _representable(value=arg_value):
            reprs[arg_name] = arg_value

    # We need to sort in order to present the same violation error on repeated violations.
    # Otherwise, the order of the reported arguments may be arbitrary.
    parts = []  # type: List[str]
    for arg_name in sorted(reprs.keys()):
        shown_value = reprs[arg_name]

        if not isinstance(shown_value, icontract._recompute.FirstExceptionInAll):
            parts.append('{} was {}'.format(arg_name, a_repr.repr(shown_value)))
        else:
            pieces = ['{} was False, e.g., with'.format(arg_name)]
            for input_name, input_value in shown_value.inputs:
                pieces.append('\n')
                pieces.append(' {} = {}'.format(input_name,
                                                a_repr.repr(input_value)))
            parts.append(''.join(pieces))

    return parts
class File(ProductWithClientMixin, os.PathLike, Product):
    """A file (or directory) in the local filesystem

    Parameters
    ----------
    identifier: str or pathlib.Path
        The path to the file (or directory), can contain placeholders
        (e.g. {{placeholder}})
    """
    def __init__(self, identifier, client=None):
        super().__init__(identifier)
        self._client = client
        # used by __repr__ to truncate long paths
        self._repr = Repr()
        self._repr.maxstring = 40
        # wrapper around the remote copy of this product (if any)
        self._remote_ = _RemoteFile(self)

    def _init_identifier(self, identifier):
        # validate the identifier and wrap it so it supports placeholders
        if not isinstance(identifier, (str, Path)):
            raise TypeError('File must be initialized with a str or a '
                            'pathlib.Path')

        return Placeholder(str(identifier))

    @property
    def _path_to_file(self):
        # resolved path to the product itself
        return Path(str(self._identifier))

    @property
    def _path_to_metadata(self):
        # metadata lives next to the product as a hidden file:
        # some/dir/data.csv -> some/dir/.data.csv.metadata
        name = f'.{self._path_to_file.name}.metadata'
        return self._path_to_file.with_name(name)

    @property
    def _remote(self):
        """
        RemoteFile for this File.

        NOTE(review): the original docstring claimed this returns None when
        there is no File.client / remote file / remote metadata, but the code
        always returns the _RemoteFile wrapper -- confirm whether the None
        semantics live inside _RemoteFile instead.
        """
        return self._remote_

    @property
    def _remote_path_to_metadata(self):
        # path to the metadata file on the remote side
        return self._remote._path_to_metadata

    def fetch_metadata(self):
        """Load metadata from the local metadata file (dict)"""
        # migrate metadata file to keep compatibility with ploomber<0.10
        old_name = Path(str(self._path_to_file) + '.source')
        if old_name.is_file():
            shutil.move(old_name, self._path_to_metadata)

        return _fetch_metadata_from_file_product(self, check_file_exists=True)

    def save_metadata(self, metadata):
        """Serialize metadata as JSON next to the product file"""
        self._path_to_metadata.write_text(json.dumps(metadata))

    def _delete_metadata(self):
        # best-effort: only remove the metadata file if it exists
        if self._path_to_metadata.exists():
            os.remove(str(self._path_to_metadata))

    def exists(self):
        """True if the product file (or directory) exists locally"""
        return self._path_to_file.exists()

    def delete(self, force=False):
        """Delete the product file or directory (if it exists)"""
        # force is not used for this product but it is left for API
        # compatibility
        if self.exists():
            self.logger.debug('Deleting %s', self._path_to_file)
            if self._path_to_file.is_dir():
                shutil.rmtree(str(self._path_to_file))
            else:
                os.remove(str(self._path_to_file))
        else:
            self.logger.debug('%s does not exist ignoring...',
                              self._path_to_file)

    def __repr__(self):
        # do not shorten, we need to process the actual path
        path = Path(self._identifier.best_repr(shorten=False))

        # if absolute, try to show a shorter version, if possible
        if path.is_absolute():
            try:
                path = path.relative_to(Path('.').resolve())
            except ValueError:
                # happens if the path is not a file/folder within the current
                # working directory
                pass

        content = self._repr.repr(str(path))
        return f'{type(self).__name__}({content})'

    def _check_is_outdated(self, outdated_by_code):
        """
        Unlike other Product implementation that only have to check the
        current metadata, File has to check if there is a metadata remote copy
        and download it to decide outdated status, which yield to task
        execution or product downloading
        """
        should_download = False

        if self._remote.exists():
            if self._remote._is_equal_to_local_copy():
                return self._remote._is_outdated(with_respect_to_local=True)
            else:
                # download when doing so will bring the product
                # up-to-date (this takes into account upstream
                # timestamps)
                should_download = not self._remote._is_outdated(
                    with_respect_to_local=True,
                    outdated_by_code=outdated_by_code)

        if should_download:
            return TaskStatus.WaitingDownload

        # no need to download, check status using local metadata
        return super()._check_is_outdated(outdated_by_code=outdated_by_code)

    def _is_remote_outdated(self, outdated_by_code):
        """
        Check status using remote metadata, if no remote is available
        (or remote metadata is corrupted) returns True
        """
        if self._remote.exists():
            return self._remote._is_outdated(with_respect_to_local=False,
                                             outdated_by_code=outdated_by_code)
        else:
            # if no remote, return True. This is the least destructive option
            # since we don't know what will be available and what not when
            # this executes
            return True

    @property
    def client(self):
        # unlike the parent class, a missing client is not an error here:
        # a File can work purely locally
        try:
            client = super().client
        except MissingClientError:
            return None
        else:
            return client

    def download(self):
        """Download product and metadata via the configured client"""
        self.logger.info('Downloading %s...', self._path_to_file)

        if self.client:
            self.client.download(str(self._path_to_file))
            self.client.download(str(self._path_to_metadata))

    def upload(self):
        """Upload metadata then product; both must exist locally"""
        if self.client:
            if not self._path_to_metadata.exists():
                raise RuntimeError(
                    f'Error uploading product {self!r}. '
                    f'Metadata {str(self._path_to_metadata)!r} does '
                    'not exist')

            if not self._path_to_file.exists():
                raise RuntimeError(f'Error uploading product {self!r}. '
                                   f'Product {str(self._path_to_file)!r} does '
                                   'not exist')

            self.logger.info('Uploading %s...', self._path_to_file)
            self.client.upload(self._path_to_metadata)
            self.client.upload(self._path_to_file)

    def __fspath__(self):
        """
        Abstract method defined in the os.PathLike interface, enables this
        to work: ``import pandas as pd; pd.read_csv(File('file.csv'))``
        """
        return str(self)

    def __eq__(self, other):
        # two File objects are equal if they resolve to the same path
        return Path(str(self)).resolve() == Path(str(other)).resolve()

    def __hash__(self):
        return hash(Path(str(self)).resolve())
class CommandProcessor(Processor):
    """Reads debugger commands and dispatches them to command objects.

    This is the interactive command loop of the debugger: it is installed
    as a trace function (see ``event_processor``), reads commands from the
    current interface, expands macros/aliases and runs the matching
    DebuggerCommand instance.
    """

    def __init__(self, core_obj, opts=None):
        get_option = lambda key: Mmisc.option_set(opts, key, DEFAULT_PROC_OPTS)
        super().__init__(core_obj)

        self.continue_running = False  # True if we should leave command loop
        self.event2short = dict(EVENT2SHORT)
        self.event2short["signal"] = "?!"
        self.event2short["brkpt"] = "xx"

        self.optional_modules = ("ipython", "bpy")
        self.cmd_instances = self._populate_commands()

        # command argument string. Is like current_command, but the part
        # after cmd_name has been removed.
        self.cmd_argstr = ""

        # command name before alias or macro resolution
        self.cmd_name = ""
        self.cmd_queue = []  # Queued debugger commands
        self.completer = lambda text, state: Mcomplete.completer(
            self, text, state)
        self.current_command = ""  # Current command getting run
        self.debug_nest = 1
        self.display_mgr = Mdisplay.DisplayMgr()
        self.intf = core_obj.debugger.intf
        self.last_command = None  # Initially a no-op
        self.precmd_hooks = []

        self.location = lambda: print_location(self)

        self.preloop_hooks = []
        self.postcmd_hooks = []
        self.remap_file_re = None

        self._populate_cmd_lists()

        # Note: prompt_str's value set below isn't used. It is
        # computed dynamically. The value is suggestive of what it
        # looks like.
        self.prompt_str = "(trepan3k) "

        # Stop only if line/file is different from last time
        self.different_line = None

        # These values updated on entry. Set initial values.
        self.curframe = None
        self.event = None
        self.event_arg = None
        self.frame = None
        self.list_lineno = 0  # last list number used in "list"
        self.list_offset = -1  # last list number used in "disassemble"
        self.list_obj = None
        # FIX: list_object is read by process_commands()/setup() but was
        # never initialized here, risking AttributeError before setup runs.
        self.list_object = None
        self.list_filename = None  # last filename used in "list"; also set
        # to the frame/exception filename on setup
        self.list_orig_lineno = 0  # line number of frame or exception on setup

        self.macros = {}  # Debugger Macros

        # Create a custom safe Repr instance and increase its maxstring.
        # The default of 30 truncates error messages too easily.
        self._repr = Repr()
        self._repr.maxstring = 100
        self._repr.maxother = 60
        self._repr.maxset = 10
        self._repr.maxfrozen = 10
        self._repr.array = 10
        self.stack = []
        self.thread_name = None
        self.frame_thread_name = None

        initfile_list = get_option("initfile_list")
        for init_cmdfile in initfile_list:
            self.queue_startfile(init_cmdfile)
        return

    def _saferepr(self, obj, maxwidth=None):
        """Return a truncated, safe repr of ``obj`` (at most ``maxwidth``
        characters, defaulting to the configured screen width).

        FIX: the parameter was previously named ``str``, shadowing the
        builtin."""
        if maxwidth is None:
            maxwidth = self.debugger.settings["width"]
        return self._repr.repr(obj)[:maxwidth]

    def add_preloop_hook(self, hook, position=-1, nodups=True):
        """Add ``hook`` to be run before the command loop; returns False if
        it was already registered."""
        if hook in self.preloop_hooks:
            return False
        self.preloop_hooks.insert(position, hook)
        return True

    def add_remap_pat(self, pat, replace, clear_remap=True):
        """Register a filename remap pattern with pyficache."""
        pyficache.main.add_remap_pat(pat, replace, clear_remap)
        if clear_remap:
            self.file2file_remap = {}
            pyficache.file2file_remap = {}

    # To be overridden in derived debuggers
    def defaultFile(self):
        """Produce a reasonable default."""
        filename = self.curframe.f_code.co_filename
        # Consider using is_exec_stmt(). I just don't understand
        # the conditions under which the below test is true.
        if filename == "<string>" and self.debugger.mainpyfile:
            filename = self.debugger.mainpyfile
        return filename

    def set_prompt(self, prompt="trepan3k"):
        """Set ``self.prompt_str``, reflecting thread name, debug nesting
        and highlight settings."""
        if self.thread_name and self.thread_name != "MainThread":
            prompt += ":" + self.thread_name
        self.prompt_str = "%s%s%s" % (
            "(" * self.debug_nest,
            prompt,
            ")" * self.debug_nest,
        )
        highlight = self.debugger.settings["highlight"]
        if highlight and highlight in ("light", "dark"):
            self.prompt_str = colorize("underline", self.prompt_str)
        self.prompt_str += " "

    def event_processor(self, frame, event, event_arg, prompt="trepan3k"):
        """Command event processor: read commands and act on them.

        See https://docs.python.org/3/library/sys.html#sys.settrace
        for how this protocol works and what the events mean.

        Of particular note is what we return: the local trace function
        should return a reference to itself (or to another function for
        further tracing in that scope), or None to turn off tracing in that
        scope. If any error occurs in the trace function, it will be unset,
        just as if settrace(None) were called.
        """
        self.frame = frame
        self.event = event
        self.event_arg = event_arg

        filename = frame.f_code.co_filename
        lineno = frame.f_lineno
        line = linecache.getline(filename, lineno, frame.f_globals)
        if not line:
            opts = {
                "output": "plain",
                "reload_on_change": self.settings("reload"),
                "strip_nl": False,
            }
            # frozen modules report "<frozen mod.name>"; unmap to the real
            # file so we can show source
            m = re.search("^<frozen (.*)>", filename)
            if m and m.group(1):
                filename = pyficache.unmap_file(m.group(1))
            line = pyficache.getline(filename, lineno, opts)
        self.current_source_text = line
        if self.settings("skip") is not None:
            # Skip over "def" and "class" statements when requested
            if is_def_stmt(line, frame):
                return self.event_processor
            if is_class_def(line, frame):
                return self.event_processor
        self.thread_name = Mthread.current_thread_name()
        self.frame_thread_name = self.thread_name
        self.set_prompt(prompt)
        self.process_commands()
        if filename == "<string>":
            pyficache.remove_remap_file("<string>")
        return self.event_processor

    def forget(self):
        """Remove memory of state variables set in the command processor"""
        self.stack = []
        self.curindex = 0
        self.curframe = None
        self.thread_name = None
        self.frame_thread_name = None
        return

    def eval(self, arg, show_error=True):
        """Eval string arg in the current frame context."""
        try:
            return eval(arg, self.curframe.f_globals, self.curframe.f_locals)
        except:
            t, v = sys.exc_info()[:2]
            if isinstance(t, str):
                exc_type_name = t
            else:
                exc_type_name = t.__name__
            if show_error:
                self.errmsg(str("%s: %s" % (exc_type_name, arg)))
            raise
        return None  # Not reached

    def exec_line(self, line):
        """Compile and exec ``line`` in the current frame's context,
        reporting (not raising) any exception."""
        if self.curframe:
            local_vars = self.curframe.f_locals
            global_vars = self.curframe.f_globals
        else:
            local_vars = None
            # FIXME: should probably have place where the
            # user can store variables inside the debug session.
            # The setup for this should be elsewhere. Possibly
            # in interaction.
            global_vars = None
        try:
            code = compile(line + "\n", '"%s"' % line, "single")
            exec(code, global_vars, local_vars)
        except:
            t, v = sys.exc_info()[:2]
            # FIX: was ``type(t) == bytes`` -- a vestigial Python-2 check on
            # the exception *type*; use the same test eval() uses.
            if isinstance(t, str):
                exc_type_name = t
            else:
                exc_type_name = t.__name__
            self.errmsg("%s: %s" % (str(exc_type_name), str(v)))
        return

    def get_an_int(self, arg, msg_on_error, min_value=None, max_value=None):
        """Like cmdfns.get_an_int(), but if there's a stack frame use that
        in evaluation."""
        ret_value = self.get_int_noerr(arg)
        if ret_value is None:
            if msg_on_error:
                self.errmsg(msg_on_error)
            else:
                self.errmsg("Expecting an integer, got: %s." % str(arg))
            return None
        if min_value and ret_value < min_value:
            self.errmsg("Expecting integer value to be at least %d, got: %d."
                        % (min_value, ret_value))
            return None
        elif max_value and ret_value > max_value:
            self.errmsg("Expecting integer value to be at most %d, got: %d."
                        % (max_value, ret_value))
            return None
        return ret_value

    def get_int_noerr(self, arg):
        """Eval arg and if it is an integer return the value. Otherwise
        return None"""
        if self.curframe:
            g = self.curframe.f_globals
            loc = self.curframe.f_locals
        else:
            g = globals()
            loc = locals()
        try:
            val = int(eval(arg, g, loc))
        except (SyntaxError, NameError, ValueError, TypeError):
            return None
        return val

    def get_int(self, arg, min_value=0, default=1, cmdname=None, at_most=None):
        """If no argument use the default. If arg is an integer between
        min_value and at_most, use that. Otherwise report an error.
        If there's a stack frame use that in evaluation."""
        if arg is None:
            return default
        default = self.get_int_noerr(arg)
        if default is None:
            if cmdname:
                self.errmsg(("Command '%s' expects an integer; " +
                             "got: %s.") % (cmdname, str(arg)))
            else:
                self.errmsg("Expecting a positive integer, got: %s" %
                            str(arg))
            return None
        if default < min_value:
            if cmdname:
                self.errmsg(("Command '%s' expects an integer at least" +
                             " %d; got: %d.") %
                            (cmdname, min_value, default))
            else:
                self.errmsg(("Expecting a positive integer at least" +
                             " %d; got: %d") % (min_value, default))
            return None
        elif at_most and default > at_most:
            if cmdname:
                self.errmsg(("Command '%s' expects an integer at most" +
                             " %d; got: %d.") % (cmdname, at_most, default))
            else:
                self.errmsg(("Expecting an integer at most %d; got: %d") %
                            (at_most, default))
        return default

    def getval(self, arg, locals=None):
        """Eval ``arg``; report and re-raise on failure."""
        if not locals:
            locals = self.curframe.f_locals
        try:
            return eval(arg, self.curframe.f_globals, locals)
        except:
            t, v = sys.exc_info()[:2]
            if isinstance(t, str):
                exc_type_name = t
            else:
                exc_type_name = t.__name__
            self.errmsg(str("%s: %s" % (exc_type_name, arg)))
            raise
        return

    def ok_for_running(self, cmd_obj, name, nargs):
        """We separate some of the common debugger command checks here:
        whether it makes sense to run the command in this execution state,
        if the command has the right number of arguments and so on.
        """
        if hasattr(cmd_obj, "execution_set"):
            if not (self.core.execution_status in cmd_obj.execution_set):
                part1 = ("Command '%s' is not available for execution "
                         "status:" % name)
                mess = Mmisc.wrapped_lines(part1, self.core.execution_status,
                                           self.debugger.settings["width"])
                self.errmsg(mess)
                return False
        if self.frame is None and cmd_obj.need_stack:
            self.intf[-1].errmsg("Command '%s' needs an execution stack."
                                 % name)
            return False
        if nargs < cmd_obj.min_args:
            self.errmsg(("Command '%s' needs at least %d argument(s); " +
                         "got %d.") % (name, cmd_obj.min_args, nargs))
            return False
        elif cmd_obj.max_args is not None and nargs > cmd_obj.max_args:
            self.errmsg(("Command '%s' can take at most %d argument(s);" +
                         " got %d.") % (name, cmd_obj.max_args, nargs))
            return False
        return True

    def process_commands(self):
        """Handle debugger commands."""
        if self.core.execution_status != "No program":
            self.setup()
            self.location()
        else:
            self.list_object = None

        leave_loop = run_hooks(self, self.preloop_hooks)
        self.continue_running = False

        while not leave_loop:
            try:
                run_hooks(self, self.precmd_hooks)
                # bdb had a True return to leave loop.
                # A more straight-forward way is to set
                # instance variable self.continue_running.
                leave_loop = self.process_command()
                if leave_loop or self.continue_running:
                    break
            except EOFError:
                # If we have stacked interfaces, pop to the next
                # one. If this is the last one however, we'll
                # just stick with that. FIXME: Possibly we should
                # check to see if we are interactive. and not
                # leave if that's the case. Is this the right
                # thing? investigate and fix.
                if len(self.debugger.intf) > 1:
                    del self.debugger.intf[-1]
                    self.last_command = ""
                else:
                    if self.debugger.intf[-1].output:
                        self.debugger.intf[-1].output.writeline("Leaving")
                        raise SystemExit
                break
        return run_hooks(self, self.postcmd_hooks)

    def process_command(self):
        """Read, macro-expand and run a single debugger command.

        Returns a true value when the command loop should be left."""
        if len(self.cmd_queue) > 0:
            current_command = self.cmd_queue[0].strip()
            del self.cmd_queue[0]
        else:
            current_command = self.intf[-1].read_command(
                self.prompt_str).strip()
            if "" == current_command and self.intf[-1].interactive:
                current_command = self.last_command

        # Look for comments
        if "" == current_command:
            if self.intf[-1].interactive:
                self.errmsg("No previous command registered, " +
                            "so this is a no-op.")
            return False
        if current_command is None or current_command[0] == "#":
            return False

        try:
            args_list = arg_split(current_command)
        except:
            self.errmsg("bad parse %s: %s" % sys.exc_info()[0:2])
            return False

        for args in args_list:
            if len(args):
                # Expand macros (possibly recursively) until the first word
                # is not a macro name.
                while True:
                    if len(args) == 0:
                        return False
                    macro_cmd_name = args[0]
                    if macro_cmd_name not in self.macros:
                        break
                    try:
                        current_command = self.macros[macro_cmd_name][0](
                            *args[1:])
                    except TypeError:
                        self.errmsg("Error expanding macro %s" %
                                    macro_cmd_name)
                        return False
                    if self.settings("debugmacro"):
                        print(current_command)
                    if isinstance(current_command, list):
                        for x in current_command:
                            if str != type(x):
                                # FIX: the format string previously had 3
                                # placeholders for 4 arguments, raising
                                # TypeError instead of reporting the error.
                                self.errmsg(
                                    ("macro %s should return a List " +
                                     "of Strings. Element %s of %s " +
                                     "has type %s") % (
                                         macro_cmd_name,
                                         x,
                                         repr(current_command),
                                         type(x),
                                     ))
                                return False
                        first = current_command[0]
                        args = first.split()
                        # FIX: was ``self.cmd_queue + [...]`` -- a no-op
                        # expression; the remaining expanded commands were
                        # silently dropped (and would have been queued as a
                        # nested list). Queue each remaining command string.
                        self.cmd_queue.extend(current_command[1:])
                        current_command = first
                    elif type(current_command) == str:
                        args = current_command.split()
                    else:
                        self.errmsg(("macro %s should return a List " +
                                     "of Strings or a String. Got %s") %
                                    (macro_cmd_name, repr(current_command)))
                        return False

                self.cmd_name = args[0]
                cmd_name = resolve_name(self, self.cmd_name)
                self.cmd_argstr = current_command[len(self.cmd_name):].lstrip()
                if cmd_name:
                    self.last_command = current_command
                    cmd_obj = self.commands[cmd_name]
                    if self.ok_for_running(cmd_obj, cmd_name, len(args) - 1):
                        try:
                            self.current_command = current_command
                            result = cmd_obj.run(args)
                            if result:
                                return result
                        except (
                                Mexcept.DebuggerQuit,
                                Mexcept.DebuggerRestart,
                                SystemExit,
                        ):
                            # Let these exceptions propagate through
                            raise
                        except:
                            self.errmsg("INTERNAL ERROR: " +
                                        traceback.format_exc())
                elif not self.settings("autoeval"):
                    self.undefined_cmd(current_command)
                else:
                    # Autoeval
                    self._saferepr(self.exec_line(current_command))
        return False

    def remove_preloop_hook(self, hook):
        """Unregister a preloop hook; returns False if it wasn't
        registered."""
        try:
            position = self.preloop_hooks.index(hook)
        except ValueError:
            return False
        del self.preloop_hooks[position]
        return True

    def setup(self):
        """Initialization done before entering the debugger-command
        loop. In particular we set up the call stack used for local
        variable lookup and frame/up/down commands.

        We return True if we should NOT enter the debugger-command
        loop."""
        self.forget()
        if self.settings("dbg_trepan"):
            self.frame = inspect.currentframe()
        if self.event in ["exception", "c_exception"]:
            exc_type, exc_value, exc_traceback = self.event_arg
        else:
            _, _, exc_traceback = (
                None,
                None,
                None,
            )  # NOQA
        if self.frame or exc_traceback:
            self.stack, self.curindex = get_stack(self.frame, exc_traceback,
                                                  None, self)
            self.curframe = self.stack[self.curindex][0]
            self.thread_name = Mthread.current_thread_name()
            if exc_traceback:
                self.list_lineno = traceback.extract_tb(exc_traceback,
                                                        1)[0][1]
                self.list_offset = self.curframe.f_lasti
                self.list_object = self.curframe
        else:
            self.stack = self.curframe = self.botframe = None

        if self.curframe:
            self.list_lineno = (max(
                1,
                inspect.getlineno(self.curframe) -
                int(self.settings("listsize") / 2),
            ) - 1)
            self.list_offset = self.curframe.f_lasti
            self.list_filename = self.curframe.f_code.co_filename
            self.list_object = self.curframe
        else:
            self.list_object = None
            if not exc_traceback:
                self.list_lineno = None

        # if self.execRcLines()==1: return True
        # FIXME: do we want to save self.list_lineno a second place
        # so that we can do 'list .' and go back to the first place we
        # listed?
        return False

    def queue_startfile(self, cmdfile):
        """Arrange for file of debugger commands to get read in the
        process-command loop."""
        expanded_cmdfile = osp.expanduser(cmdfile)
        is_readable = Mfile.readable(expanded_cmdfile)
        if is_readable:
            self.cmd_queue.append("source " + expanded_cmdfile)
        elif is_readable is None:
            self.errmsg("source file '%s' doesn't exist" % expanded_cmdfile)
        else:
            self.errmsg("source file '%s' is not readable" %
                        expanded_cmdfile)
        return

    def undefined_cmd(self, cmd):
        """Error message when a command doesn't exist"""
        self.errmsg('Undefined command: "%s". Try "help".' % cmd)
        return

    def read_history_file(self):
        """Read the command history file -- possibly."""
        histfile = self.debugger.intf[-1].histfile
        try:
            import readline
            readline.read_history_file(histfile)
        except IOError:
            pass
        except ImportError:
            pass
        return

    def write_history_file(self):
        """Write the command history file -- possibly."""
        settings = self.debugger.settings
        histfile = self.debugger.intf[-1].histfile
        if settings["hist_save"]:
            try:
                import readline
                try:
                    readline.write_history_file(histfile)
                except IOError:
                    pass
            except ImportError:
                pass
        return

    def _populate_commands(self):
        """Create an instance of each of the debugger
        commands. Commands are found by importing files in the
        directory 'command'. Some files are excluded via an array set
        in __init__.  For each of the remaining files, we import them
        and scan for class names inside those files and for each class
        name, we will create an instance of that class. The set of
        DebuggerCommand class instances form set of possible debugger
        commands."""
        from trepan.processor import command as Mcommand

        if hasattr(Mcommand, "__modules__"):
            return self.populate_commands_easy_install(Mcommand)
        else:
            return self.populate_commands_pip(Mcommand, "trepan")

    def populate_commands_pip(self, Mcommand, base_name="trepan"):
        """Instantiate command classes found via Mcommand's module dict
        (pip-style install).

        FIX: this method was previously defined with a single parameter but
        called with ``(Mcommand, "trepan")``, and referenced an undefined
        ``base_name`` -- both caused a TypeError/NameError at startup.
        ``base_name`` now arrives as a (defaulted) parameter."""
        cmd_instances = []
        eval_cmd_template = "command_mod.%s(self)"
        for mod_name in Mcommand.__dict__.keys():
            if mod_name.startswith("__"):
                continue
            import_name = "trepan.processor.command." + mod_name
            imp = __import__(import_name)
            if imp.__name__ == base_name:
                command_mod = imp.processor.command
            else:
                if mod_name in (
                        "info_sub",
                        "set_sub",
                        "show_sub",
                ):
                    pass
                try:
                    command_mod = getattr(__import__(import_name), mod_name)
                except:
                    # Don't need to warn about optional modules
                    if mod_name not in self.optional_modules:
                        print("Error importing %s: %s" %
                              (mod_name, sys.exc_info()[0]))
                    continue

            classnames = [
                tup[0]
                for tup in inspect.getmembers(command_mod, inspect.isclass)
                if ("DebuggerCommand" != tup[0]
                    and tup[0].endswith("Command"))
            ]
            for classname in classnames:
                eval_cmd = eval_cmd_template % classname
                try:
                    instance = eval(eval_cmd)
                    cmd_instances.append(instance)
                except:
                    print("Error loading %s from %s: %s" %
                          (classname, mod_name, sys.exc_info()[0]))
        return cmd_instances

    # This is the most-used way of adding commands
    def populate_commands_easy_install(self, Mcommand):
        """
        Add files in filesystem to self.commands.
        If running from source or from an easy_install'd package, this is
        used.
        """
        cmd_instances = []

        for mod_name in Mcommand.__modules__:
            if mod_name in (
                    "info_sub",
                    "set_sub",
                    "show_sub",
            ):
                pass
            import_name = "%s.%s" % (Mcommand.__name__, mod_name)
            try:
                command_mod = importlib.import_module(import_name)
            except:
                if mod_name not in self.optional_modules:
                    print("Error importing %s: %s" %
                          (mod_name, sys.exc_info()[0]))
                continue

            classnames = [
                tup[0]
                for tup in inspect.getmembers(command_mod, inspect.isclass)
                if ("DebuggerCommand" != tup[0]
                    and tup[0].endswith("Command"))
            ]
            for classname in classnames:
                try:
                    instance = getattr(command_mod, classname)(self)
                    cmd_instances.append(instance)
                except:
                    print("Error loading %s from %s: %s" %
                          (classname, mod_name, sys.exc_info()[0]))
        return cmd_instances

    def _populate_cmd_lists(self):
        """Populate self.lists and hashes: self.commands, and self.aliases,
        self.category"""
        self.commands = {}
        self.aliases = {}
        self.category = {}
        # self.short_help = {}
        for cmd_instance in self.cmd_instances:
            if not hasattr(cmd_instance, "aliases"):
                continue
            alias_names = cmd_instance.aliases
            cmd_name = cmd_instance.name
            self.commands[cmd_name] = cmd_instance
            for alias_name in alias_names:
                self.aliases[alias_name] = cmd_name
            cat = getattr(cmd_instance, "category")
            if cat and self.category.get(cat):
                self.category[cat].append(cmd_name)
            else:
                self.category[cat] = [cmd_name]
            # sh = getattr(cmd_instance, 'short_help')
            # if sh:
            #     self.short_help[cmd_name] = getattr(c, 'short_help')
        for k in list(self.category.keys()):
            self.category[k].sort()
        return
class File(Product, os.PathLike):
    """A file (or directory) in the local filesystem

    Parameters
    ----------
    identifier: str or pathlib.Path
        The path to the file (or directory), can contain placeholders
        (e.g. {{placeholder}})
    """

    def __init__(self, identifier, client=None):
        super().__init__(identifier)
        self._client = client
        # used by __repr__ to truncate long paths
        self._repr = Repr()
        self._repr.maxstring = 40

    def _init_identifier(self, identifier):
        """Validate the identifier and wrap it so it supports placeholders"""
        if not isinstance(identifier, (str, Path)):
            raise TypeError('File must be initialized with a str or a '
                            'pathlib.Path')

        return Placeholder(str(identifier))

    @property
    def _path_to_file(self):
        # resolved path to the product itself
        return Path(str(self._identifier))

    @property
    def _path_to_metadata(self):
        # metadata lives next to the product as a hidden file:
        # some/dir/data.csv -> some/dir/.data.csv.metadata
        name = f'.{self._path_to_file.name}.metadata'
        return self._path_to_file.with_name(name)

    def fetch_metadata(self):
        """Load metadata from the sibling metadata file.

        Returns an "empty" metadata dict (timestamp and source code set to
        None) when either the metadata file or the product file is missing.
        Raises ValueError when the metadata file contains invalid JSON.
        """
        # to keep compatibility with ploomber<0.10
        old_name = Path(str(self._path_to_file) + '.source')
        if old_name.is_file():
            shutil.move(old_name, self._path_to_metadata)

        empty = dict(timestamp=None, stored_source_code=None)

        # but we have no control over the stored code, it might be missing
        # so we check; we also require the file to exist: even if the
        # metadata file exists, missing the actual data file means something
        # is wrong and the task should run again
        if (self._path_to_metadata.exists() and self._path_to_file.exists()):
            content = self._path_to_metadata.read_text()

            try:
                parsed = json.loads(content)
            except json.JSONDecodeError as e:
                raise ValueError('Error loading JSON metadata '
                                 f'for {self!r} stored at '
                                 f'{str(self._path_to_metadata)!r}') from e
            else:
                # TODO: validate 'stored_source_code', 'timestamp' exist
                return parsed
        else:
            return empty

    def save_metadata(self, metadata):
        """Serialize metadata as JSON next to the product file"""
        self._path_to_metadata.write_text(json.dumps(metadata))

    def _delete_metadata(self):
        # best-effort: only remove the metadata file if it exists
        if self._path_to_metadata.exists():
            os.remove(str(self._path_to_metadata))

    def exists(self):
        """True if the product file (or directory) exists locally"""
        return self._path_to_file.exists()

    def delete(self, force=False):
        """Delete the product file or directory (if it exists)"""
        # force is not used for this product but it is left for API
        # compatibility
        if self.exists():
            self.logger.debug('Deleting %s', self._path_to_file)
            if self._path_to_file.is_dir():
                shutil.rmtree(str(self._path_to_file))
            else:
                os.remove(str(self._path_to_file))
        else:
            self.logger.debug('%s does not exist ignoring...',
                              self._path_to_file)

    def __repr__(self):
        # do not shorten, we need to process the actual path
        path = Path(self._identifier.best_repr(shorten=False))

        # if absolute, try to show a shorter version, if possible
        if path.is_absolute():
            try:
                path = path.relative_to(Path('.').resolve())
            except ValueError:
                # happens if the path is not a file/folder within the current
                # working directory
                pass

        content = self._repr.repr(str(path))
        return f'{type(self).__name__}({content})'

    @property
    def client(self):
        """Client to use for remote operations.

        Looked up lazily in dag.clients when the constructor did not
        receive one; raises ValueError when the product is not part of a
        DAG yet.
        """
        if self._client is None:
            if self._task is None:
                # FIX: the original message concatenated '...clients in'
                # and 'dag.clients)' without a space ("indag.clients")
                raise ValueError('Cannot obtain client for this product, '
                                 'the constructor did not receive a client '
                                 'and this product has not been assigned '
                                 'to a DAG yet (cannot look up for clients '
                                 'in dag.clients)')

            self._client = self.task.dag.clients.get(type(self))

        return self._client

    def download(self):
        """Download product + metadata, only when neither exists locally
        and both exist remotely"""
        if (self.client is not None and not self._path_to_metadata.exists()
                and not self._path_to_file.exists()):
            self.logger.info('Downloading %s...', self._path_to_file)

            metadata = str(self._path_to_metadata)
            # FIX: was str(str(...)) -- redundant double conversion
            file_ = str(self._path_to_file)

            if self.client._remote_exists(
                    metadata) and self.client._remote_exists(file_):
                self.client.download(metadata)
                self.client.download(file_)

    def upload(self):
        # only upload when we have complete info (product + metadata)
        if (self.client is not None and self._path_to_metadata.exists()
                and self._path_to_file.exists()):
            self.logger.info('Uploading %s...', self._path_to_file)
            self.client.upload(str(self._path_to_metadata))
            self.client.upload(str(self._path_to_file))

    def __fspath__(self):
        """
        Abstract method defined in the os.PathLike interface, enables this
        to work: ``import pandas as pd; pd.read_csv(File('file.csv'))``
        """
        return str(self)