def main(): assert wpull.application.builder path = os.path.join( os.path.dirname(os.path.abspath(__file__)), 'scripting_interfaces_include.rst' ) print(path) with open(path, 'w') as out_file: out_file.write('.. This document was automatically generated.\n') out_file.write(' DO NOT EDIT!\n\n') items = sorted( wpull.application.plugin.global_interface_registry.items(), key=lambda item: item[0].value ) for hook_name, (function, category) in items: hook_name_str = inspect.getmodule(hook_name).__name__ + '.' + str(hook_name) function_name_str = inspect.getmodule(function).__name__ + '.' + function.__qualname__ out_file.write( ':py:attr:`{} <{}>`\n'.format(hook_name, hook_name_str) ) out_file.write( ' {} Interface: :py:meth:`{} <{}>`\n\n'.format( category.value, function.__qualname__, function_name_str) )
def get_or_run_notebook_experiment(name, exp_dict, display_module = True, force_compute = False, **notebook_experiment_record_kwargs): """ Get the latest experiment with the given name, running it first if no record exists or if force_compute is set. :param name: Name of the experiment :param exp_dict: Dictionary of experiments to choose from :param force_compute: Recompute the experiment no matter what :param notebook_experiment_record_kwargs: :return: """ exp_id = get_latest_experiment_identifier(name=name) recompute = exp_id is None or force_compute if display_module: func = exp_dict[name] if hasattr(inspect.getmodule(func), '__file__'): module_rel_path = inspect.getmodule(func).__file__ if module_rel_path.endswith('.pyc'): module_rel_path = module_rel_path[:-1] module_name = inspect.getmodule(func).__name__ server_path = get_local_server_dir() rel_path = get_relative_path(module_rel_path, server_path) if recompute: display(HTML("Running Experiment %s from module <a href = '/edit/%s' target='_blank'>%s</a>" % (name, rel_path, module_name))) else: display(HTML("Showing Completed Experiment %s from module <a href = '/edit/%s' target='_blank'>%s</a>" % (exp_id, rel_path, module_name))) if recompute: exp = run_notebook_experiment(name, exp_dict, **notebook_experiment_record_kwargs) else: exp = load_experiment(exp_id) return exp
def _build_widget_subclass(cls): """ Build the DataViewer subclass for this viewer """ props = CustomWidgetBase._property_set + list(cls.ui.keys()) widget_dict = {'LABEL': cls.name, 'ui': cls.ui, 'coordinator_cls': cls, '_property_set': props} widget_dict.update(**dict((k, FormDescriptor(k)) for k in cls.ui)) widget_cls = type('%sWidget' % cls.__name__, (CustomWidgetBase,), widget_dict) cls._widget_cls = widget_cls qt_client.add(widget_cls) # add new classes to module namespace # needed for proper state saving/restoring for c in [widget_cls, cls]: w = getattr(getmodule(ViewerState), c.__name__, None) if w is not None: raise RuntimeError("Duplicate custom viewer detected %s" % c) setattr(getmodule(ViewerState), c.__name__, c)
def _load_plugin_from_info(cls, info, root): if not cls.validate_info(info): logger.warn('The module info is not valid.\n\t{}'.format(info)) return None, None module = info["module"] name = info["name"] requirements = info.get("requirements", []) sys.path.append(root) (fp, pathname, desc) = imp.find_module(module, [root, ]) try: cls._install_deps(info) tmp = imp.load_module(module, fp, pathname, desc) sys.path.remove(root) candidate = None for _, obj in inspect.getmembers(tmp): if inspect.isclass(obj) and inspect.getmodule(obj) == tmp: logger.debug(("Found plugin class:" " {}@{}").format(obj, inspect.getmodule(obj)) ) candidate = obj break if not candidate: logger.debug("No valid plugin for: {}".format(module)) return module = candidate(info=info) repo_path = root module._repo = Repo(repo_path) except InvalidGitRepositoryError: logger.debug("The plugin {} is not in a Git repository".format(module)) module._repo = None except Exception as ex: logger.error("Exception importing {}: {}".format(module, ex)) logger.error("Trace: {}".format(traceback.format_exc())) return None, None return name, module
def handle_module(module): print_tex("\\pagebreak") print_heading(1, "Module %s" % module.__name__) print_tex(inspect.getdoc(module)) functions = {} data = {} for obj_name in dir(module): obj = getattr(module, obj_name) if inspect.ismodule(obj) or inspect.isbuiltin(obj) or \ (inspect.isclass(obj) and \ inspect.getmodule(obj) != module) or \ (inspect.isfunction(obj) and \ inspect.getmodule(obj) != module) or \ isinstance(obj, __future__._Feature) or \ obj_name.startswith('_'): continue if obj_name.startswith('__') and obj_name.endswith('__'): pass elif inspect.isclass(obj): handle_class(obj) elif inspect.isfunction(obj): functions[str_function(obj)] = inspect.getdoc(obj) else: data[obj_name] = str(obj) if functions: print_heading(2, "Functions") print_desc(functions) if data: print_heading(2, "Data") print_desc(data)
def _from_module(self, module, object): """ Return true if the given object is defined in the given module. """ if module is None: return True elif inspect.isfunction(object): return module.__dict__ is object.__globals__ elif inspect.isbuiltin(object): return module.__name__ == object.__module__ elif inspect.isclass(object): return module.__name__ == object.__module__ elif inspect.ismethod(object): # This one may be a bug in cython that fails to correctly set the # __module__ attribute of methods, but since the same error is easy # to make by extension code writers, having this safety in place # isn't such a bad idea return module.__name__ == object.__self__.__class__.__module__ elif inspect.getmodule(object) is not None: return module is inspect.getmodule(object) elif hasattr(object, '__module__'): return module.__name__ == object.__module__ elif isinstance(object, property): return True # [XX] no way not be sure. elif inspect.ismethoddescriptor(object): # Unbound PyQt signals reach this point in Python 3.4b3, and we want # to avoid throwing an error. See also http://bugs.python.org/issue3158 return False else: raise ValueError("object must be a class or function, got %r" % object)
def unload_extension(self, name): lib = self.extensions.get(name) if lib is None: return # find all references to the module # remove the cogs registered from the module for cogname, cog in self.cogs.copy().items(): if inspect.getmodule(cog) is lib: self.remove_cog(cogname) # first remove all the commands from the module for command in self.commands.copy().values(): if command.module is lib: command.module = None if isinstance(command, GroupMixin): command.recursively_remove_all_commands() self.remove_command(command.name) # then remove all the listeners from the module for event_list in self.extra_events.copy().values(): remove = [] for index, event in enumerate(event_list): if inspect.getmodule(event) is lib: remove.append(index) for index in reversed(remove): del event_list[index] # finally remove the import.. del lib del self.extensions[name] del sys.modules[name]
def print_out(message, color, file): stack_ = stack() # Interestingly the if statement below is not executed when excepting KeyboardInterrupts. Weird. # To prevent a crash we assume the module's name is 'Unknown' module = "Unknown" if getmodule(stack_[2][0]) == None: for i in stack_[2:]: if getmodule(i[0]) != None: try: module = CACHED_MODULES[i[0].f_code] except KeyError: module = getmodule(i[0]).__name__ CACHED_MODULES[i[0].f_code] = module else: try: module = CACHED_MODULES[stack_[2][0].f_code] except KeyError: module = getmodule(stack_[2][0]).__name__ CACHED_MODULES[stack_[2][0].f_code] = module if not print_fallback: print( "[%s] %s: %s%s\033[0m" % (strftime("%H:%M:%S"), module, color, message), file=file) file.flush() else: print("[%s] %s: %s" % (strftime("%H:%M:%S"), module, message))
def _extract_t (_target) : if _target is None : _t = None elif hasattr (_target, '_t') : _t = getattr (_target, '_t') elif inspect.ismethod (_target) : if hasattr (_target.im_func, '_t') : _t = getattr (_target.im_func, '_t') elif _target.im_self is not None and hasattr (_target.im_self, '_t') : _t = getattr (_target.im_self, '_t') else : _t = _extract_t (_target.im_class) elif inspect.isfunction (_target) : _t = _extract_t (inspect.getmodule (_target)) elif inspect.isclass (_target) : _t = _extract_t (inspect.getmodule (_target)) elif inspect.ismodule (_target) : _t = None elif isinstance (_target, object) : _t = _extract_t (_target.__class__) else : _t = None return _t
def getMethods(self): """Get all methods of this class.""" methods = [] # remove the security proxy, so that `attr` is not proxied. We could # unproxy `attr` for each turn, but that would be less efficient. # # `getPermissionIds()` also expects the class's security checker not # to be proxied. klass = removeSecurityProxy(self.context) for name, attr, iface in klass.getMethodDescriptors(): entry = {'name': name, 'signature': "(...)", 'doc': renderText(attr.__doc__ or '', inspect.getmodule(attr)), 'interface': getInterfaceInfo(iface)} entry.update(getPermissionIds(name, klass.getSecurityChecker())) methods.append(entry) for name, attr, iface in klass.getMethods(): entry = {'name': name, 'signature': getFunctionSignature(attr), 'doc': renderText(attr.__doc__ or '', inspect.getmodule(attr)), 'interface': getInterfaceInfo(iface)} entry.update(getPermissionIds(name, klass.getSecurityChecker())) methods.append(entry) return methods
def __GetTestCaseName(self): # default value is __main__ ,if we find ,we do not use this stks = inspect.stack() for i in xrange(0,len(stks)-1): frm = stks[i] mm = inspect.getmodule(frm[0]) fc = frm[0].f_code fn = frm[3] if fn.lower().startswith('test_'): # if we find the test_ case name ,so we should test whether it is the XUnitCase class functions, # if so we return the whole module.class:function format _cls = fc.co_varnames[0] inst = frm[0].f_locals[_cls] # it is the class we search for ,so we return it if issubclass(inst.__class__, XUnitCase): cn = cls.GetClassName(inst.__class__) + ':' + fn return cn # we do not get the name, so we should get the class for i in xrange(0,len(stks)-1): frm = stks[i] mm = inspect.getmodule(frm[0]) fc = frm[0].f_code fn = frm[3] _cls = fc.co_varnames[0] inst = frm[0].f_locals[_cls] # it is the class we search for ,so we return it if issubclass(inst.__class__, XUnitCase): cn = cls.GetClassName(inst.__class__) return cn # nothing find ,so we should return __main__ return '__main__'
def get_dependency_files(obj, top, n=10): """Gets a list of files that obj depends on in some way that are under the directory top""" # Check to see if we're at the bottom of the recursion if n == 0: return [] # Get the object's module mod = inspect.getmodule(obj) try: # Then the file of the module. Do this so that decorated functions # are traced to the place they are first defined, not to the wrap filename = inspect.getsourcefile(mod) except TypeError: filename = None else: if filename: filename = os.path.abspath(filename) if os.path.commonprefix([filename, top]) != top: return [] out = [] if filename: out.append(filename) for name, attr in mod.__dict__.items(): attrmod = inspect.getmodule(attr) if attrmod != mod: out.extend(get_dependency_files(attr, top, n - 1)) return list(set(out))
def install(self, spec, prefix): # Config questions consist of: # # <PRESS ENTER TO CONTINUE> # Enter perl path # Enter where repeatmasker is being configured from # Enter trf path # Add a Search Engine: # 1. CrossMatch # 2. RMBlast - NCBI Blast with RepeatMasker extensions # 3. WUBlast/ABBlast (required by DupMasker) # 4. HMMER3.1 & DFAM # 5. Done # Enter RMBlast path # Do you want RMBlast to be your default search engine for # Repeatmasker? (Y/N) # Add a Search Engine: Done config_answers = ['\n', '%s\n' % self.spec['perl'].command.path, '%s\n' % self.stage.source_path, '%s\n' % self.spec['trf'].prefix.bin.trf, '2\n', '%s\n' % self.spec['ncbi-rmblastn'].prefix.bin, 'Y\n', '5\n'] config_answers_filename = 'spack-config.in' with open(config_answers_filename, 'w') as f: f.writelines(config_answers) with open(config_answers_filename, 'r') as f: inspect.getmodule(self).perl('configure', input=f) install_tree('.', prefix.bin)
def meson(self, spec, prefix): """Runs ``meson`` in the build directory""" options = [os.path.abspath(self.root_mesonlists_dir)] options += self.std_meson_args options += self.meson_args() with working_dir(self.build_directory, create=True): inspect.getmodule(self).meson(*options)
def _from_module(self, module, object): """ Return true if the given object is defined in the given module. """ if module is None: return True elif inspect.isfunction(object): return module.__dict__ is object.__globals__ elif inspect.isbuiltin(object): return module.__name__ == object.__module__ elif inspect.isclass(object): return module.__name__ == object.__module__ elif inspect.ismethod(object): # This one may be a bug in cython that fails to correctly set the # __module__ attribute of methods, but since the same error is easy # to make by extension code writers, having this safety in place # isn't such a bad idea return module.__name__ == object.__self__.__class__.__module__ elif inspect.getmodule(object) is not None: return module is inspect.getmodule(object) elif hasattr(object, "__module__"): return module.__name__ == object.__module__ elif isinstance(object, property): return True # [XX] no way not be sure. else: raise ValueError("object must be a class or function, got %r" % object)
def install(self, spec, prefix): """Make the install targets""" with working_dir(self.build_directory): if self.generator == 'Unix Makefiles': inspect.getmodule(self).make(*self.install_targets) elif self.generator == 'Ninja': inspect.getmodule(self).ninja(*self.install_targets)
def cmake(self, spec, prefix): """Runs ``cmake`` in the build directory""" options = [os.path.abspath(self.root_cmakelists_dir)] options += self.std_cmake_args options += self.cmake_args() with working_dir(self.build_directory, create=True): inspect.getmodule(self).cmake(*options)
def makeMetricList(outfile): f = open(outfile, 'w') print("=================", file=f) print("Available metrics", file=f) print("=================", file=f) print("Core LSST MAF metrics", file=f) print("=====================", file=f) print(" ", file=f) for name, obj in inspect.getmembers(metrics): if inspect.isclass(obj): modname = inspect.getmodule(obj).__name__ if modname.startswith('lsst.sims.maf.metrics'): link = "lsst.sims.maf.metrics.html#%s.%s" % (modname, obj.__name__) simpledoc = inspect.getdoc(obj).split('\n')[0] print("- `%s <%s>`_ \n \t %s" % (name, link, simpledoc), file=f) print(" ", file=f) if mafContribPresent: print("Contributed mafContrib metrics", file=f) print("==============================", file=f) print(" ", file=f) for name, obj in inspect.getmembers(mafContrib): if inspect.isclass(obj): modname = inspect.getmodule(obj).__name__ if modname.startswith('mafContrib') and name.endswith('Metric'): link = 'http://github.com/lsst-nonproject/sims_maf_contrib/tree/master/mafContrib/%s.py' % (modname.split('.')[-1]) simpledoc = inspect.getdoc(obj).split('\n')[0] print("- `%s <%s>`_ \n \t %s" % (name, link, simpledoc), file=f) print(" ", file=f)
def _get_caller_info(self): # pull the frames from the current stack, reversed, # since it's easier to find the first siftlog frame frames = [ (idx, frm, inspect.getmodule(frm[0])) for idx, frm in enumerate(reversed(inspect.stack())) ] # travel the stack from behind, looking for the first siftlog frame res = itertools.dropwhile(lambda (idx, f, m): m and m.__name__ != 'siftlog' , frames) # the first siftlog frame from back of the stack siftlog_frame = res.next() # its index siftlog_frame_idx = siftlog_frame[0] if siftlog_frame_idx == 0: # there is no caller module (console) return None # the frame before this one is what's calling the logger frm = frames[siftlog_frame_idx - 1][1] # now get the caller info mod = inspect.getmodule(frm[0]) return { 'file': frm[1], 'line_no' : frm[2], 'method' : frm[3], 'module' : mod.__name__, }
def doc_module(self, mod): if not inspect.ismodule(mod): return self.emit('''<h1>%s</h1>''' % mod.__name__) self.emit('''<p class="date">Generated %s</p>''' % \ datetime.datetime.now().ctime()) self.emit_pre(mod.__doc__) classes = inspect.getmembers(mod, inspect.isclass) if classes: self.emit('''<h2>Classes</h2>''') self.emit('''<dl class="class">''') for name, cls in classes: # Only document classes defined in this module if inspect.getmodule(cls) == mod: self.doc_class(cls) self.emit('''</dl>''') funcs = inspect.getmembers(mod, inspect.isfunction) if funcs: self.emit('''<h2>Functions</h2>''') self.emit('''<dl class="function">''') for name, func in funcs: # Only document functions defined in this module if inspect.getmodule(func) == mod: self.doc_function(func) self.emit('''</dl>''')
def get_caller(*caller_class, **params): """ This is obsolete and references are being removed """ (frame, file, line, func, contextlist, index) = inspect.stack()[1] try: class_name = frame.f_locals["self"].__class__.__name__ except: class_name = None if class_name: name = class_name + '.' elif caller_class != (): # pragma: no cover name = inspect.getmodule(caller_class[0]).__name__ + '.' elif hasattr(inspect.getmodule(frame), '__name__'): name = inspect.getmodule(frame).__name__ + '.' else: # pragma: no cover name = '' if func == '__init__' and class_name: name = class_name + '()' elif name == '__main__.': # pragma: no cover name = func + '()' else: name += func + '()' if 'persist_place' in params: get_caller._Persist_Place = params['persist_place'] log = params.get('log', logging.getLogger()) if get_caller._Persist_Place or params.get('place') or log.isEnabledFor(logging.DEBUG): name += ' [+{} {}]'.format(line, os.path.basename(file)) return name
def debug(*args, **kwargs): if not config.debug: return global _count trace = kwargs.pop('trace', False) if trace: frame = inspect.stack()[1] modules = [] stacks = inspect.stack()[1:] for frame in stacks: name = inspect.getmodule(frame[0]).__name__ if name != '__main__': modules.append(name) if not modules: modules.append('__main__') modules = '->'.join(x for x in reversed(modules)) else: module = inspect.getmodule(inspect.stack()[1][0]) if module: modules = module.__name__ else: modules = '' def p(): print('{}:{}: [{}]:'.format( _count, current_thread().name, modules), *args, **kwargs) p() # kwargs['file'] = _debugLogFile # p() _count += 1
def __getattribute__(self, name): import warnings logger_name = 'deluge' stack = inspect.stack() stack.pop(0) # The logging call from this module module_stack = stack.pop(0) # The module that called the log function caller_module = inspect.getmodule(module_stack[0]) # In some weird cases caller_module might be None, try to continue caller_module_name = getattr(caller_module, '__name__', '') warnings.warn_explicit(DEPRECATION_WARNING, DeprecationWarning, module_stack[1], module_stack[2], caller_module_name) if caller_module: for member in stack: module = inspect.getmodule(member[0]) if not module: continue if module.__name__ in ('deluge.plugins.pluginbase', 'deluge.plugins.init'): logger_name += '.plugin.%s' % caller_module_name # Monkey Patch The Plugin Module caller_module.log = logging.getLogger(logger_name) break else: logging.getLogger(logger_name).warning( "Unable to monkey-patch the calling module's `log` attribute! " "You should really update and rebuild your plugins..." ) return getattr(logging.getLogger(logger_name), name)
def _import_override(self, name, globals={}, locals={}, fromlist=[], level=-1): # Get the caller module, using globals (like the original __import # does) try: if globals is None: raise KeyError module = globals['__name__'] except KeyError: # Another method of getting the caller module, using the stack caller = inspect.currentframe().f_back module = inspect.getmodule(caller) # Some frames might not be associated to a module, because of the # use of exec for instance; we just skip these until we reach a # valid one while module is None: caller = caller.f_back if caller is None: break module = inspect.getmodule(caller) if module: module = module.__name__ # Get the Package from the module name if module: importing_pkg = None current = self._currently_importing_package if (current is not None and current.prefix and module.startswith(current.prefix + current.codepath)): importing_pkg = current else: for pkg in itertools.chain( self._package_list.itervalues(), self._available_packages.itervalues()): if (pkg.prefix is not None and module.startswith(pkg.prefix + pkg.codepath)): importing_pkg = pkg break # If we are importing directly from a package if importing_pkg is not None: old_current = self._currently_importing_package self._currently_importing_package = importing_pkg result = importing_pkg.import_override( self._orig_import, name, globals, locals, fromlist, level, package_importing_directly=True) self._currently_importing_package = old_current return result # If we are doing it indirectly (from other stuff imported from a # package) elif self._currently_importing_package is not None: return self._currently_importing_package.import_override( self._orig_import, name, globals, locals, fromlist, level, package_importing_directly=False) # Else, this is not from a package return self._orig_import(name, globals, locals, fromlist, level)
def get_caller(*caller_class, **params): """ Provide a convenient way to report your class and function. Common practice is to use: from taskforce.utils import get_caller as my You can then log top-of-stack calling details using something like this: log.debug("%s: Nothing to report", my(self)) Outside a class context, my() will report the current module and function name. If the param "place" is set, logging will include the file and line number of the call. This happens automatically if the loglevel of the 'log' param (default is the root logger) is DEBUG. log.debug("%s: Nothing to report", my(self, place=True)) If the param "persist_place" is set, that value will persist across multiple calls until the next call with "persist_place" is set. For example: log.debug("%s: Nothing to report", my(self, persist_place=True)) . . . log.debug("%s: Still nothing to report", my(self)) . . . log.debug("%s: Nothing to report", my(self, persist_place=False)) """ (frame, file, line, func, contextlist, index) = inspect.stack()[1] try: class_name = frame.f_locals["self"].__class__.__name__ except: class_name = None if class_name: name = class_name + '.' elif caller_class != (): # pragma: no cover name = inspect.getmodule(caller_class[0]).__name__ + '.' elif hasattr(inspect.getmodule(frame), '__name__'): name = inspect.getmodule(frame).__name__ + '.' else: # pragma: no cover name = '' if func == '__init__' and class_name: name = class_name + '()' elif name == '__main__.': # pragma: no cover name = func + '()' else: name += func + '()' if 'persist_place' in params: get_caller._Persist_Place = params['persist_place'] log = params.get('log', logging.getLogger()) if get_caller._Persist_Place or params.get('place') or log.isEnabledFor(logging.DEBUG): name += ' [+{} {}]'.format(line, os.path.basename(file)) return name
def finalizePresetLookup(): lookup = {} for k in __preset_staging_visited: del __preset_staging[k] for pw in __preset_staging.itervalues(): if type(pw) is TreeDict: del pw["__defaultpresettree__"] modifyGlobalDefaultTree(pw) continue preset_tree_name = __presetTreeName(pw.name) ret = lookup.setdefault(preset_tree_name, pw) if ret is not pw: if ret.action is not pw.action: warnings.warn( ("Possible duplicate preset name '%s'; \n " " original in module '%s'; ignoring " "duplicate from module %s.") % (pw.name, inspect.getmodule(ret.action), inspect.getmodule(pw.action)) ) # Give everything over to the main preset thing assert __preset_lookup == {} __preset_lookup.update(lookup)
def dump_object(name, tmp_obj): print ">>>>>>>>>>>>>>>>>>>>>>>>>>", name, tmp_obj print describe(tmp_obj) # From line 921, method docmodule: classes = [] for key, value in inspect.getmembers(tmp_obj, inspect.isclass): if (inspect.getmodule(value) or tmp_obj) is tmp_obj: classes.append((key, value)) dump_object(key, value) funcs = [] for key, value in inspect.getmembers(tmp_obj, inspect.isroutine): if inspect.isbuiltin(value) or inspect.getmodule(value) is tmp_obj: funcs.append((key, value)) data = [] for key, value in inspect.getmembers(tmp_obj, isdata): if key not in ['__builtins__', '__doc__']: data.append((key, value)) methods = [] for key, value in inspect.getmembers(tmp_obj, inspect.ismethod): if key not in ['__builtins__', '__doc__']: methods.append((key, value)) print "C:", classes print "\nF:", funcs print "\nD:", data print "\nM:", methods for m in methods: print inspect.getargspec(m[1]), inspect.getdoc(m[1]), inspect.getcomments(m[1]) print "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<"
def configure(self, spec, prefix): # Overriding default configure method in order to cater to interactive # Build.pl self.build_method = 'Build.PL' self.build_executable = Executable( join_path(self.stage.source_path, 'Build')) # Config questions consist of: # Do you want to run the Bio::DB::GFF or Bio::DB::SeqFeature::Store # live database tests? y/n [n] # # Install [a]ll BioPerl scripts, [n]one, or choose groups # [i]nteractively? [a] # # Do you want to run tests that require connection to servers across # the internet (likely to cause some failures)? y/n [n] # # Eventually, someone can add capability for the other options, but # the current answers are the most practical for a spack install. config_answers = ['n\n', 'a\n', 'n\n'] config_answers_filename = 'spack-config.in' with open(config_answers_filename, 'w') as f: f.writelines(config_answers) with open(config_answers_filename, 'r') as f: inspect.getmodule(self).perl('Build.PL', '--install_base=%s' % self.prefix, input=f)
def show_module(mod): for key, value in inspect.getmembers(mod, inspect.ismodule): print 'import ' + key for key, value in inspect.getmembers(mod, lambda obj: is_imported_obj(mod, obj)): print 'from ' + value.__module__ + ' import ' + key print known_globals = set([]) for name, value in get_data(mod): print name, '=', repr(value) known_globals.add(name) print for key, value in inspect.getmembers(mod, inspect.isclass): if inspect.getmodule(value) == mod: show_class(key, value, known_globals) print for key, value in inspect.getmembers(mod, inspect.isfunction): if inspect.getmodule(value) == mod: show_function(key, value, known_globals) print
def _load_plugin(root, filename): logger.debug("Loading plugin: {}".format(filename)) fpath = os.path.join(root, filename) with open(fpath, "r") as f: info = json.load(f) logger.debug("Info: {}".format(info)) sys.path.append(root) module = info["module"] name = info["name"] (fp, pathname, desc) = imp.find_module(module, [root]) try: tmp = imp.load_module(module, fp, pathname, desc) sys.path.remove(root) candidate = None for _, obj in inspect.getmembers(tmp): if inspect.isclass(obj) and inspect.getmodule(obj) == tmp: logger.debug(("Found plugin class:" " {}@{}").format(obj, inspect.getmodule(obj))) candidate = obj break if not candidate: logger.debug("No valid plugin for: {}".format(filename)) return module = candidate(info=info) try: repo_path = root module._repo = Repo(repo_path) except InvalidGitRepositoryError: module._repo = None except Exception as ex: logger.error("Exception importing {}: {}".format(filename, ex)) logger.error("Trace: {}".format(traceback.format_exc())) return None, None return name, module
def helpNonVerbose(thing, title='Python Library Documentation: %s', forceload=0): """ Utility method to return python help in the form of a string thing - str or unicode name to get help on title - format string for help result forceload - argument to pydoc.resolve, force object's module to be reloaded from file returns formated help string """ result = "" try: thingStr = thing.encode(cmds.about(codeset=True)) except: thingStr = str(thing) try: # Possible two-stage object resolution! # Sometimes we get docs for strings, other times for objects # try: object, name = pydoc.resolve(thingStr, forceload) except: # Get an object from a string thingObj = eval(thingStr, sys.modules['__main__'].__dict__) object, name = pydoc.resolve(thingObj, forceload) desc = pydoc.describe(object) module = inspect.getmodule(object) if name and '.' in name: desc += ' in ' + name[:name.rfind('.')] elif module and module is not object: desc += ' in module ' + module.__name__ doc = None text = pydoc.TextDoc() if not (inspect.ismodule(object) or inspect.isclass(object) or inspect.isroutine(object) or inspect.isgetsetdescriptor(object) or inspect.ismemberdescriptor(object) or isinstance(object, property)): # If the passed object is a piece of data or an instance, # document its available methods instead of its value. object = type(object) desc += ' object' # if the object is a maya command without a proper docstring, # then tack on the help for it elif module is cmds and inspect.isroutine(object): try: if len(object.__doc__) == 0: doc = cmds.help(object.__name__) except: pass if not doc: doc = text.document(object, name) result = pydoc.plain(title % desc + '\n\n' + doc) # Remove multiple empty lines result = "\n".join( [line for line in result.splitlines() if line.strip()]) except: pass return result
def _spider_function(function, session, pickles={}): """ Takes a function and global variables referenced in an environment and recursively finds dependencies required in order to execute the function. This includes references to classes, libraries, variables, functions, etc. Parameters ---------- function: function a function referenced in an environment session: dictionary variables referenced from a separate environment; i.e. globals() pickles: dictionary holds the variables needed to execute the function Returns ------- imports: list list of import statements required for the function to execute source: string source code of the function pickles: dictionary dictionary of variable names and their values as pickled strings """ if '_objects_seen' not in pickles: pickles['_objects_seen'] = [] pickles['_objects_seen'].append(str(function)) imports = [] modules = {} source = "# code for %s\n" % (str(function)) if isinstance(function, types.ModuleType): logging.debug( "object (%s) is a `types.ModuleType`. skipping _get_source" % str(function)) pass else: source += _get_source(function) + '\n' logging.debug("finding objects that need to be serialized") for varname in _get_naked_loads(function): logging.debug("\t%s" % varname) if varname in pickles['_objects_seen']: logging.debug("\t%s has already been processed. skipping." % varname) continue pickles['_objects_seen'].append(varname) if varname not in session: logging.debug("\t%s not found in session. skipping." % varname) continue obj = session[varname] # checking to see if this is an instance of an object if hasattr(obj, "__name__") == False: logging.debug( "\t%s has attribute __name__, performing serialization checks." % varname) if _is_tensor(obj): logging.debug("\t%s is from tensorflow skipping." % varname) continue elif _is_spark(obj): logging.debug( "\t%s is from spark. serializing using `dumps_spark_to_base64`." % varname) pickles[varname] = terragon.dumps_spark_to_base64( session['sc'], obj) elif _is_pom(obj): logging.debug( "\t%s is from pomegranate. serializing using `dumps_pom_to_base64`." % varname) pickles[varname] = terragon.dumps_pom_to_base64(obj) else: logging.debug( "\tno special serialization requirement detected for %s. using dumps_to_base64." % varname) pickles[varname] = terragon.dumps_to_base64(obj) elif hasattr(obj, "__module__"): logging.debug("\tobject %s has attribute __module__" % varname) if obj.__module__ == "__main__": logging.debug("\t%s is in module __main__" % varname) new_imports, new_source, new_pickles, new_modules = _spider_function( obj, session, pickles) source += new_source + '\n' imports += new_imports pickles.update(new_pickles) modules.update(new_modules) else: logging.debug( "\t%s is a submodule (not __main__). extracting/saving source code and import statements." % varname) modules.update(_extract_module(obj.__module__)) ref = inspect.getmodule(obj).__name__ if hasattr(obj, "func_name") and obj.__name__ != varname: imports.append("from %s import %s as %s" % (ref, obj.__name__, varname)) else: # we need to figure out how to import this library. i'm not # sure exactly what the right way to get the module and # class name, but this works just fine try: import_statement = "from %s import %s" % (ref, varname) exec(import_statement, locals()) imports.append(import_statement) except: try: import_statement = "from %s import %s" % ( ref, obj.__class__.__name__) exec(import_statement, locals()) imports.append(import_statement) except: pass elif isinstance(obj, types.ModuleType): logging.debug( "\tobject %s is a `types.ModuleType`, extracting/saving source code and import statements." % varname) modules.update(_extract_module(obj.__name__)) if obj.__name__ != varname: imports.append("import %s as %s" % (obj.__name__, varname)) else: imports.append("import %s" % (varname)) else: logging.debug( "\tno special cases detected for %s. pickling using dumps_to_base64" % varname) # catch all. if all else fails, pickle it pickles[varname] = terragon.dumps_to_base64(obj) return imports, source, pickles, modules
def install(self, spec, prefix): """Install the package.""" args = self.install_args(spec, prefix) inspect.getmodule(self).scons('install', *args)
def process(self, module): """Run a processing module. @param module: processing module to run. @return: results generated by module. """ # Initialize the specified processing module. try: current = module(self.results) except: log.exception("Failed to load the processing module " '"{0}":'.format(module)) return # Extract the module name. module_name = inspect.getmodule(current).__name__ if "." in module_name: module_name = module_name.rsplit(".", 1)[1] try: options = self.cfg.get(module_name) except CuckooOperationalError: log.debug("Processing module %s not found in configuration file", module_name) return None # If the processing module is disabled in the config, skip it. if not options.enabled: return None # Give it path to the analysis results. current.set_path(self.analysis_path) # Give it the analysis task object. current.set_task(self.task) # Give it the options from the relevant processing.conf section. current.set_options(options) try: # Run the processing module and retrieve the generated data to be # appended to the general results container. log.debug( 'Executing processing module "%s" on analysis at ' '"%s"', current.__class__.__name__, self.analysis_path) pretime = datetime.now() data = current.run() posttime = datetime.now() timediff = posttime - pretime self.results["statistics"]["processing"].append({ "name": current.__class__.__name__, "time": float("%d.%03d" % (timediff.seconds, timediff.microseconds / 1000)), }) # If succeeded, return they module's key name and the data to be # appended to it. return {current.key: data} except CuckooDependencyError as e: log.warning( 'The processing module "%s" has missing dependencies: %s', current.__class__.__name__, e) except CuckooProcessingError as e: log.warning( 'The processing module "%s" returned the following ' "error: %s", current.__class__.__name__, e) except: log.exception('Failed to run the processing module "%s":', current.__class__.__name__) return None
def log(self, **kwargs): """ Simple Example: from tendenci.apps.event_logs.models import EventLog EventLog.objects.log() If you have a Tendenci Base Object, then use the following EventLog.objects.log(instance=obj_local_var) """ request, user, instance = None, None, None stack = inspect.stack() # If the request is not present in the kwargs, we try to find it # by inspecting the stack. We dive 3 levels if necessary. - JMO 2012-05-14 if 'request' in kwargs: request = kwargs['request'] else: if 'request' in inspect.getargvalues(stack[1][0]).locals: request = inspect.getargvalues(stack[1][0]).locals['request'] elif 'request' in inspect.getargvalues(stack[2][0]).locals: request = inspect.getargvalues(stack[2][0]).locals['request'] elif 'request' in inspect.getargvalues(stack[3][0]).locals: request = inspect.getargvalues(stack[3][0]).locals['request'] # If this eventlog is being triggered by something without a request, we # do not want to log it. This is usually some other form of logging # like Contributions or perhaps Versions in the future. - JMO 2012-05-14 if not request: return None # skip if pingdom if 'pingdom.com' in request.META.get('HTTP_USER_AGENT', ''): return None event_log = self.model() # Set the following fields to blank event_log.guid = "" event_log.source = "" event_log.event_id = 0 event_log.event_name = "" event_log.event_type = "" event_log.event_data = "" event_log.category = "" if 'instance' in kwargs: instance = kwargs['instance'] ct = ContentType.objects.get_for_model(instance) event_log.content_type = ct event_log.object_id = instance.pk event_log.headline = str(instance)[:50] event_log.model_name = ct.name event_log.application = instance.__module__ if hasattr(instance, 'guid'): event_log.uuid = str(instance.guid) event_log.entity = None if 'entity' in kwargs: event_log.entity = kwargs['entity'] # Allow a description to be added in special cases like impersonation event_log.description = "" if 'description' in kwargs: event_log.description = kwargs['description'] # Application is the name of the app that the event is coming from # # We get the app name via inspecting. Due to our update_perms_and_save util # we must filter out perms as an actual source. This is ok since there are # no views within perms. - JMO 2012-05-14 if 'application' in kwargs: event_log.application = kwargs['application'] if not event_log.application: event_log.application = inspect.getmodule(stack[1][0]).__name__ if "perms" in event_log.application.split('.'): event_log.application = inspect.getmodule(stack[2][0]).__name__ if "perms" in event_log.application.split('.'): event_log.application = inspect.getmodule( stack[3][0]).__name__ event_log.application = event_log.application.split('.') remove_list = [ 'tendenci', 'models', 'views', 'addons', 'core', 'apps', 'contrib' ] for item in remove_list: if item in event_log.application: event_log.application.remove(item) # Join on the chance that we are left with more than one item # in the list that we created event_log.application = ".".join(event_log.application) # Action is the name of the view that is being called # # We get it via the stack, but we filter out stacks that are named # 'save' or 'update_perms_and_save' to avoid getting the incorrect # view. We don't want to miss on a save method override or our own # updating. - JMO 2012-05-14 if 'action' in kwargs: event_log.action = kwargs['action'] else: event_log.action = stack[1][3] if stack[1][3] == "save": if stack[2][3] == "save" or stack[2][ 3] == "update_perms_and_save": if stack[3][3] == "update_perms_and_save": event_log.action = stack[4][3] else: event_log.action = stack[3][3] else: event_log.action = stack[2][3] if event_log.application == "base": event_log.application = "homepage" if 'user' in kwargs: user = kwargs['user'] else: user = request.user # set up the user information if user: # check for impersonation and set the correct user, descriptions, etc impersonated_user = getattr(user, 'impersonated_user', None) if impersonated_user: if event_log.description: event_log.description = '%s (impersonating %s)' % ( event_log.description, impersonated_user.username, ) else: event_log.description = '(impersonating %s)' % ( impersonated_user.username, ) if isinstance(user, AnonymousUser): event_log.username = '******' else: event_log.user = user event_log.username = user.username event_log.email = user.email # setup request meta information if request: if hasattr(request, 'COOKIES'): event_log.session_id = request.COOKIES.get('sessionid', '') if hasattr(request, 'META'): # Check for HTTP_X_REAL_IP first in case we are # behind a load balancer event_log.user_ip_address = request.META.get( 'HTTP_X_FORWARDED_FOR', request.META.get('REMOTE_ADDR', '')) if "," in event_log.user_ip_address: event_log.user_ip_address = event_log.user_ip_address.split( ",")[-1].replace(" ", "") event_log.user_ip_address = event_log.user_ip_address[-15:] event_log.http_referrer = smart_bytes(request.META.get( 'HTTP_REFERER', '')[:255], errors='replace') event_log.http_user_agent = smart_bytes(request.META.get( 'HTTP_USER_AGENT', ''), errors='replace') event_log.request_method = request.META.get( 'REQUEST_METHOD', '') event_log.query_string = request.META.get('QUERY_STRING', '') # take care of robots robot = Robot.objects.get_by_agent(event_log.http_user_agent) if robot: event_log.robot = robot try: event_log.server_ip_address = gethostbyname(gethostname()) except: try: event_log.server_ip_address = settings.INTERNAL_IPS[0] except: event_log.server_ip_address = '0.0.0.0' if hasattr(request, 'path'): event_log.url = request.path or '' # If we have an IP address, save the event_log if "." in event_log.user_ip_address: event_log.save() return event_log else: return None
def run_init(self): ''' call all initializers after startup :return: ''' self.app.logger.info("Invoke Init") self.cache["init"] = sorted(self.cache["init"], key=lambda k: k['order']) for i in self.cache.get("init"): self.app.logger.info("INITIALIZER - METHOD %s PATH %s: " % (i.get("function").__name__, str(inspect.getmodule(i.get("function")).__file__) )) i.get("function")(self)
def validate_rules(self): for state in self.stateinfo: # Validate all rules defined by functions for fname, f in self.funcsym[state]: line = f.__code__.co_firstlineno file = f.__code__.co_filename module = inspect.getmodule(f) self.modules.add(module) tokname = self.toknames[fname] if isinstance(f, types.MethodType): reqargs = 2 else: reqargs = 1 nargs = f.__code__.co_argcount if nargs > reqargs: self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) self.error = True continue if nargs < reqargs: self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) self.error = True continue if not _get_regex(f): self.log.error( "%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__) self.error = True continue try: c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags) if c.match(''): self.log.error( "%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__) self.error = True except re.error as e: self.log.error( "%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e) if '#' in _get_regex(f): self.log.error( "%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__) self.error = True # Validate all rules defined by strings for name, r in self.strsym[state]: tokname = self.toknames[name] if tokname == 'error': self.log.error("Rule '%s' must be defined as a function", name) self.error = True continue if tokname not in self.tokens and tokname.find('ignore_') < 0: self.log.error( "Rule '%s' defined for an unspecified token %s", name, tokname) self.error = True continue try: c = re.compile('(?P<%s>%s)' % (name, r), self.reflags) if (c.match('')): self.log.error( "Regular expression for rule '%s' matches empty string", name) self.error = True except re.error as e: self.log.error( "Invalid regular expression for rule '%s'. %s", name, e) if '#' in r: self.log.error( "Make sure '#' in rule '%s' is escaped with '\\#'", name) self.error = True if not self.funcsym[state] and not self.strsym[state]: self.log.error("No rules defined for state '%s'", state) self.error = True # Validate the error function efunc = self.errorf.get(state, None) if efunc: f = efunc line = f.__code__.co_firstlineno file = f.__code__.co_filename module = inspect.getmodule(f) self.modules.add(module) if isinstance(f, types.MethodType): reqargs = 2 else: reqargs = 1 nargs = f.__code__.co_argcount if nargs > reqargs: self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) self.error = True if nargs < reqargs: self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) self.error = True for module in self.modules: self.validate_module(module)
def get_dataframe_package(df): """ Utility function to return the top level package (pandas/cudf) of DataFrame/Series/Index objects """ module = inspect.getmodule(df) package, _, _ = module.__name__.partition('.') return sys.modules[package]
def cls_str_of_meth(meth): mod = inspect.getmodule(meth) cls = meth.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0] return '{0}.{1}'.format(mod.__name__, cls)
def process(self, module): """Run a single reporting module. @param module: reporting module. @param results: results results from analysis. """ # Initialize current reporting module. try: current = module() except: log.exception( 'Failed to load the reporting module "{0}":'.format(module)) return # Extract the module name. module_name = inspect.getmodule(current).__name__ if "." in module_name: module_name = module_name.rsplit(".", 1)[1] try: options = self.cfg.get(module_name) except CuckooOperationalError: log.info("Reporting module %s not found in configuration file", module_name) return # If the reporting module is disabled in the config, skip it. if not options.enabled: return # Give it the path to the analysis results folder. current.set_path(self.analysis_path) # Give it the analysis task object. current.set_task(self.task) # Give it the the relevant reporting.conf section. current.set_options(options) # Load the content of the analysis.conf file. current.cfg = Config(cfg=current.conf_path) try: log.debug('Executing reporting module "%s"', current.__class__.__name__) pretime = datetime.now() if module_name == "submitCAPE" and self.reprocess: tasks = db.list_parents(self.task["id"]) if tasks: self.results["CAPE_children"] = tasks return else: current.run(self.results) posttime = datetime.now() timediff = posttime - pretime self.results["statistics"]["reporting"].append({ "name": current.__class__.__name__, "time": float("%d.%03d" % (timediff.seconds, timediff.microseconds / 1000)), }) except CuckooDependencyError as e: log.warning( 'The reporting module "%s" has missing dependencies: %s', current.__class__.__name__, e) except CuckooReportError as e: log.warning( 'The reporting module "%s" returned the following error: %s', current.__class__.__name__, e) except: log.exception('Failed to run the reporting module "%s":', current.__class__.__name__)
def caller_module(depth=1): frm = inspect.stack()[depth + 1] caller = inspect.getmodule(frm[0]) return caller
def dist_for_obj(obj): name = inspect.getmodule(obj).__name__.partition('.')[0] return dists_by_module.get(name)
def get_module_name(obj): try: return inspect.getmodule(obj).__name__ except AttributeError: return None
def build(self, spec, prefix): """Build the package.""" args = self.build_args(spec, prefix) inspect.getmodule(self).scons(*args)
def get_imported_module_dir(cls): """Return the path of the module of this class or subclass.""" return os.path.dirname(inspect.getfile(inspect.getmodule(cls)))
def _pluginModule(): return inspect.getmodule(lambda: None)
def deprecated( key: str, replacement_key: Optional[str] = None, invalidation_version: Optional[str] = None, default: Optional[Any] = None, ) -> Callable[[Dict], Dict]: """ Log key as deprecated and provide a replacement (if exists). Expected behavior: - Outputs the appropriate deprecation warning if key is detected - Processes schema moving the value from key to replacement_key - Processes schema changing nothing if only replacement_key provided - No warning if only replacement_key provided - No warning if neither key nor replacement_key are provided - Adds replacement_key with default value in this case - Once the invalidation_version is crossed, raises vol.Invalid if key is detected """ module = inspect.getmodule(inspect.stack()[1][0]) if module is not None: module_name = module.__name__ else: # Unclear when it is None, but it happens, so let's guard. # https://github.com/home-assistant/home-assistant/issues/24982 module_name = __name__ if replacement_key and invalidation_version: warning = ("The '{key}' option (with value '{value}') is" " deprecated, please replace it with '{replacement_key}'." " This option will become invalid in version" " {invalidation_version}") elif replacement_key: warning = ("The '{key}' option (with value '{value}') is" " deprecated, please replace it with '{replacement_key}'") elif invalidation_version: warning = ("The '{key}' option (with value '{value}') is" " deprecated, please remove it from your configuration." " This option will become invalid in version" " {invalidation_version}") else: warning = ("The '{key}' option (with value '{value}') is" " deprecated, please remove it from your configuration") def check_for_invalid_version(value: Optional[Any]) -> None: """Raise error if current version has reached invalidation.""" if not invalidation_version: return if parse_version(__version__) >= parse_version(invalidation_version): raise vol.Invalid( warning.format( key=key, value=value, replacement_key=replacement_key, invalidation_version=invalidation_version, )) def validator(config: Dict) -> Dict: """Check if key is in config and log warning.""" if key in config: value = config[key] check_for_invalid_version(value) KeywordStyleAdapter(logging.getLogger(module_name)).warning( warning, key=key, value=value, replacement_key=replacement_key, invalidation_version=invalidation_version, ) if replacement_key: config.pop(key) else: value = default keys = [key] if replacement_key: keys.append(replacement_key) if value is not None and (replacement_key not in config or default == config.get(replacement_key)): config[replacement_key] = value return has_at_most_one_key(*keys)(config) return validator
def mock_run_from_terminal(mocker): caller = inspect.getmodule(inspect.currentframe().f_back) module_name = caller.__name__.replace("test", "check") mock = mocker.patch(f"hooks.{module_name}.sys.stderr") mock.isatty.return_value = True
def findsource(object): """Return the entire source file and starting line number for an object. The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a list of all the lines in the file and the line number indexes a line in that list. An IOError is raised if the source code cannot be retrieved. FIXED version with which we monkeypatch the stdlib to work around a bug.""" file = getsourcefile(object) or getfile(object) # If the object is a frame, then trying to get the globals dict from its # module won't work. Instead, the frame object itself has the globals # dictionary. globals_dict = None if inspect.isframe(object): # XXX: can this ever be false? globals_dict = object.f_globals else: module = getmodule(object, file) if module: globals_dict = module.__dict__ lines = linecache.getlines(file, globals_dict) if not lines: raise IOError('could not get source code') if ismodule(object): return lines, 0 if isclass(object): name = object.__name__ pat = re.compile(r'^(\s*)class\s*' + name + r'\b') # make some effort to find the best matching class definition: # use the one with the least indentation, which is the one # that's most probably not inside a function definition. candidates = [] for i in range(len(lines)): match = pat.match(lines[i]) if match: # if it's at toplevel, it's already the best one if lines[i][0] == 'c': return lines, i # else add whitespace to candidate list candidates.append((match.group(1), i)) if candidates: # this will sort by whitespace, and by line number, # less whitespace first candidates.sort() return lines, candidates[0][1] else: raise IOError('could not find class definition') if ismethod(object): object = object.im_func if isfunction(object): object = object.func_code if istraceback(object): object = object.tb_frame if isframe(object): object = object.f_code if iscode(object): if not hasattr(object, 'co_firstlineno'): raise IOError('could not find function definition') pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)') pmatch = pat.match # fperez - fix: sometimes, co_firstlineno can give a number larger than # the length of lines, which causes an error. Safeguard against that. lnum = min(object.co_firstlineno, len(lines)) - 1 while lnum > 0: if pmatch(lines[lnum]): break lnum -= 1 return lines, lnum raise IOError('could not find code object')
def docmodule(self, object, name=None, mod=None, *ignored): """Produce HTML documentation for a module object.""" name = object.__name__ # ignore the passed-in name try: all = object.__all__ except AttributeError: all = None parts = name.split('.') links = [] for i in range(len(parts) - 1): links.append( '<a href="{}.html" class="docmodule_link">{}</a>'.format( '.'.join(parts[:i + 1]), parts[i])) head = '.'.join(links + parts[-1:]) try: path = inspect.getabsfile(object) url = path if sys.platform == 'win32': import nturl2path url = nturl2path.pathname2url(path) filelink = self.filelink(url, path) except TypeError: filelink = '(built-in)' info = [] if hasattr(object, '__version__'): version = str(object.__version__) if version[:11] == '$' + 'Revision: ' and version[-1:] == '$': version = version[11:-1].strip() info.append('version %s' % self.escape(version)) if hasattr(object, '__date__'): info.append(self.escape(str(object.__date__))) if info: head = head + ' (%s)' % ', '.join(info) docloc = self.getdocloc(object) if docloc is not None: docloc = ('<br><a href="%(docloc)s">Module Reference</a>' % locals()) else: docloc = '' extras = '<a href=".">index</a><br>' + filelink + docloc result = self.heading(head, extras) modules = inspect.getmembers(object, inspect.ismodule) classes, cdict = [], {} for key, value in inspect.getmembers(object, inspect.isclass): # if __all__ exists, believe it. Otherwise use old heuristic. if (all is not None or (inspect.getmodule(value) or object) is object): if visiblename(key, all, object): classes.append((key, value)) cdict[key] = cdict[value] = '#' + key for key, value in classes: for base in value.__bases__: key, modname = base.__name__, base.__module__ module = sys.modules.get(modname) if modname != name and module and hasattr(module, key): if getattr(module, key) is base: if key not in cdict: cdict[key] = cdict[base] = (modname + '.html#' + key) funcs, fdict = [], {} for key, value in inspect.getmembers(object, inspect.isroutine): # if __all__ exists, believe it. Otherwise use old heuristic. if (all is not None or inspect.isbuiltin(value) or inspect.getmodule(value) is object): if visiblename(key, all, object): funcs.append((key, value)) fdict[key] = '#-' + key if inspect.isfunction(value): fdict[value] = fdict[key] data = [] for key, value in inspect.getmembers(object, isdata): if visiblename(key, all, object): data.append((key, value)) doc = self.markup(getdoc(object), self.preformat, fdict, cdict) doc = doc and '<code>{}</code>'.format(doc) result = result + '<p>%s</p>\n' % doc if hasattr(object, '__path__'): modpkgs = [] for importer, modname, ispkg in pkgutil.iter_modules( object.__path__): modpkgs.append((modname, name, ispkg, 0)) modpkgs.sort() contents = self.multicolumn(modpkgs, self.modpkglink) result = result + self.bigsection( 'Package Contents', contents, css_class="package") elif modules: contents = self.multicolumn(modules, lambda t: self.modulelink(t[1])) result = result + self.bigsection( 'Modules', contents, css_class="module") if classes: classlist = [value for (key, value) in classes] contents = [ self.formattree(inspect.getclasstree(classlist, 1), name) ] for key, value in classes: contents.append( self.document(value, key, name, fdict, cdict)) result = result + self.bigsection( 'Classes', ' '.join(contents), css_class="classes") if funcs: contents = [] for key, value in funcs: contents.append( self.document(value, key, name, fdict, cdict)) result = result + self.bigsection( 'Functions', ' '.join(contents), css_class="functions") if data: contents = [] for key, value in data: contents.append(self.document(value, key)) result = result + self.bigsection( 'Data', '<br>\n'.join(contents), css_class="data") if hasattr(object, '__author__'): contents = self.markup(str(object.__author__), self.preformat) result = result + self.bigsection( 'Author', contents, css_class="author") if hasattr(object, '__credits__'): contents = self.markup(str(object.__credits__), self.preformat) result = result + self.bigsection( 'Credits', contents, css_class="credits") return result
def get(self, _): frs = inspect.getouterframes(inspect.currentframe()) for fr in frs[2:]: mo = inspect.getmodule(fr.frame) if mo is None or mo.__name__.partition('.')[0] != 'anyioc': return fr
def _sequential(defn_env: dict, async_reset: bool, cls, combinational_decorator: typing.Callable): # if not inspect.isclass(cls): # raise ValueError("sequential decorator only works with classes") initial_value_map = get_initial_value_map(cls.__init__, defn_env) call_def = get_ast(cls.__call__).body[0] inputs, output_type = get_io(call_def) io_list = gen_io_list(inputs, output_type, async_reset) circuit_combinational_output_type = [] circuit_combinational_args = [] circuit_combinational_call_args = [] comb_out_wiring = [] for name, type_ in inputs: type_ = astor.to_source(type_).rstrip() circuit_combinational_args.append(f"{name}: {type_}") circuit_combinational_call_args.append(f"io.{name}") comb_out_count = 0 for name, (value, type_, eval_type, eval_value) in initial_value_map.items(): if isinstance(eval_type, m.Kind): type_ = astor.to_source(type_).rstrip() circuit_combinational_args.append(f"self_{name}_O: {type_}") circuit_combinational_call_args.append(f"{name}") circuit_combinational_output_type.append(f"{type_}") comb_out_wiring.append(f"{name}.I <= comb_out[{comb_out_count}]\n") comb_out_count += 1 else: for key, value in eval_value.interface.ports.items(): if isinstance(value, (m.ClockType, m.AsyncResetType)): continue type_ = repr(type(value)) if value.isoutput(): circuit_combinational_args.append( f"self_{name}_{value}: m.{type_}") circuit_combinational_call_args.append(f"{name}.{value}") if value.isinput(): circuit_combinational_output_type.append(f"m.{type_}") comb_out_wiring.append( f"{name}.{value} <= comb_out[{comb_out_count}]\n") comb_out_count += 1 circuit_combinational_args = ', '.join(circuit_combinational_args) circuit_combinational_call_args = ', '.join( circuit_combinational_call_args) if isinstance(output_type, ast.Tuple): output_types = [] for i, elem in enumerate(output_type.elts): circuit_combinational_output_type.append( astor.to_source(elem).rstrip()) comb_out_wiring.append( f"io.O{i} <= comb_out[{comb_out_count + i}]\n") else: output_type_str = astor.to_source(output_type).rstrip() circuit_combinational_output_type.append(output_type_str) comb_out_wiring.append(f"io.O <= comb_out[{comb_out_count}]\n") tab = 4 * ' ' comb_out_wiring = (3 * tab).join(comb_out_wiring) circuit_combinational_output_type = ', '.join( circuit_combinational_output_type) circuit_combinational_body = [] for stmt in call_def.body: rewriter = RewriteSelfAttributes(initial_value_map) stmt = rewriter.visit(stmt) code = [stmt] if rewriter.calls_seen: code = rewriter.calls_seen + code stmt = RewriteReturn(initial_value_map).visit(stmt) for stmt in code: for line in astor.to_source(stmt).rstrip().splitlines(): circuit_combinational_body.append(line) circuit_combinational_body = ('\n' + 4 * tab).join(circuit_combinational_body) register_instances = gen_register_instances(initial_value_map, async_reset) register_instances = ('\n' + 3 * tab).join(register_instances) circuit_definition_str = circuit_definition_template.format( circuit_name=cls.__name__, io_list=io_list, register_instances=register_instances, circuit_combinational_args=circuit_combinational_args, circuit_combinational_output_type=circuit_combinational_output_type, circuit_combinational_body=circuit_combinational_body, circuit_combinational_call_args=circuit_combinational_call_args, comb_out_wiring=comb_out_wiring) tree = ast.parse(circuit_definition_str) if "DefineRegister" not in defn_env: tree = ast.Module([ ast.parse("from mantle import DefineRegister").body[0], ] + tree.body) circuit_def_constructor = ast_utils.compile_function_to_file( tree, 'make_' + cls.__name__, defn_env) circuit_def = circuit_def_constructor(combinational_decorator) if get_debug_mode() and getattr(circuit_def, "debug_info", False): circuit_def.debug_info = debug_info(circuit_def.debug_info.filename, circuit_def.debug_info.lineno, inspect.getmodule(cls)) return circuit_def
def critical(error_str: str): caller_mod_name = inspect.getmodule(inspect.stack()[1][0]).__name__ logging.getLogger('necrobot').critical('[{0}] {1}'.format(caller_mod_name, error_str), exc_info=True)
def install(self, spec, prefix): """Make the install targets""" with working_dir(self.build_directory): inspect.getmodule(self).ninja(*self.install_targets)
def warning(error_str: str): caller_mod_name = inspect.getmodule(inspect.stack()[1][0]).__name__ logging.getLogger('necrobot').warning('[{0}] {1}'.format(caller_mod_name, error_str))
def info(info_str: str): caller_mod_name = inspect.getmodule(inspect.stack()[1][0]).__name__ logging.getLogger('necrobot').info('[{0}] {1}'.format(caller_mod_name, info_str))
def calling_package(frame=2): frame = stack()[frame] return getmodule(frame[0]).__name__.split('.')[0]
def _get_servicer_add_func(self, servicer): for b in servicer.__bases__: if b.__name__.endswith('Servicer'): m = inspect.getmodule(b) return getattr(m, 'add_{}_to_server'.format(b.__name__))
def calling_module(frame=2): frame = stack()[frame] return getmodule(frame[0])
def build(self, spec, prefix): """Make the build targets""" options = ['-v'] options += self.build_targets with working_dir(self.build_directory): inspect.getmodule(self).ninja(*options)