def _find_annotated_callables(self, class_marker, component_ns, config, context):
    """Collect hooked callables from a component namespace.

    Scans ``component_ns`` for classes tagged with the ``class_marker``
    attribute and for plain callables carrying a ``__plugs__`` attribute.
    Marked classes have their ``__requires__`` entries loaded first (via
    ``self._require``) and are then instantiated with ``(context, config)``.

    :param class_marker: attribute name that marks classes for loading.
    :param component_ns: module/namespace object to scan.
    :param config: configuration object passed to marked-class constructors.
    :param context: shared context dict; instances whose marker value is a
        string are registered here under that name.
    :return: list of callables that carry a ``__plugs__`` attribute.
    """
    annotated_callables = []
    for name, member in inspect.getmembers(component_ns):
        # Find Classes marked for loading
        if inspect.isclass(member) and hasattr(member, class_marker):
            # Load declared requirements before instantiating
            if hasattr(member, '__requires__'):
                for req in member.__requires__:
                    self._require(req)
            obj = member(context, config)
            # Save the context for this obj if the class_marker is a str
            context_name = getattr(obj, class_marker)
            if isinstance(context_name, basestring):
                context[context_name] = obj
            # Search for hooked instance methods.  BUGFIX: use a distinct
            # loop variable so the outer loop's ``name`` is not shadowed.
            for attr_name, thing in inspect.getmembers(obj):
                if (isinstance(thing, collections.Callable)
                        and hasattr(thing, '__plugs__')):
                    annotated_callables.append(thing)
        # Find plain functions with hooks
        if (isinstance(member, collections.Callable)
                and hasattr(member, '__plugs__')):
            annotated_callables.append(member)
    return annotated_callables
def onOpen(self):
    # Called when a new client websocket session opens.
    print "New connection"
    # open a database connection for this session; read-only, so the session
    # can never mutate data
    self.connection = psycopg2.connect(config.POSTGRESQL_CONNECT_STRING)
    self.connection.set_session(readonly=True)
    # get available binary/text data formats by traversing the class hierarchy
    self.BinaryFormats = inspect.getmembers(sys.modules["formats.binary.binary"], inspect.isclass)
    self.TextFormats = inspect.getmembers(sys.modules["formats.text.text"], inspect.isclass)
    # hardcoded values for data formats, transferred to and from the client in
    # binary and text formats.  Keys are the wire-protocol format ids.
    self.DataFormatTypes = {
        1: "FormatGeoJSONTiles",
        2: "Format1BCachedTiles",
        3: "Format8BOnDemand",
        4: "Format2BOnDemand",
        5: "Format1BTilesOnDemand",
        6: "FormatGeoJSONTilesSimple",
        7: "Format1BCachedTilesSimple",
        8: "Format8BOnDemandSimple",
        9: "Format2BOnDemandSimple"
    }
    # get a reverse lookup table for event types (value -> key)
    self.rEventTypes = {v: k for k, v in events.EventTypes.items()}
    # event lookup table: handler instances keyed by event name
    self.eve = dict()
    self.eve["GetLayer"] = events.GetLayer()
    self.eve["GetAllLayers"] = events.GetAllLayers()
    self.eve["SyncTime"] = events.SyncTime()
def printExtensions():
    '''
    Echoes all entities in our extension module.
    Useful to create documentation.
    '''
    # Types: classes that are neither exceptions nor iterable
    print(" Types:")
    for name, o in inspect.getmembers(frepple):
        if not inspect.isclass(o) or issubclass(o, Exception) or hasattr(o, "__iter__"):
            continue
        print(" %s: %s" % (o.__name__, inspect.getdoc(o)))
    # Methods: plain routines
    print(" Methods:")
    for name, o in inspect.getmembers(frepple):
        if not inspect.isroutine(o):
            continue
        print(" %s: %s" % (o.__name__, inspect.getdoc(o)))
    # Exceptions: Exception subclasses
    print(" Exceptions:")
    for name, o in inspect.getmembers(frepple):
        if not inspect.isclass(o) or not issubclass(o, Exception):
            continue
        print(" %s" % (o.__name__))
    # Iterators: iterable classes
    print(" Iterators:")
    for name, o in inspect.getmembers(frepple):
        if not inspect.isclass(o) or not hasattr(o, "__iter__"):
            continue
        print(" %s: %s" % (o.__name__, inspect.getdoc(o)))
    # Other: everything that is neither a class nor a routine.
    # (The original chain of negated filters reduces exactly to this test:
    # every class is already covered by Types, Exceptions or Iterators above,
    # and routines were explicitly skipped.)
    print(" Other:")
    for name, o in inspect.getmembers(frepple):
        if inspect.isclass(o) or inspect.isroutine(o):
            continue
        print(" %s: %s" % (name, o))
def generate_tests(settings):
    """Create parameterized tests.

    Builds a ``unittest.TestSuite`` containing one test instance per
    (test method, page-of-A, page-of-B) combination for every test case
    named in ``settings['include']`` minus ``settings['exclude']``.

    :param settings: dict with keys 'cases' (module path), 'pdf_a', 'pdf_b'
        (file paths), 'include' and 'exclude' (test-case-name lists).
    :return: populated ``unittest.TestSuite``.
    """
    test_cases = unittest.TestSuite()
    cases = import_module(settings['cases'])
    # open() replaces the removed py2-only file() builtin (same behavior)
    pdf_a_im = pyPdf.PdfFileReader(open(settings['pdf_a'], "rb"))
    total_a_pages = pdf_a_im.getNumPages()
    pdf_b_im = pyPdf.PdfFileReader(open(settings['pdf_b'], "rb"))
    total_b_pages = pdf_b_im.getNumPages()
    settings['include'] = list(set(settings['include']) - set(settings['exclude']))
    for case_name in settings['include']:
        # getattr is the idiomatic spelling of __getattribute__ here
        TestClass = getattr(cases, case_name)
        setattr(TestClass, '_settings', settings)
        # Methods defined on the direct superclass are not tests of this class
        SuperClass = inspect.getmro(TestClass)[1]
        method_list = inspect.getmembers(TestClass, predicate=inspect.ismethod)
        super_method_list = inspect.getmembers(SuperClass, predicate=inspect.ismethod)
        test_method_list = list(set(method_list) - set(super_method_list))
        test_name_list = [method[0] for method in test_method_list
                          if method[0] != 'tearDownClass' and method[0] != 'setUpClass']
        for test_name in test_name_list:
            # One test instance per page pair (1-based page numbers)
            for pi in range(1, total_a_pages + 1):
                for pj in range(1, total_b_pages + 1):
                    test_cases.addTest(TestClass(test_name, pi, pj))
    return test_cases
def equationlist_3D():
    """Render an HTML table listing every 3D equation known to pyeq2.

    Walks every module in ``pyeq2.Models_3D``, instantiates each equation
    class in both "Default" and "Offset" form (skipping Offset when the
    class does not auto-generate one) and emits one table row per equation.

    :return: complete HTML document as a string.
    """
    # Accumulate fragments and join once at the end -- the original
    # repeated ``+=`` on a string was quadratic.
    parts = []
    parts.append("<table border=1>")
    for submodule in inspect.getmembers(pyeq2.Models_3D):
        if inspect.ismodule(submodule[1]):
            for equationClass in inspect.getmembers(submodule[1]):
                if inspect.isclass(equationClass[1]):
                    for extendedVersionName in ["Default", "Offset"]:
                        # Skip "Offset" variants for classes that do not
                        # auto-generate an offset form
                        if (-1 != extendedVersionName.find("Offset")) and (equationClass[1].autoGenerateOffsetForm == False):
                            continue
                        equation = equationClass[1]("SSQABS", extendedVersionName)
                        parts.append("<tr>")
                        parts.append("<td nowrap>3D " + submodule[0] + "</td>")
                        parts.append("<td nowrap>" + equation.GetDisplayName() + "</td>")
                        parts.append("<td nowrap>" + equation.GetDisplayHTML() + "</td>")
                        parts.append("</tr>")
    parts.append("</table>")
    return "<html><body>" + "".join(parts) + "</body></html>"
def test_coverage():
    """Make sure a new thrift RPC doesn't get added without minimal test coverage."""
    suffix = '_args'
    members = inspect.getmembers(AuroraAdmin) + inspect.getmembers(AuroraSchedulerManager)
    for member_name, _ in members:
        if not member_name.endswith(suffix):
            continue
        # Each generated *_args struct corresponds to one RPC; require a
        # matching test_<rpc> on the injection test class.
        rpc_name = member_name[:-len(suffix)]
        assert hasattr(TestSchedulerProxyAdminInjection, 'test_%s' % rpc_name), (
            'No test defined for RPC %s' % rpc_name)
def commit_uids(db_session, new_uids):
    """Add ``new_uids`` to the session and commit.

    On ``DataError`` (malformed / over-long object property), roll back,
    log detailed diagnostics for every uid and message part, and re-raise.

    :param db_session: SQLAlchemy session.
    :param new_uids: iterable of UID ORM objects to persist.
    :raises DataError: re-raised after diagnostics are logged.
    """
    try:
        msg = u"count: {}".format(len(new_uids))
        log.info("Commit new UIDs", message=msg,
                 new_committed_message_count=len(new_uids))
        db_session.add_all(new_uids)
        db_session.commit()
    except DataError:
        db_session.rollback()
        log.error("Issue inserting new UIDs into database. "
                  "This probably means that an object's property is "
                  "malformed or way too long, etc.")
        for uid in new_uids:
            log.error(uid)
            # Imported lazily: this diagnostic path is rare
            import inspect
            from pprint import pformat
            log.error(inspect.getmembers(uid))
            try:
                log.error(pformat(uid.__dict__, indent=2))
            except AttributeError:
                pass
            for part in uid.message.parts:
                log.error(inspect.getmembers(part))
                try:
                    log.error(pformat(part.__dict__, indent=2))
                except AttributeError:
                    pass
        # BUGFIX: bare raise preserves the original traceback
        # (``raise e`` resets it on py2).
        raise
def echo_module(mod, write=sys.stdout.write):
    """ Echo calls to functions and methods in a module. """
    # Wrap every top-level function in place...
    functions = inspect.getmembers(mod, inspect.isfunction)
    for func_name, func in functions:
        setattr(mod, func_name, echo(func, write))
    # ...then recurse into every class.
    classes = inspect.getmembers(mod, inspect.isclass)
    for _, klass in classes:
        echo_class(klass, write)
def _read_options(cls, name, bases, attrs):
    """
    Parses `ModelOptions` instance into the options value attached to
    `Model` instances.
    """
    merged = {}
    # Walk bases from most- to least-distant so nearer bases win.
    for base in reversed(bases):
        if hasattr(base, "_options"):
            for key, value in inspect.getmembers(base._options):
                if not key.startswith("_") and key != "klass":
                    merged[key] = value
    options_class = attrs.get('__optionsclass__', ModelOptions)
    if 'Options' in attrs:
        # The class's own Options declaration overrides inherited values;
        # "roles" is merged rather than replaced.
        for key, value in inspect.getmembers(attrs['Options']):
            if key.startswith("_"):
                continue
            if key == "roles":
                combined_roles = merged.get("roles", {}).copy()
                combined_roles.update(value)
                merged["roles"] = combined_roles
            else:
                merged[key] = value
    return options_class(cls, **merged)
def __contains__(self, item):  # arm, fk
    """
    Return True if item is used in this module.
    Mainly used for efficient reloading.

    :param item: a Plugin or a module object.
    :raises NotImplementedError: for any other item type.
    """
    if isinstance(item, Plugin):
        item_module = item.module
    elif inspect.ismodule(item):
        item_module = item
    else:
        raise NotImplementedError("Unexpected type {0} for value {1}.".format(type(item), item))

    # If there's no associated module, we deduct that it cannot be contained.
    # BUGFIX: check the resolved item_module, not item.module -- plain module
    # objects have no ``module`` attribute, so the old check raised
    # AttributeError for the inspect.ismodule branch.
    if item_module is None:
        return False

    item_module_name = item_module.__name__

    # Check for module import
    # ex: import my_module
    for module_name, module in inspect.getmembers(self.module, inspect.ismodule):
        if module == item_module:
            return True

    # Check class in case of indirect module import
    # ex: from my_module import my_class
    for class_name, cls in inspect.getmembers(self.module, inspect.isclass):
        if cls.__module__ == item_module_name:
            return True

    return False
def echo_class(klass, write=sys.stdout.write):
    """ Echo calls to class methods and static functions """
    # Bound/unbound methods are wrapped in place...
    methods = inspect.getmembers(klass, inspect.ismethod)
    for _, method in methods:
        echo_instancemethod(klass, method, write)
    # ...plain functions found on the class are re-attached as staticmethods.
    plain_functions = inspect.getmembers(klass, inspect.isfunction)
    for _, fn in plain_functions:
        setattr(klass, name(fn), staticmethod(echo(fn, write)))
def wrap_typecheck_functions(prefix="generator", ignore="generator.tools.typecheck"):
    """Wrap every annotated function/method in matching modules with typecheck().

    Scans all loaded modules whose name starts with ``prefix`` (plus
    __main__), collects plain functions and class methods, and replaces
    each annotated one with its typecheck-wrapped version at every
    location it was found.
    """
    functions = set()
    # function -> list of (owner, attribute-name) places to patch
    fixup = defaultdict(list)

    # Collect all candidate functions (distinct inner names avoid shadowing
    # the outer loop variables).
    for mod_name, module in sys.modules.items():
        if not (mod_name.startswith(prefix) or mod_name == "__main__"):
            continue
        for attr_name, element in getmembers(module):
            # Plain functions
            if isfunction(element):
                functions.add(element)
                fixup[element].append((module, attr_name))
            # Class methods
            if isclass(element):
                for meth_name, method in getmembers(element):
                    if isfunction(method):
                        functions.add(method)
                        fixup[method].append((element, meth_name))

    for function in functions:
        # Only annotated functions outside the typecheck module itself
        if len(function.__annotations__) == 0:
            continue
        if function.__module__ == ignore:
            continue
        wrapped = typecheck(function)
        for owner, attrname in fixup[function]:
            setattr(owner, attrname, wrapped)
def discover_handler_classes(handlers_package):
    """
    Looks for handler classes within handler path module.

    Currently it's not looking deep into nested module.

    :param handlers_package: module path to handlers
    :type handlers_package: string
    :return: list of handler classes (None when handlers_package is None)
    """
    if handlers_package is None:
        return

    # Add working directory into PYTHONPATH to import developer packages
    sys.path.insert(0, os.getcwd())

    # NOTE: the former ``try/except ImportError: raise`` wrapper was a no-op
    # and has been removed; import errors still propagate unchanged.
    package = import_module(handlers_package)
    handler_classes = [class_obj for _, class_obj
                       in inspect.getmembers(package, is_handler_class)]

    # Continue searching for modules if the import target is a package
    if hasattr(package, '__path__'):
        for _, modname, _ in pkgutil.iter_modules(package.__path__):
            module = import_module('{package}.{module}'.format(package=package.__name__, module=modname))
            handler_classes += [class_obj for _, class_obj
                                in inspect.getmembers(module, is_handler_class)]

    return handler_classes
def print_classes():
    """Dump the classes, methods and functions of module ``jb`` to file ``f1``."""
    for member_name, member in inspect.getmembers(jb):
        if inspect.isclass(member):
            f1.write('\n')
            f1.write("Class Name -> " + member_name)
            f1.write('\n')
            # Walk one level into the class for nested entities
            for sub_name, sub in inspect.getmembers(member):
                if inspect.isclass(sub):
                    f1.write(" " + "SubClass Name -> " + sub_name)
                    f1.write('\n')
                if inspect.ismethod(sub):
                    f1.write(" " + "SubMethod Name -> " + sub_name)
                    f1.write('\n')
                if inspect.isfunction(sub):
                    f1.write(" " + "SubFunction Name -> " + sub_name)
                    f1.write('\n')
        if inspect.ismethod(member):
            f1.write('')
            f1.write('\n')
            f1.write("Method Name -> " + member_name)
            f1.write('\n')
        if inspect.isfunction(member):
            f1.write('')
            f1.write('\n')
            f1.write("Function Name -> " + member_name)
            f1.write('\n')
def __init__(self, verb_decorators):
    """Assemble the command namespace exposed to verb implementations:
    decorator methods, option/argument classes, voltdbclient symbols and
    the bundle/utility conveniences."""
    # Add all verb_decorators methods not starting with '_' as members.
    for method_name, method in inspect.getmembers(verb_decorators, inspect.ismethod):
        if method_name.startswith('_'):
            continue
        setattr(self, method_name, method)
    # Expose all BaseOption and BaseArgument derivatives used for declaring
    # options and arguments in command decorators.
    for cls_name, cls in inspect.getmembers(cli, inspect.isclass):
        if issubclass(cls, (cli.BaseOption, cli.BaseArgument)):
            setattr(self, cls_name, cls)
    # Expose specific useful voltdbclient symbols for Volt client commands.
    self.VoltProcedure = voltdbclient.VoltProcedure
    self.VoltResponse = voltdbclient.VoltResponse
    self.VoltException = voltdbclient.VoltException
    self.VoltTable = voltdbclient.VoltTable
    self.VoltColumn = voltdbclient.VoltColumn
    self.FastSerializer = voltdbclient.FastSerializer
    # For declaring multi-command verbs like "show".
    self.Modifier = Modifier
    # Bundles
    self.ConnectionBundle = ConnectionBundle
    self.ClientBundle = ClientBundle
    self.AdminBundle = AdminBundle
    self.ServerBundle = ServerBundle
    # As a convenience expose the utility module so that commands don't
    # need to import it.
    self.utility = utility
def test_docstring_parameters():
    """Test module docsting formatting"""
    # docscrape is optional; presumably only importable from a source
    # checkout -- skip otherwise.
    if docscrape is None:
        raise SkipTest("This must be run from the vispy source directory")
    incorrect = []
    for name in public_modules:
        # Import the dotted module and descend to the leaf submodule
        module = __import__(name, globals())
        for submod in name.split(".")[1:]:
            module = getattr(module, submod)
        classes = inspect.getmembers(module, inspect.isclass)
        for cname, cls in classes:
            # Private classes are not part of the documented API
            if cname.startswith("_"):
                continue
            # Capture warnings raised while parsing the class docstring
            with warnings.catch_warnings(record=True) as w:
                cdoc = docscrape.ClassDoc(cls)
            if len(w):
                raise RuntimeError("Error for __init__ of %s in %s:\n%s"
                                   % (cls, name, w[0]))
            if hasattr(cls, "__init__"):
                incorrect += check_parameters_match(cls.__init__, cdoc)
            for method_name in cdoc.methods:
                method = getattr(cls, method_name)
                # skip classes that are added as attributes of classes
                if inspect.ismethod(method) or inspect.isfunction(method):
                    incorrect += check_parameters_match(method)
            if hasattr(cls, "__call__"):
                incorrect += check_parameters_match(cls.__call__)
        # Module-level public functions
        functions = inspect.getmembers(module, inspect.isfunction)
        for fname, func in functions:
            if fname.startswith("_"):
                continue
            incorrect += check_parameters_match(func)
    msg = "\n" + "\n".join(sorted(list(set(incorrect))))
    if len(incorrect) > 0:
        # Append the violation count and fail with the full report
        msg += "\n\n%s docstring violations found" % msg.count("\n")
        raise AssertionError(msg)
def print_functions(f, outdir):
    """Write Sphinx automodule/autosummary sections for every fnss subpackage.

    For each subpackage, emits a section header followed by one
    ``automodule``/``autosummary`` block per (non-test) module, listing
    the functions declared in that module's ``__all__``.

    :param f: open writable file object for the generated reST.
    :param outdir: toctree output directory (made relative to f's location).
    """
    outdir = os.path.relpath(outdir, os.path.dirname(f.name))
    packages = get_subpackages(fnss)
    for pkg in packages:
        header = ":mod:`%s` package" % pkg
        f.write(header + "\n" + "-"*len(header) + "\n"*2)
        # Resolve attributes directly instead of eval()-ing built strings.
        pkg_obj = getattr(fnss, pkg)
        modules = inspect.getmembers(pkg_obj, predicate=inspect.ismodule)
        module_names = [mod[0] for mod in modules if not mod[0].startswith("test")]
        module_paths = {}  # full module path -> short attribute name on fnss
        module_objs = {}   # full module path -> module object
        for name in module_names:
            mod_obj = getattr(fnss, name)
            module_paths[mod_obj.__name__] = name
            module_objs[mod_obj.__name__] = mod_obj
        sorted_paths = sorted(module_paths.keys())
        for mod in sorted_paths:
            mod_obj = module_objs[mod]
            functions = inspect.getmembers(mod_obj, predicate=inspect.isfunction)
            # Only functions explicitly exported via __all__
            function_names = [func[0] for func in functions
                              if (hasattr(mod_obj, '__all__')
                                  and func[0] in mod_obj.__all__)]
            f.write(".. automodule:: %s.%s.%s\n"
                    ".. autosummary::\n"
                    " :toctree: %s/\n\n"
                    % ('fnss', pkg, module_paths[mod], outdir))
            for func in function_names:
                f.write(" %s\n" % func)
            f.write("\n")
        f.write("\n")
def test_all_classes(mod, skip_test_list = list(), skip_containers = True ):
    """Default-construct and extension-test every Boost.Python class in mod.

    returns True if all of the class extensions fail (per the original
    contract: True only when no tested class triggers the failure path).

    :param mod: module whose members are scanned.
    :param skip_test_list: class names to skip entirely.
    :param skip_containers: kept for interface compatibility; iterables are
        always skipped via the __iter__ check below.
    """
    return_value = True
    # loop over all the members of the module
    try:
        foo = inspect.getmembers(mod)
    except AttributeError:
        # Diagnostic dump if inspect itself misbehaves
        print("Time to inspect inspect...")
        print(dir(inspect))
        print("name = %s" % inspect.__name__)
        print("dumping docs...\n %s" % inspect.__doc__)
    msg_fmt = "Python dynamism is alive and well for (%s) "
    for k, v in inspect.getmembers(mod):
        # skip classes in this list
        if k in skip_test_list:
            continue
        # check the type is a Boost.Python.class,
        # which is going to be anything we provided pybindings for
        if str(type(v)).find("Boost.Python.class") > 0:
            try:
                # create an instance of the class;
                # this will fail for classes with non-trivial constructors
                o = v()
                # we want to skip containers
                if hasattr(o, "__iter__"):
                    continue
                # now test it
                if not ExtendClassFails(o):
                    print(msg_fmt % k)
                    return_value = False
            except Exception:
                # BUGFIX: narrowed from a bare ``except:`` so SystemExit and
                # KeyboardInterrupt are no longer swallowed.
                warnings.warn("class %s needs a nullary constructor. Either expose one or add '%s' to skip_test_list." % (k, k))
                return_value = False
                continue
    return return_value
def test_docstring_parameters():
    """Test module docstring formatting."""
    from numpydoc import docscrape
    incorrect = []
    for name in public_modules:
        # Importing some modules emits warnings we deliberately tolerate
        with pytest.warns(None):  # traits warnings
            module = __import__(name, globals())
        # Descend from the top-level package to the leaf submodule
        for submod in name.split('.')[1:]:
            module = getattr(module, submod)
        classes = inspect.getmembers(module, inspect.isclass)
        for cname, cls in classes:
            # Private classes are skipped unless explicitly whitelisted
            if cname.startswith('_') and cname not in _doc_special_members:
                continue
            with pytest.warns(None) as w:
                cdoc = docscrape.ClassDoc(cls)
            for ww in w:
                # The ABC deprecation warning is expected noise; anything
                # else means the docstring itself is broken
                if 'Using or importing the ABCs' not in str(ww.message):
                    raise RuntimeError('Error for __init__ of %s in %s:\n%s'
                                       % (cls, name, ww))
            if hasattr(cls, '__init__'):
                incorrect += check_parameters_match(cls.__init__, cdoc, cls)
            for method_name in cdoc.methods:
                method = getattr(cls, method_name)
                incorrect += check_parameters_match(method, cls=cls)
            if hasattr(cls, '__call__'):
                incorrect += check_parameters_match(cls.__call__, cls=cls)
        # Module-level public functions
        functions = inspect.getmembers(module, inspect.isfunction)
        for fname, func in functions:
            if fname.startswith('_'):
                continue
            incorrect += check_parameters_match(func)
    msg = '\n' + '\n'.join(sorted(list(set(incorrect))))
    if len(incorrect) > 0:
        raise AssertionError(msg)
def cls_decorator(cls):
    # NOTE(review): relies on ``method_list``, ``interface`` and ``container``
    # as free variables from an enclosing scope (presumably a decorator
    # factory) -- confirm against the surrounding code.
    # Retrieve the base class of the composite. Inspect its methods and decide
    # which ones will be overridden.
    def no_special_no_private(x):
        # Candidate methods: bound/unbound, public (no leading underscore)
        return inspect.ismethod(x) and not x.__name__.startswith('_')

    # Patch the behavior of each of the methods in the previous list. This is
    # done associating an instance of the descriptor below to any method that
    # needs to be patched.
    class IterateOver(object):
        """
        Decorator used to patch methods in a composite. It iterates over all
        the items in the instance containing the associated attribute and
        calls for each of them an attribute with the same name
        """
        def __init__(self, name, func=None):
            self.name = name
            self.func = func

        def __get__(self, instance, owner):
            def getter(*args, **kwargs):
                # Fan the call out to every contained item
                for item in instance:
                    getattr(item, self.name)(*args, **kwargs)
            # If we are using this descriptor to wrap a method from an
            # interface, then we must conditionally use the `functools.wraps`
            # decorator to set the appropriate fields.
            if self.func is not None:
                getter = functools.wraps(self.func)(getter)
            return getter

    dictionary_for_type_call = {}
    # Construct a dictionary with the methods explicitly passed as name
    if method_list is not None:
        # [email protected]: method_list_dict = {name: IterateOver(name) for name in method_list}
        method_list_dict = {}
        for name in method_list:
            method_list_dict[name] = IterateOver(name)
        dictionary_for_type_call.update(method_list_dict)

    # Construct a dictionary with the methods inspected from the interface
    if interface is not None:
        ##########
        # [email protected]: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)}
        interface_methods = {}
        for name, method in inspect.getmembers(interface, predicate=no_special_no_private):
            interface_methods[name] = method
        ##########
        # [email protected]: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()}
        # (py2-only ``iteritems`` below)
        interface_methods_dict = {}
        for name, method in interface_methods.iteritems():
            interface_methods_dict[name] = IterateOver(name, method)
        ##########
        dictionary_for_type_call.update(interface_methods_dict)

    # Get the methods that are defined in the scope of the composite class and
    # override any previous definition
    ##########
    # [email protected]: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)}
    cls_method = {}
    for name, method in inspect.getmembers(cls, predicate=inspect.ismethod):
        cls_method[name] = method
    ##########
    dictionary_for_type_call.update(cls_method)

    # Generate the new class on the fly and return it
    # FIXME : inherit from interface if we start to use ABC classes?
    wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call)
    return wrapper_class
def get_all_masks(whitelist=["mask"], directory=THISDIR):
    """import all feature masks and return a list"""
    masks = list()
    logging.debug(directory)
    # Temporarily extend sys.path so the mask modules are importable
    sys.path.append(directory)
    # Import the base module first to identify the Mask base class
    mask = __import__("mask")
    Mask = dict(inspect.getmembers(mask, inspect.isclass))["Mask"]
    for rel_path in glob.glob(join(directory, "*.py")):
        module_name = basename(rel_path)[:-3]
        if "__" in module_name or module_name not in whitelist:
            logging.debug("ignored %s", module_name)
        else:
            logging.debug("importing %s", module_name)
            imported = __import__(module_name)
            for cls_name, class_ in inspect.getmembers(imported, inspect.isclass):
                logging.debug("%s: %s(%s)", cls_name, class_.__bases__, class_)
                # search for Mask in parent classes
                if __has_parent(class_, Mask) and class_ != Mask:
                    masks.append(class_)
    sys.path.remove(directory)
    logging.debug(masks)
    return masks
def test_docstring_parameters():
    """Test module docsting formatting"""
    # docscrape is optional; presumably only importable from a source
    # checkout -- skip otherwise.
    if docscrape is None:
        raise SkipTest('This must be run from the mne-python source directory')
    incorrect = []
    for name in public_modules:
        module = __import__(name, globals())
        classes = inspect.getmembers(module, inspect.isclass)
        for cname, cls in classes:
            # Private classes are not part of the documented API
            if cname.startswith('_'):
                continue
            # Capture warnings raised while parsing the class docstring
            with warnings.catch_warnings(record=True) as w:
                cdoc = docscrape.ClassDoc(cls)
            if len(w):
                raise RuntimeError('Error for __init__ of %s in %s:\n%s'
                                   % (cls, name, w[0]))
            if hasattr(cls, '__init__'):
                incorrect += check_parameters_match(cls.__init__, cdoc)
            for method_name in cdoc.methods:
                method = getattr(cls, method_name)
                incorrect += check_parameters_match(method)
            if hasattr(cls, '__call__'):
                incorrect += check_parameters_match(cls.__call__)
        # Module-level public functions
        functions = inspect.getmembers(module, inspect.isfunction)
        for fname, func in functions:
            if fname.startswith('_'):
                continue
            incorrect += check_parameters_match(func)
    if len(incorrect) > 0:
        print('\n'.join(sorted(incorrect)))
        raise AssertionError('%s docstring errors' % len(incorrect))
def __init__(self):
    """Populate the model/message registries from the interface package."""
    for cls_name, cls in inspect.getmembers(interface.objects, inspect.isclass):
        model_classes[cls_name] = cls
    for cls_name, cls in inspect.getmembers(interface.messages, inspect.isclass):
        message_classes[cls_name] = cls
def getexternalmodules(module):
    """
    returns list of external dependency modules
    excluding builtins and library packages
    """
    # Standard-library locations: under sys.prefix but not site-packages
    paths = (os.path.abspath(p) for p in sys.path)
    stdlib = {p for p in paths
              if p.startswith(sys.prefix) and 'site-packages' not in p}

    # get modules excluding current package
    packagename = module.__name__.split(".")[0]
    modules = [m for name, m in i.getmembers(module, i.ismodule)
               if m.__name__.split(".")[0] != packagename]

    # remove child modules (keep only the top-most parents)
    remove = []
    for parent in modules:
        remove.extend([child for child in modules
                       if parent != child
                       and child.__name__.startswith(parent.__name__)])
    for child in remove:
        modules.remove(child)

    # get packages for imported classes
    classes = []
    for name, klass in i.getmembers(module, i.isclass):
        classpackagename = klass.__module__.split(".")[0]
        if classpackagename != packagename:
            classes.append(import_module(classpackagename))

    # remove duplicates, builtin, stdlib
    external = [m for m in list(set(modules + classes))
                if not (m.__name__ in sys.builtin_module_names
                        or os.path.dirname(getmodpath(m)) in stdlib)]
    return external
def get_members(self):
    '''get_members(self): get information on functions, classes, methods,
    handlers, etc. from the Python code'''
    self.functions = {}  # name -> routine object
    self.classes = {}    # name -> class object
    self.methods = []    # (class name, method name) pairs
    # NOTE(review): __import__ on a dotted name returns the TOP-LEVEL
    # package, not the leaf module -- confirm self.module is never dotted.
    self.module = __import__(self.module)
    # functions
    l = inspect.getmembers(self.module, inspect.isroutine)
    for name, obj in l:
        self.functions[name] = obj
    # classes
    l = inspect.getmembers(self.module, inspect.isclass)
    for name, obj in l:
        self.classes[name] = obj
        # methods in the class
        methods = inspect.getmembers(obj, inspect.ismethod)
        for m in methods:
            self.methods.append((name, m[0]))
            # find init method (not __init__) since we will be replacing
            # it with a new method each time
            if m[0] == 'init':
                self.init_lines, self.init_start = \
                    inspect.getsourcelines(m[1])
def find_untested_methods():
    """finds all untested functions in this module by searching for method
    names in test case method names.

    :return: list of module-level function names with no matching test_*
        method on any TestCase subclass in this module.
    """
    untested = []
    # Functions we deliberately never require tests for
    avoid_funcs = ["main", "run_tests", "run_main", "copy", "deepcopy"]
    test_funcs = []
    # get a list of all classes in this module
    classes = inspect.getmembers(sys.modules[__name__], inspect.isclass)
    for (name, klass) in classes:
        # only look at those that have tests
        if issubclass(klass, unittest.TestCase):
            # look at this class' methods
            funcs = inspect.getmembers(klass, inspect.ismethod)
            for (name2, func) in funcs:
                # store the ones that begin with test_
                # (startswith replaces the redundant '"test_" in name2 and
                # name2[0:5] == "test_"' pair -- the slice test already
                # implied the containment test)
                if name2.startswith("test_"):
                    test_funcs.append([name2, func])
    # assemble a list of all test method names (test_x, test_y, ..)
    tested_names = [funcz[0] for funcz in test_funcs]
    # now get a list of all functions in this module
    funcs = inspect.getmembers(sys.modules[__name__], inspect.isfunction)
    for (name, func) in funcs:
        # we don't care about some of these
        if name in avoid_funcs:
            continue
        # skip functions beginning with _
        if name[0] == "_":
            continue
        # check if this function has a test named after it
        has_test = check_has_test(name, tested_names)
        if not has_test:
            untested.append(name)
    return untested
def getAnimatableAttributes(object):
    # return two dicts that contain a dict for each attribute of object
    # that can be animated. The first dict is for attributes found in
    # actorsDescr, the second is for attributes picked up on the fly

    # merge the dict of attribute for all base classes
    d1 = {}
    for klass, d2 in actorsDescr.items():
        if isinstance(object, klass):
            d1.update(d2)

    d2 = {}
    # find all attributes that are float
    attrs = inspect.getmembers(object, lambda x: isinstance(x, float))
    for name, value in attrs:
        if d1.has_key(name):  # py2-only dict.has_key; skip ones already described
            continue
        d2[name] = {
            'interp': FloatScalarInterpolator,
            'interpKw': {'values': [value, value]},
            'valueWidget': ThumbWheel,
            'valueWidgetKw': {'type': 'float', 'initialValue': value},
        }
    # find all attributes that are bool or int
    # (isinstance(x, int) matches bool too, which is intended here)
    attrs = inspect.getmembers(object, lambda x: isinstance(x, int))
    for name, value in attrs:
        if d1.has_key(name):
            continue
        d2[name] = {
            'interp': IntScalarInterpolator,
            'interpKw': {'values': [value, value]},
            'valueWidget': ThumbWheel,
            'valueWidgetKw': {'type': 'int', 'initialValue': value},
        }
    return d1, d2
def _get_all_props(self):
    """Return all properties accessible through get.

    Collects non-callable, non-underscore attribute names from the
    release, its element_type, and each element_type initializer object.
    """
    all_props = []

    # release properties
    rel_props = getmembers(self.release,
                           predicate=lambda p: (not ismethod(p)))
    rel_props = [a[0] for a in rel_props if not a[0].startswith('_')]
    all_props.extend(rel_props)

    # element_type properties.
    # NOTE: the exclusion below was previously annotated by a stray string
    # literal (a no-op statement); it is now a real comment.  '_state' is
    # excluded explicitly (already covered by the underscore filter, kept
    # for clarity/safety).
    et_props = getmembers(self.element_type,
                          predicate=lambda p: (not ismethod(p)))
    et_props = [a[0] for a in et_props
                if not a[0].startswith('_') and a[0] != '_state']
    all_props.extend(et_props)

    # properties for each of the initializer objects
    i_props = []
    for val in self.element_type.initializers:
        toadd = getmembers(val, lambda p: (not ismethod(p)))
        i_props.extend([a[0] for a in toadd
                        if not a[0].startswith('_') and a[0] != '_state'])
    all_props.extend(i_props)

    return all_props
def generateConfigFile(self):
    """Write a default pisi.conf under <root>/etc/pisi/ unless one exists.

    Sections are derived from the *Defaults classes in pisi.configfile;
    empty/None defaults are written as commented-out entries.
    """
    import pisi.configfile
    destpath = os.path.join(self.root, "etc/pisi/")
    if not os.path.exists(destpath):
        os.makedirs(destpath)

    confFile = os.path.join(destpath, "pisi.conf")
    if os.path.isfile(confFile):  # Don't overwrite existing pisi.conf
        return

    # Context manager guarantees the file is closed (the old code leaked
    # the handle on any error).
    with open(confFile, "w") as pisiconf:
        klasses = inspect.getmembers(pisi.configfile, inspect.isclass)
        defaults = [klass for klass in klasses if klass[0].endswith('Defaults')]

        for d in defaults:
            # "BuildDefaults" -> section "[build]"
            section_name = d[0][:-len('Defaults')].lower()
            pisiconf.write("[%s]\n" % section_name)

            section_members = [m for m in inspect.getmembers(d[1])
                               if not m[0].startswith('__')
                               and not m[0].endswith('__')]

            for member in section_members:
                # 'is None' replaces the old '== None' comparison
                if member[1] is None or member[1] == "":
                    pisiconf.write("# %s = %s\n" % (member[0], member[1]))
                else:
                    pisiconf.write("%s = %s\n" % (member[0], member[1]))
            pisiconf.write('\n')
def process_members(self, package_name, mod):
    """
    Process all members of the package or module passed.
    """
    name = mod.__name__
    for k, m in inspect.getmembers(mod):
        self.log.debug("in %s processing element %s" % (mod.__name__, k))
        if not inspect.isclass(m) and hasattr(m, '__module__') and m.__module__ and m.__module__.startswith(package_name):
            # Non-class member that was defined inside this package:
            # key by its defining module
            key = "%s.%s" % (m.__module__, k)
            self.fetch_item_content(key, m)
        elif inspect.isclass(m) and m.__module__.startswith(package_name):
            # Class defined inside this package: record its source, doc
            # and comments under the current module's name
            key = "%s.%s" % (mod.__name__, k)
            try:
                item_content = inspect.getsource(m)
                self.artifact.output_data.append("%s:doc" % key, inspect.getdoc(m))
                self.artifact.output_data.append("%s:comments" % key, inspect.getcomments(m))
                self.add_source_for_key(key, item_content)
            except IOError:
                # Source unavailable (e.g. extension module) -- store empty
                self.log.debug("can't get source for %s" % key)
                self.add_source_for_key(key, "")
            # Also process each member of the class itself
            try:
                for ck, cm in inspect.getmembers(m):
                    key = "%s.%s.%s" % (name, k, ck)
                    self.fetch_item_content(key, cm)
            except AttributeError:
                pass
        else:
            # Anything else (external member, or one without __module__)
            key = "%s.%s" % (name, k)
            self.fetch_item_content(key, m)
from inspect import getmembers, isfunction
from yahoo_fin import stock_info

# All (name, function) pairs exported by yahoo_fin.stock_info.
# getmembers with the isfunction predicate yields exactly the pairs the
# former filtered comprehension produced.
functions_list = getmembers(stock_info, isfunction)
from kivy.uix.popup import Popup
from kivy.uix.label import Label

# Import required modules
from pathlib import Path
import inspect
import json

# Import required user modules (optional; only when the file exists)
if Path('user/customPanels.py').is_file():
    import user.customPanels

# Define panel list including custom user panels if required.
# A custom panel is any class in user.customPanels whose name contains
# 'Panel'; the list stores the name with the 'Panel' suffix stripped.
customPanels = []
if Path('user/customPanels.py').is_file():
    for cls in inspect.getmembers(user.customPanels, inspect.isclass):
        if cls[1].__module__ == 'user.customPanels' and 'Panel' in cls[0]:
            customPanels.append(cls[0].split('Panel')[0])
# Built-in panels shipped with the application
PanelList = ['Forecast', 'Sager', 'Temperature', 'WindSpeed', 'SunriseSunset',
             'MoonPhase', 'Rainfall', 'Lightning', 'Barometer']
primaryPanelList = PanelList + customPanels
secondaryPanelList = ['None'] + PanelList + customPanels


class ScrollOptions(SettingOptions):
    """ Define the ScrollOptions settings type """

    def _create_popup(self, instance):
        # Create the popup and scrollview
def execute_tests(self):
    # Run every enabled test in every category, ordered by the optional
    # test_order attribute, writing per-method log files and pcap captures.
    # NOTE(review): py2 semantics assumed -- map() must return a list for
    # the .sort() call below.
    is_test_method = lambda x: inspect.ismethod(x) and x.__name__.startswith("test_")
    for category in sorted(self.categories):
        logger.start_category(category)
        for test in self.categories[category]:
            # Tests may opt out via an ``enabled`` attribute
            if not getattr(test, 'enabled', True):
                continue
            test_methods = map(lambda x: x[1], inspect.getmembers(test, is_test_method))
            # def foo(x):
            #     print(x)
            # inspect.getmembers(test, foo)
            # print(test.__dict__)
            def key(meth):
                # Sort by position in test.test_order when provided;
                # methods missing from it sort last (ValueError path)
                try:
                    if getattr(test, 'test_order', None):
                        return test.test_order.index(meth.__name__)
                    return 0
                except ValueError:
                    return len(test_methods)
            test_methods.sort(key=key)
            logger.start_test(test.description)
            if getattr(test, "init", None):
                test.init()
            for test_method in test_methods:
                try:
                    test.start_test_method()
                    # Optional before/after hooks bracket each method
                    if getattr(test, "before", None):
                        test.before()
                    test_method()
                    if getattr(test, "after", None):
                        test.after()
                except EndTestMethodException:
                    continue
                except EndTestException:
                    break
                except Exception as e:
                    import traceback
                    traceback.print_exc()
                    test.add_exception(e)
                finally:
                    # Always close the method, flush its log and capture
                    test.end_test_method()
                    f = open("%s.log" % test_method.__name__, 'w')
                    for exception in test.method_exceptions:
                        if isinstance(exception, AssertionError):
                            f.write("#Assertion Error: " + exception.message + "\n")
                    f.close()
                    if len(self.partov_server.packets) > 0:
                        wrpcap("%s.cap" % test_method.__name__, self.partov_server.packets)
                    self.partov_server.packets = []
                    self.method_exceptions = []
            # Report the first recorded exception, or green when none
            for exception in test.exceptions:
                if isinstance(exception, AssertionError):
                    logger.end_test(" " + exception.message, "red")
                    break
                else:
                    logger.end_test(" [EXCEPTION] " + str(exception), "red")
                    break
            else:
                logger.end_test("", "green")
            if getattr(test, "end", None):
                test.end()
        logger.line_break()
def load_tests(self, test_module):
    """Instantiate every Test subclass found in test_module and categorize them."""
    def is_test_class(x):
        # Concrete subclasses only -- exclude the Test base itself
        return inspect.isclass(x) and issubclass(x, Test) and x != Test
    test_classes = inspect.getmembers(test_module, is_test_class)
    test_instances = [cls(self.client_manager, self.partov_server, self.mock_server)
                      for _, cls in test_classes]
    self.tests = test_instances
    self.categorize_tests(test_instances)
def get_classes(_name):
    """Return all classes defined directly in the module registered as _name.

    :param _name: key of the module in ``sys.modules``.
    :return: list of class objects whose ``__module__`` equals _name.
    """
    return [
        obj
        for name, obj in inspect.getmembers(sys.modules[_name], inspect.isclass)
        # BUGFIX: compare with ``==`` -- the old ``is`` compared string
        # *identity* and only worked when both strings happened to be
        # interned to the same object.
        if obj.__module__ == _name
    ]
def __init__(self, args):
    """Resolve Azure credentials and cloud environment from *args*.

    Fails fast (via self.fail) when credentials are missing or incomplete.
    Builds either ServicePrincipalCredentials (client_id/secret/tenant) or
    UserPassCredentials (ad_user/password), pinned to the resolved cloud.

    NOTE(review): ``e.message`` and the bare ``urlparse`` module are
    Python-2-only; this block predates py3 support.
    """
    self._args = args
    self._cloud_environment = None
    # SDK clients are created lazily elsewhere.
    self._compute_client = None
    self._resource_client = None
    self._network_client = None
    self.debug = False
    if args.debug:
        self.debug = True
    self.credentials = self._get_credentials(args)
    if not self.credentials:
        self.fail(
            "Failed to get credentials. Either pass as parameters, set environment variables, "
            "or define a profile in ~/.azure/credentials.")
    # if cloud_environment specified, look up/build Cloud object
    raw_cloud_env = self.credentials.get('cloud_environment')
    if not raw_cloud_env:
        self._cloud_environment = azure_cloud.AZURE_PUBLIC_CLOUD  # SDK default
    else:
        # try to look up "well-known" values via the name attribute on azure_cloud members
        all_clouds = [
            x[1] for x in inspect.getmembers(azure_cloud)
            if isinstance(x[1], azure_cloud.Cloud)
        ]
        matched_clouds = [x for x in all_clouds if x.name == raw_cloud_env]
        if len(matched_clouds) == 1:
            self._cloud_environment = matched_clouds[0]
        elif len(matched_clouds) > 1:
            self.fail(
                "Azure SDK failure: more than one cloud matched for cloud_environment name '{0}'"
                .format(raw_cloud_env))
        else:
            # Not a well-known name: treat it as a metadata discovery URL.
            if not urlparse.urlparse(raw_cloud_env).scheme:
                self.fail(
                    "cloud_environment must be an endpoint discovery URL or one of {0}"
                    .format([x.name for x in all_clouds]))
            try:
                self._cloud_environment = azure_cloud.get_cloud_from_metadata_endpoint(
                    raw_cloud_env)
            except Exception as e:
                self.fail(
                    "cloud_environment {0} could not be resolved: {1}".
                    format(raw_cloud_env, e.message))
    if self.credentials.get('subscription_id', None) is None:
        self.fail("Credentials did not include a subscription_id value.")
    self.log("setting subscription_id")
    self.subscription_id = self.credentials['subscription_id']
    # Prefer service-principal auth when all three attributes are present;
    # fall back to user/password (AD) auth.
    if self.credentials.get('client_id') is not None and \
            self.credentials.get('secret') is not None and \
            self.credentials.get('tenant') is not None:
        self.azure_credentials = ServicePrincipalCredentials(
            client_id=self.credentials['client_id'],
            secret=self.credentials['secret'],
            tenant=self.credentials['tenant'],
            cloud_environment=self._cloud_environment)
    elif self.credentials.get('ad_user') is not None and \
            self.credentials.get('password') is not None:
        tenant = self.credentials.get('tenant')
        if not tenant:
            # 'common' lets AAD route the login to the user's home tenant.
            tenant = 'common'
        self.azure_credentials = UserPassCredentials(
            self.credentials['ad_user'],
            self.credentials['password'],
            tenant=tenant,
            cloud_environment=self._cloud_environment)
    else:
        self.fail(
            "Failed to authenticate with provided credentials. Some attributes were missing. "
            "Credentials must include client_id, secret and tenant or ad_user and password.")
import inspect
from pprint import pprint

import example

# Dump every plain function defined on class ``example.A`` as
# (name, function) pairs.
function_members = inspect.getmembers(example.A, inspect.isfunction)
pprint(function_members)
def test_inspect_getmembers(self):
    # Regression test: inspect.getmembers() on a DataFrame must not emit
    # any warning at all — assert_produces_warning(None) fails on any
    # warning raised inside the block.
    # GH38740
    df = DataFrame()
    with tm.assert_produces_warning(None):
        inspect.getmembers(df)
import sys
import inspect

# Simple CLI: list the public functions of a module and try to call each one
# with no arguments, printing either its return value or the error it raises.
if len(sys.argv) != 2:
    print("Usage: python {0} <name of module>\nExample: python3 {0} requests".format(sys.argv[0]))
    exit()

module = __import__(sys.argv[1])

# Renamed the loop variable: the original shadowed the builtin ``object`` and
# then re-fetched the function via module.__getattribute__ even though
# getmembers already yielded it.
for name, func in inspect.getmembers(module, inspect.isfunction):
    if not name.startswith('_'):  # Exclude internal or dunder functions
        try:
            # Prints functions that don't require params
            print(name, func())
        except Exception as e:
            # Prints functions that require params
            print(name, e)
def get_jobs():
    """
    Compile a dictionary of all jobs available across all modules in the jobs path(s).

    Returns an OrderedDict:

    {
        "local": {
            <module_name>: {
                "name": <human-readable module name>,
                "jobs": {
                   <class_name>: <job_class>,
                   <class_name>: <job_class>,
                   ...
                },
            },
            <module_name>: { ... },
            ...
        },
        "git.<repository-slug>": {
            <module_name>: { ... },
        },
        ...
        "plugins": {
            <module_name>: { ... },
        }
    }
    """
    jobs = OrderedDict()

    paths = _get_job_source_paths()

    # Iterate over all groupings (local, git.<slug1>, git.<slug2>, etc.)
    for grouping, path_list in paths.items():
        # Iterate over all modules (Python files) found in any of the directory paths identified for the given grouping
        for importer, module_name, _ in pkgutil.iter_modules(path_list):
            try:
                # Remove cached module to ensure consistency with filesystem
                if module_name in sys.modules:
                    del sys.modules[module_name]

                # Dynamically import this module to make its contents (job(s)) available to Python
                # NOTE(review): importer.find_module(...).load_module(...) is
                # deprecated in modern importlib — candidate for migration to
                # importlib.util APIs.
                module = importer.find_module(module_name).load_module(module_name)
            except Exception as exc:
                logger.error(f"Unable to load job {module_name}: {exc}")
                continue

            # For each module, we construct a dict {"name": module_name, "jobs": {"job_name": job_class, ...}}
            human_readable_name = module.name if hasattr(module, "name") else module_name
            module_jobs = {"name": human_readable_name, "jobs": OrderedDict()}

            # Get all Job subclasses (which includes Script and Report subclasses as well) in this module,
            # and add them to the dict
            for name, cls in inspect.getmembers(module, is_job):
                module_jobs["jobs"][name] = cls

            # If there were any Job subclasses found, add the module_jobs dict to the overall jobs dict
            # (otherwise skip it since there aren't any jobs in this module to report)
            if module_jobs["jobs"]:
                jobs.setdefault(grouping, {})[module_name] = module_jobs

    # Add jobs from plugins (which were already imported at startup)
    for cls in registry["plugin_jobs"]:
        module = inspect.getmodule(cls)
        human_readable_name = module.name if hasattr(module, "name") else module.__name__
        jobs.setdefault("plugins", {}).setdefault(
            module.__name__, {"name": human_readable_name, "jobs": OrderedDict()})
        jobs["plugins"][module.__name__]["jobs"][cls.__name__] = cls

    return jobs
# NOTE(review): this chunk begins mid-definition — ``finish_processes(processes)``
# is the tail of a fixture whose start is not visible here; indentation below
# is reconstructed.
    finish_processes(processes)


def ars_connection_str() -> str:
    # Websocket endpoint of the ARServer started elsewhere on a
    # module-level port.
    return f"ws://0.0.0.0:{_arserver_port}"


# TODO refactor this into _data packages
# Map event-class name -> event class, seeded from the execution events.
event_mapping: Dict[str, Type[Event]] = {evt.__name__: evt for evt in EXE_EVENTS}

# Extend the mapping with every Event subclass found in any submodule of
# the ``events`` package.
modules = []
for _, mod in inspect.getmembers(events, inspect.ismodule):
    modules.append(mod)

for mod in modules:
    for _, cls in inspect.getmembers(mod, inspect.isclass):
        if issubclass(cls, Event):
            event_mapping[cls.__name__] = cls


@pytest.fixture()
def ars() -> Iterator[ARServer]:
    # One ARServer client per test; the context manager closes the socket.
    with ARServer(ars_connection_str(), timeout=30, event_mapping=event_mapping) as ws:
        yield ws
def get_classes(module):
    """Return every (name, class) pair visible on *module*."""
    class_members = inspect.getmembers(module, predicate=inspect.isclass)
    return class_members
def setUpClass(cls):
    """Set up for the doc tests"""
    # Cache (name, function) pairs for every function defined on Square;
    # stored on the test class so the individual test methods (outside
    # this chunk) can inspect them.
    cls.sq_funcs = inspect.getmembers(Square, inspect.isfunction)
def check_signature(object_name, reference_object, other_object):
    """
    Given a reference class or function check if an other class or
    function could be substituted without causing any
    instantiation/usage issues.

    @param object_name: the name of the object being checked.
    @type object_name: string
    @param reference_object: the reference class or function.
    @type reference_object: class/function
    @param other_object: the other class or function to be checked.
    @type other_object: class/function
    @raise InvenioPluginContainerError: in case the other object is not
        compatible with the reference object.
    """
    # NOTE(review): Python 2 code ('except E, err', map(None, ...),
    # inspect.getargspec) — do not run under Python 3 as-is.
    try:
        if inspect.isclass(reference_object):
            ## if the reference_object is a class
            if inspect.isclass(other_object):
                ## if the other_object is a class
                if issubclass(other_object, reference_object):
                    ## if the other_object is derived from the reference we
                    ## should check for all the method in the former that
                    ## exists in the the latter, whether they recursively have
                    ## the same signature.
                    reference_object_map = dict(
                        inspect.getmembers(reference_object, inspect.isroutine))
                    for other_method_name, other_method_code in \
                            inspect.getmembers(other_object, inspect.isroutine):
                        if other_method_name in reference_object_map:
                            check_signature(
                                object_name,
                                reference_object_map[other_method_name],
                                other_method_code)
                else:
                    ## if the other_object is not derived from the
                    ## reference_object then all the method declared in the
                    ## latter should exist in the former and they should
                    ## recursively have the same signature.
                    other_object_map = dict(
                        inspect.getmembers(other_object, inspect.isroutine))
                    for reference_method_name, reference_method_code in \
                            inspect.getmembers(
                                reference_object, inspect.isroutine):
                        if reference_method_name in other_object_map:
                            # NOTE(review): ``other_method_code`` is never
                            # bound on this branch — at runtime this raises
                            # NameError (or reuses a stale value). It likely
                            # should be
                            # other_object_map[reference_method_name].
                            check_signature(object_name,
                                            reference_method_code,
                                            other_method_code)
                        else:
                            raise InvenioPluginContainerError(
                                '"%s", which'
                                ' exists in the reference class, does not'
                                ' exist in the other class, and the reference'
                                ' class is not an anchestor of the other'
                                % reference_method_name)
            else:
                ## We are comparing apples and oranges!
                raise InvenioPluginContainerError(
                    "%s (the reference object)"
                    " is a class while %s (the other object) is not a class"
                    % (reference_object, other_object))
        elif inspect.isroutine(reference_object):
            ## if the reference_object is a function
            if inspect.isroutine(other_object):
                ## if the other_object is a function we will compare the
                ## reference_object and other_object function signature i.e.
                ## their parameters.
                reference_args, reference_varargs, reference_varkw, \
                    reference_defaults = inspect.getargspec(reference_object)
                other_args, other_varargs, other_varkw, \
                    other_defaults = inspect.getargspec(other_object)
                ## We normalize the reference_defaults to be a list
                if reference_defaults is not None:
                    reference_defaults = list(reference_defaults)
                else:
                    reference_defaults = []
                ## We normalize the other_defaults to be a list
                if other_defaults is not None:
                    other_defaults = list(other_defaults)
                else:
                    other_defaults = []
                ## Check for presence of missing parameters in other function
                if not (other_varargs or other_varkw):
                    for reference_arg in reference_args:
                        if reference_arg not in other_args:
                            raise InvenioPluginContainerError(
                                'Argument "%s"'
                                ' in reference function %s does not exist in'
                                ' the other function %s'
                                % (reference_arg, reference_object,
                                   other_object))
                ## Check for presence of additional parameters in other
                ## function
                if not (reference_varargs or reference_varkw):
                    for other_arg in other_args:
                        if other_arg not in reference_args:
                            raise InvenioPluginContainerError(
                                'Argument "%s"'
                                ' in other function %s does not exist in the'
                                ' reference function %s'
                                % (other_arg, other_object, reference_object))
                ## Check sorting of arguments
                ## (py2 map(None, ...) zips with None padding)
                for reference_arg, other_arg in map(None, reference_args,
                                                    other_args):
                    if not ((reference_arg == other_arg) or
                            (reference_arg is None and
                             (reference_varargs or reference_varkw)) or
                            (other_arg is None and
                             (other_args or other_varargs))):
                        raise InvenioPluginContainerError(
                            'Argument "%s" in'
                            ' the other function is in the position of'
                            ' argument "%s" in the reference function, i.e.'
                            ' the order of arguments is not respected'
                            % (other_arg, reference_arg))
                if len(reference_defaults) != len(other_defaults) and \
                        not (reference_args or reference_varargs or
                             other_args or other_varargs):
                    raise InvenioPluginContainerError(
                        "Default parameters in"
                        " the other function are not corresponding to the"
                        " default of parameters of the reference function")
            else:
                ## We are comparing apples and oranges!
                raise InvenioPluginContainerError(
                    '%s (the reference object)'
                    ' is a function while %s (the other object) is not a'
                    ' function' % (reference_object, other_object))
    except InvenioPluginContainerError, err:
        ## Re-raise with source location of the offending object attached.
        try:
            sourcefile = inspect.getsourcefile(other_object)
            sourceline = inspect.getsourcelines(other_object)[1]
        except IOError:
            ## other_object is not loaded from a real file
            sourcefile = 'N/A'
            sourceline = 'N/A'
        raise InvenioPluginContainerError(
            'Error in checking signature for'
            ' "%s" as defined at "%s" (line %s): %s'
            % (object_name, sourcefile, sourceline, err))
def _program_list(self): program_list = inspect.getmembers(self.PROGRAM_MODULE, inspect.isclass) return program_list[1:]
def all_estimators(type_filter=None):
    """Get a list of all estimators from sklearn.

    This function crawls the module and gets all classes that inherit
    from BaseEstimator. Classes that are defined in test-modules are not
    included.
    By default meta_estimators such as GridSearchCV are also not included.

    Parameters
    ----------
    type_filter : string, list of string, or None, default=None
        Which kind of estimators should be returned. If None, no filter is
        applied and all estimators are returned.  Possible values are
        'classifier', 'regressor', 'cluster' and 'transformer' to get
        estimators only of these specific types, or a list of these to
        get the estimators that fit at least one of the types.

    Returns
    -------
    estimators : list of tuples
        List of (name, class), where ``name`` is the class name as string
        and ``class`` is the actuall type of the class.
    """
    # lazy import to avoid circular imports from sklearn.base
    from ._testing import ignore_warnings
    from ..base import (BaseEstimator, ClassifierMixin, RegressorMixin,
                        TransformerMixin, ClusterMixin)

    def is_abstract(c):
        # A class is abstract iff it declares a non-empty
        # __abstractmethods__ set (ABCMeta machinery).
        if not (hasattr(c, '__abstractmethods__')):
            return False
        if not len(c.__abstractmethods__):
            return False
        return True

    all_classes = []
    modules_to_ignore = {"tests", "externals", "setup", "conftest"}
    root = str(Path(__file__).parent.parent)  # sklearn package
    # Ignore deprecation warnings triggered at import time and from walking
    # packages
    with ignore_warnings(category=FutureWarning):
        for importer, modname, ispkg in pkgutil.walk_packages(
                path=[root], prefix='sklearn.'):
            mod_parts = modname.split(".")
            # Skip test/private/vendored modules entirely.
            if (any(part in modules_to_ignore for part in mod_parts) or
                    '._' in modname):
                continue
            module = import_module(modname)
            classes = inspect.getmembers(module, inspect.isclass)
            classes = [(name, est_cls) for name, est_cls in classes
                       if not name.startswith("_")]

            # TODO: Remove when FeatureHasher is implemented in PYPY
            # Skips FeatureHasher for PYPY
            if IS_PYPY and 'feature_extraction' in modname:
                classes = [(name, est_cls) for name, est_cls in classes
                           if name == "FeatureHasher"]

            all_classes.extend(classes)

    # Deduplicate: the same class is reachable from multiple modules.
    all_classes = set(all_classes)

    estimators = [
        c for c in all_classes
        if (issubclass(c[1], BaseEstimator) and c[0] != 'BaseEstimator')
    ]
    # get rid of abstract base classes
    estimators = [c for c in estimators if not is_abstract(c[1])]

    if type_filter is not None:
        if not isinstance(type_filter, list):
            type_filter = [type_filter]
        else:
            type_filter = list(type_filter)  # copy
        filtered_estimators = []
        filters = {
            'classifier': ClassifierMixin,
            'regressor': RegressorMixin,
            'transformer': TransformerMixin,
            'cluster': ClusterMixin
        }
        for name, mixin in filters.items():
            if name in type_filter:
                # Consume recognized filters; anything left over is invalid.
                type_filter.remove(name)
                filtered_estimators.extend(
                    [est for est in estimators if issubclass(est[1], mixin)])
        estimators = filtered_estimators
        if type_filter:
            raise ValueError("Parameter type_filter must be 'classifier', "
                             "'regressor', 'transformer', 'cluster' or "
                             "None, got"
                             " %s." % repr(type_filter))

    # drop duplicates, sort for reproducibility
    # itemgetter is used to ensure the sort does not extend to the 2nd item of
    # the tuple
    return sorted(set(estimators), key=itemgetter(0))
# modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Nathan Binkert import inspect import _m5 for name, module in inspect.getmembers(_m5): if name.startswith('param_') or name.startswith('enum_'): exec("from _m5.%s import *" % name)
def _remote(self,
            args=None,
            kwargs=None,
            num_cpus=None,
            num_gpus=None,
            memory=None,
            object_store_memory=None,
            resources=None,
            accelerator_type=None,
            max_concurrency=None,
            max_restarts=None,
            max_task_retries=None,
            name=None,
            lifetime=None,
            placement_group=None,
            placement_group_bundle_index=-1,
            placement_group_capture_child_tasks=None,
            override_environment_variables=None):
    """Create an actor.

    This method allows more flexibility than the remote method because
    resource requirements can be specified and override the defaults in the
    decorator.

    Args:
        args: The arguments to forward to the actor constructor.
        kwargs: The keyword arguments to forward to the actor constructor.
        num_cpus: The number of CPUs required by the actor creation task.
        num_gpus: The number of GPUs required by the actor creation task.
        memory: Restrict the heap memory usage of this actor.
        object_store_memory: Restrict the object store memory used by
            this actor when creating objects.
        resources: The custom resources required by the actor creation
            task.
        max_concurrency: The max number of concurrent calls to allow for
            this actor. This only works with direct actor calls. The max
            concurrency defaults to 1 for threaded execution, and 1000 for
            asyncio execution. Note that the execution order is not
            guaranteed when max_concurrency > 1.
        name: The globally unique name for the actor, which can be used
            to retrieve the actor via ray.get_actor(name) as long as the
            actor is still alive.
        lifetime: Either `None`, which defaults to the actor will fate
            share with its creator and will be deleted once its refcount
            drops to zero, or "detached", which means the actor will live
            as a global object independent of the creator.
        placement_group: the placement group this actor belongs to,
            or None if it doesn't belong to any group.
        placement_group_bundle_index: the index of the bundle
            if the actor belongs to a placement group, which may be -1 to
            specify any available bundle.
        placement_group_capture_child_tasks: Whether or not children tasks
            of this actor should implicitly use the same placement group
            as its parent. It is True by default.
        override_environment_variables: Environment variables to override
            and/or introduce for this actor. This is a dictionary mapping
            variable names to their values.

    Returns:
        A handle to the newly created actor.
    """
    if args is None:
        args = []
    if kwargs is None:
        kwargs = {}
    meta = self.__ray_metadata__
    # Any coroutine method on the user class switches the actor into
    # asyncio execution mode.
    actor_has_async_methods = len(
        inspect.getmembers(
            meta.modified_class,
            predicate=inspect.iscoroutinefunction)) > 0
    is_asyncio = actor_has_async_methods

    if max_concurrency is None:
        if is_asyncio:
            max_concurrency = 1000
        else:
            max_concurrency = 1

    if max_concurrency < 1:
        raise ValueError("max_concurrency must be >= 1")

    # In Ray client mode, delegate creation to the client-side path.
    if client_mode_should_convert():
        return client_mode_convert_actor(
            self,
            args,
            kwargs,
            num_cpus=num_cpus,
            num_gpus=num_gpus,
            memory=memory,
            object_store_memory=object_store_memory,
            resources=resources,
            accelerator_type=accelerator_type,
            max_concurrency=max_concurrency,
            max_restarts=max_restarts,
            max_task_retries=max_task_retries,
            name=name,
            lifetime=lifetime,
            placement_group=placement_group,
            placement_group_bundle_index=placement_group_bundle_index,
            placement_group_capture_child_tasks=(
                placement_group_capture_child_tasks),
            override_environment_variables=(
                override_environment_variables))

    worker = ray.worker.global_worker
    worker.check_connected()

    if name is not None:
        if not isinstance(name, str):
            raise TypeError(
                f"name must be None or a string, got: '{type(name)}'.")
        if name == "":
            raise ValueError("Actor name cannot be an empty string.")

    # Check whether the name is already taken.
    # TODO(edoakes): this check has a race condition because two drivers
    # could pass the check and then create the same named actor. We should
    # instead check this when we create the actor, but that's currently an
    # async call.
    if name is not None:
        try:
            ray.get_actor(name)
        except ValueError:  # Name is not taken.
            pass
        else:
            raise ValueError(
                f"The name {name} is already taken. Please use "
                "a different name or get the existing actor using "
                f"ray.get_actor('{name}')")

    if lifetime is None:
        detached = False
    elif lifetime == "detached":
        detached = True
    else:
        raise ValueError(
            "actor `lifetime` argument must be either `None` or 'detached'"
        )

    if placement_group_capture_child_tasks is None:
        placement_group_capture_child_tasks = (
            worker.should_capture_child_tasks_in_placement_group)

    # Inherit the ambient placement group when requested; otherwise fall
    # back to the empty (no-op) group.
    if placement_group is None:
        if placement_group_capture_child_tasks:
            placement_group = get_current_placement_group()

    if not placement_group:
        placement_group = PlacementGroup.empty()

    check_placement_group_index(placement_group,
                                placement_group_bundle_index)

    # Set the actor's default resources if not already set. First three
    # conditions are to check that no resources were specified in the
    # decorator. Last three conditions are to check that no resources were
    # specified when _remote() was called.
    if (meta.num_cpus is None and meta.num_gpus is None
            and meta.resources is None and meta.accelerator_type is None
            and num_cpus is None and num_gpus is None and resources is None
            and accelerator_type is None):
        # In the default case, actors acquire no resources for
        # their lifetime, and actor methods will require 1 CPU.
        cpus_to_use = ray_constants.DEFAULT_ACTOR_CREATION_CPU_SIMPLE
        actor_method_cpu = ray_constants.DEFAULT_ACTOR_METHOD_CPU_SIMPLE
    else:
        # If any resources are specified (here or in decorator), then
        # all resources are acquired for the actor's lifetime and no
        # resources are associated with methods.
        cpus_to_use = (ray_constants.DEFAULT_ACTOR_CREATION_CPU_SPECIFIED
                       if meta.num_cpus is None else meta.num_cpus)
        actor_method_cpu = ray_constants.DEFAULT_ACTOR_METHOD_CPU_SPECIFIED

    # LOCAL_MODE cannot handle cross_language
    if worker.mode == ray.LOCAL_MODE:
        assert not meta.is_cross_language, \
            "Cross language ActorClass cannot be executed locally."

    # Export the actor.
    if not meta.is_cross_language and (meta.last_export_session_and_job !=
                                       worker.current_session_and_job):
        # If this actor class was not exported in this session and job,
        # we need to export this function again, because current GCS
        # doesn't have it.
        meta.last_export_session_and_job = (worker.current_session_and_job)
        # After serialize / deserialize modified class, the __module__
        # of modified class will be ray.cloudpickle.cloudpickle.
        # So, here pass actor_creation_function_descriptor to make
        # sure export actor class correct.
        worker.function_actor_manager.export_actor_class(
            meta.modified_class, meta.actor_creation_function_descriptor,
            meta.method_meta.methods.keys())

    # Merge decorator-level and call-level resource arguments.
    resources = ray.utils.resources_from_resource_arguments(
        cpus_to_use, meta.num_gpus, meta.memory, meta.object_store_memory,
        meta.resources, meta.accelerator_type, num_cpus, num_gpus, memory,
        object_store_memory, resources, accelerator_type)

    # If the actor methods require CPU resources, then set the required
    # placement resources. If actor_placement_resources is empty, then
    # the required placement resources will be the same as resources.
    actor_placement_resources = {}
    assert actor_method_cpu in [0, 1]
    if actor_method_cpu == 1:
        actor_placement_resources = resources.copy()
        actor_placement_resources["CPU"] += 1
    if meta.is_cross_language:
        creation_args = cross_language.format_args(worker, args, kwargs)
    else:
        function_signature = meta.method_meta.signatures["__init__"]
        creation_args = signature.flatten_args(function_signature, args,
                                               kwargs)
    actor_id = worker.core_worker.create_actor(
        meta.language,
        meta.actor_creation_function_descriptor,
        creation_args,
        max_restarts or meta.max_restarts,
        max_task_retries or meta.max_task_retries,
        resources,
        actor_placement_resources,
        max_concurrency,
        detached,
        name if name is not None else "",
        is_asyncio,
        placement_group.id,
        placement_group_bundle_index,
        placement_group_capture_child_tasks,
        # Store actor_method_cpu in actor handle's extension data.
        extension_data=str(actor_method_cpu),
        override_environment_variables=override_environment_variables
        or dict())

    actor_handle = ActorHandle(
        meta.language,
        actor_id,
        meta.method_meta.decorators,
        meta.method_meta.signatures,
        meta.method_meta.num_returns,
        actor_method_cpu,
        meta.actor_creation_function_descriptor,
        worker.current_session_and_job,
        original_handle=True)

    return actor_handle
def _build_urls(self): self._urls = [] members = inspect.getmembers(self, predicate=inspect.ismethod) for n, m in members: if hasattr(m, '__routes__'): self.__register(getattr(self, n))
"""Initilization procedure for `Sampler` classes.""" import inspect import os import sys path = os.path.dirname(os.path.abspath(__file__)) __all__ = [] for py in [ f[:-3] for f in os.listdir(path) if f.endswith('.py') and f != '__init__.py' ]: mod = __import__('.'.join([__name__, py]), fromlist=[py]) classes = [ x[1] for x in inspect.getmembers(mod) if (inspect.isroutine(x[1]) or inspect.isclass(x[1])) and inspect.getmodule(x[1]) == mod ] for cls in classes: __all__.append(cls.__name__) setattr(sys.modules[__name__], cls.__name__, cls)
def _get_object_properties(obj):
    """Return the set of attribute names on *obj* that satisfy ``_is_property``."""
    names = set()
    for attr_name, _ in inspect.getmembers(obj, _is_property):
        names.add(attr_name)
    return names
# Redirect stdout and stderr sys.stdout = open('/tmp/work/output/stdout', 'w') sys.stderr = open('/tmp/work/output/stderr', 'w') # Try to import student's code sys.path.append('/tmp/work') try: import q1 except Exception as e: print(e, file=sys.stderr) sys.exit(0) # Execute student's code try: find = [f for (n, f) in inspect.getmembers(q1, inspect.isfunction) if n == 'printhelloworld'] if len(find) != 1: raise pythia.UndeclaredException('printhelloworld') if not callable(find[0]): raise pythia.BadTypeException('printhelloworld', type(find[0]), 'function') spec = inspect.getargspec(find[0]) if len(spec.args) != 0: raise pythia.WrongParameterNumberException('printhelloworld', len(spec.args), 0) q1.printhelloworld() except pythia.UndeclaredException as e: print('exception:undeclared:{}'.format(e.name), file=sys.stderr) except pythia.BadTypeException as e: print('exception:badtype:{}:{}:{}'.format(e.name, e.actualtype, e.expectedtype), file=sys.stderr) except pythia.WrongParameterNumberException as e: print('exception:wrongparameterexception:{}:{}:{}'.format(e.name, e.actualnumber, e.expectednumber), file=sys.stderr) except Exception as e:
def clean_up_factories():
    """Helper function to reset_sequence on object."""
    # Walk the factories module and reset the sequence counter on every
    # class whose name contains "factory" (case-insensitive).
    for member_name, member in inspect.getmembers(factories):
        is_factory_class = inspect.isclass(member) and "factory" in member_name.lower()
        if is_factory_class:
            member.reset_sequence(0)
def ihelp_menu(function_list, box_style='warning', to_embed=False):  #, to_file=False):#, json_file='ihelp_output.txt' ):
    """
    Creates a widget menu of the source code and and help documentation of the functions in function_list.

    Args:
        function_list (list): list of function object or string names of loaded function.
        to_embed (bool, optional): Returns interface (layout,output) if True. Defaults to False.
        to_file (bool, optional): Save . Defaults to False.
        json_file (str, optional): [description]. Defaults to 'ihelp_output.txt'.

    Returns:
        full_layout (ipywidgets GridBox): Layout of interface.
        output ()
    """
    # Accepts a list of string names for loaded modules/functions to save the `help` output and
    # inspect.getsource() outputs to dictionary for later reference and display
    ## One way using sys to write txt file
    import pandas as pd
    import sys
    import inspect
    from io import StringIO
    from IPython.display import display, Markdown

    # Temporarily redirect stdout so help()/print output can be captured
    # into an in-memory buffer; restored before building the widgets.
    notebook_output = sys.stdout
    result = StringIO()
    sys.stdout = result

    ## Turn single input into a list
    if isinstance(function_list, list) == False:
        function_list = [function_list]

    ## Make a dictionary of {function_name : function_object}
    functions_dict = dict()
    for fun in function_list:
        ## if input is a string, save string as key, and eval(function) as value
        # NOTE(review): eval() on caller-supplied strings — acceptable only
        # because inputs come from the notebook author, never untrusted data.
        if isinstance(fun, str):
            functions_dict[fun] = eval(fun)
        ## if input is a function, get the name of function using inspect and make key, function as value
        elif inspect.isfunction(fun):
            members = inspect.getmembers(fun)
            member_df = pd.DataFrame(members, columns=['param', 'values']).set_index('param')
            fun_name = member_df.loc['__name__'].values[0]
            functions_dict[fun_name] = fun

    ## Create an output dict to store results for functions
    output_dict = {}
    for fun_name, real_func in functions_dict.items():
        output_dict[fun_name] = {}

        ## First save help
        help(real_func)
        output_dict[fun_name]['help'] = result.getvalue()

        ## Clear contents of io stream
        result.truncate(0)

        try:
            ## Next save source
            source_DF = inspect.getsource(real_func)
            # # if markdown == True:
            #     output = "```python" +'\n'+source_DF+'\n'+"```"
            #     display(Markdown(output))
            # else:
            #     output=source_DF
            print(source_DF)
            # output_dict[fun_name]['source'] = source_DF
            # print(inspect.getsource(real_func))  #eval(fun)))###f"{eval(fun)}"))
        except:
            # print("Source code for object was not found")
            print("Source code for object was not found")
        # finally:
        output_dict[fun_name]['source'] = result.getvalue()

        ## clear contents of io stream
        result.truncate(0)

        ## Get file location
        try:
            file_loc = inspect.getfile(real_func)
            print(file_loc)
        except:
            print("File location not found")

        output_dict[fun_name]['file_location'] = result.getvalue()

        ## clear contents of io stream
        result.truncate(0)

    ## Reset display back to notebook
    sys.stdout = notebook_output

    # if to_file==True:
    #     with open(json_file,'w') as f:
    #         import json
    #         json.dump(output_dict,f)

    ## CREATE INTERACTIVE MENU
    from ipywidgets import interact, interactive, interactive_output
    import ipywidgets as widgets
    from IPython.display import display
    # from functions_combined_BEST import ihelp
    # import functions_combined_BEST as ji

    ## Check boxes
    check_help = widgets.Checkbox(description="Show 'help(func)'", value=True)
    check_source = widgets.Checkbox(description="Show source code", value=True)
    check_fileloc = widgets.Checkbox(description="Show source filepath", value=False)
    check_boxes = widgets.HBox(children=[check_help, check_source, check_fileloc])

    ## dropdown menu (dropdown, label, button)
    dropdown = widgets.Dropdown(options=list(output_dict.keys()))
    label = widgets.Label('Function Menu')
    button = widgets.ToggleButton(description='Show/hide', value=False)

    ## Putting it all together
    title = widgets.Label('iHelp Menu: View Help and/or Source Code')
    menu = widgets.HBox(children=[label, dropdown, button])
    titled_menu = widgets.VBox(children=[title, menu])
    full_layout = widgets.GridBox(children=[titled_menu, check_boxes], box_style=box_style)

    ## Define output manager
    # show_output = widgets.Output()

    def dropdown_event(change):
        new_key = change.new
        output_display = output_dict[new_key]
    # NOTE(review): names='values' — ipywidgets traits use 'value'
    # (singular); as written this observer may never fire. Confirm.
    dropdown.observe(dropdown_event, names='values')

    def show_ihelp(display_help=button.value, function=dropdown.value,
                   show_help=check_help.value, show_code=check_source.value,
                   show_file=check_fileloc.value, ouput_dict=output_dict):
        from IPython.display import Markdown
        # import functions_combined_BEST as ji
        from IPython.display import display
        page_header = '---'*28
        # import json
        # with open(json_file,'r') as f:
        #     output_dict = json.load(f)

        func_dict = output_dict[function]
        source_code = None

        if display_help:
            if show_help:
                # display(print(func_dict['help']))
                print(page_header)
                banner = ''.join(["---"*2, ' HELP ', "---"*24, '\n'])
                print(banner)
                print(func_dict['help'])
            if show_code:
                print(page_header)
                banner = ''.join(["---"*2, ' SOURCE -', "---"*23])
                print(banner)
                source_code = func_dict['source']  #.encode('utf-8')
                if source_code.startswith('`'):
                    source_code = source_code.replace('`', "").encode('utf-8')
                # Colab cannot render the Markdown widget output reliably.
                if 'google.colab' in sys.modules:
                    print(source_code)
                else:
                    md_source = "```python\n" + source_code
                    md_source += "```"
                    display(Markdown(md_source))
            if show_file:
                print(page_header)
                banner = ''.join(["---"*2, ' FILE LOCATION ', "---"*21])
                print(banner)
                file_loc = func_dict['file_location']
                print(file_loc)
            # NOTE(review): '&' here is bitwise, not logical 'and' — the
            # precedence makes this condition almost certainly wrong. Confirm.
            if show_help==False & show_code==False & show_file==False:
                display('Check at least one "Show" checkbox for output.')
        else:
            display('Press "Show/hide" for display')

    ## Fully integrated output
    output = widgets.interactive_output(show_ihelp, {'display_help': button,
                                                     'function': dropdown,
                                                     'show_help': check_help,
                                                     'show_code': check_source,
                                                     'show_file': check_fileloc})
    if to_embed:
        return full_layout, output
    else:
        display(full_layout, output)
def _add_version_doc():
    """Stamp the 1.12.0 version marker onto every public expression function.

    Walks ``pyflink.table.expressions`` and applies ``add_version_doc`` to
    each public (non-underscore-prefixed) function found on the module.
    """
    from inspect import getmembers, isfunction
    from pyflink.table import expressions

    # Iterate the module's members; tag only public plain functions.
    for name, member in getmembers(expressions):
        if not name.startswith('_') and isfunction(member):
            add_version_doc(member, "1.12.0")
# GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. import logging import inspect import itertools import ast import telethon from .. import loader, utils tlfuns = dict(filter(lambda mod: mod[1].__module__.startswith("telethon.tl.functions"), itertools.chain.from_iterable([inspect.getmembers(mod[1], inspect.isclass) for mod in inspect.getmembers(telethon.tl.functions, inspect.ismodule)]))) logger = logging.getLogger(__name__) def register(cb): cb(RemoteMod()) @loader.tds class RemoteMod(loader.Module): """Operate on other accounts""" strings = {"name": "Remote Control", "account_cfg_doc": "What to call this account in .remote commands",
def module_classes(module): return [ cls for class_name, cls in inspect.getmembers(module, inspect.isclass) ]
class BenchGlobal(Benchmark):
    """
    Benchmark the global optimizers using the go_benchmark_functions
    suite
    """
    timeout = 300

    # Every concrete benchmark problem in `gbf`: subclasses of gbf.Benchmark,
    # excluding the abstract `Benchmark` base itself and the 'Problem*'
    # classes.
    # BUGFIX: this previously read `item[0] not in ('Benchmark')`;
    # `('Benchmark')` is a plain string (not a 1-tuple), so that was a
    # *substring* membership test which would also wrongly exclude any class
    # whose name is a substring of "Benchmark" (e.g. 'Bench', 'mark').
    _functions = {
        name: cls
        for name, cls in inspect.getmembers(gbf, inspect.isclass)
        if (issubclass(cls, gbf.Benchmark)
            and name != 'Benchmark'
            and not name.startswith('Problem'))
    }

    # Which functions actually run: none unless the slow benchmarks are
    # enabled; an explicit comma-separated subset via SCIPY_GLOBAL_BENCH;
    # otherwise all of them.
    if not is_xslow():
        _enabled_functions = []
    elif 'SCIPY_GLOBAL_BENCH' in os.environ:
        _enabled_functions = [
            x.strip() for x in os.environ['SCIPY_GLOBAL_BENCH'].split(',')
        ]
    else:
        _enabled_functions = list(_functions.keys())

    params = [
        list(_functions.keys()),
        ["success%", "<nfev>"],
        ['DE', 'basinh.', 'DA'],
    ]
    param_names = ["test function", "result type", "solver"]

    def __init__(self):
        self.enabled = is_xslow()
        # Number of minimization trials per function; overridable via env var.
        try:
            self.numtrials = int(os.environ['SCIPY_GLOBAL_BENCH_NUMTRIALS'])
        except (KeyError, ValueError):
            self.numtrials = 100

        # JSON backing file that accumulates results across runs.
        self.dump_fn = os.path.join(os.path.dirname(__file__),
                                    '..', 'global-bench-results.json')
        self.results = {}

    def setup(self, name, ret_value, solver):
        """Skip functions that aren't enabled; load previously cached results."""
        if name not in self._enabled_functions:
            raise NotImplementedError("skipped")

        # load json backing file
        with open(self.dump_fn, 'r') as f:
            self.results = json.load(f)

    def teardown(self, name, ret_value, solver):
        """Persist the accumulated results back to the JSON backing file."""
        if not self.enabled:
            return
        with open(self.dump_fn, 'w') as f:
            json.dump(self.results, f, indent=2, sort_keys=True)

    def track_all(self, name, ret_value, solver):
        """Run (or fetch a cached run of) *solver* on *name*.

        Returns the success percentage or the mean number of function
        evaluations, depending on *ret_value*.
        """
        if name in self.results and solver in self.results[name]:
            # have we done the function, and done the solver?
            # if so, then just return the ret_value
            av_results = self.results[name]
            if ret_value == 'success%':
                return (100 * av_results[solver]['nsuccess']
                        / av_results[solver]['ntrials'])
            elif ret_value == '<nfev>':
                return av_results[solver]['mean_nfev']
            else:
                raise ValueError()

        klass = self._functions[name]
        f = klass()

        try:
            b = _BenchOptimizers.from_funcobj(name, f)
            with np.errstate(all='ignore'):
                b.bench_run_global(methods=[solver],
                                   numtrials=self.numtrials)

            av_results = b.average_results()

            if name not in self.results:
                self.results[name] = {}
            self.results[name][solver] = av_results[solver]

            if ret_value == 'success%':
                return (100 * av_results[solver]['nsuccess']
                        / av_results[solver]['ntrials'])
            elif ret_value == '<nfev>':
                return av_results[solver]['mean_nfev']
            else:
                raise ValueError()
        except Exception:
            # Record the traceback instead of stats so one crashing function
            # doesn't abort the whole benchmark sweep.
            print("".join(traceback.format_exc()))
            self.results[name] = "".join(traceback.format_exc())

    def setup_cache(self):
        if not self.enabled:
            return

        # create the logfile to start with
        with open(self.dump_fn, 'w') as f:
            json.dump({}, f, indent=2)
'H2OKMeansEstimator', 'H2OPrincipalComponentAnalysisEstimator', 'H2OSingularValueDecompositionEstimator', 'H2OTargetEncoderEstimator', 'H2OWord2vecEstimator', ) _classifier_only_estimators = ( 'H2ONaiveBayesEstimator', 'H2OSupportVectorMachineEstimator', ) _regressor_only_estimators = ('H2OCoxProportionalHazardsEstimator', ) gen_estimators = [] for mod in [automl, estimators]: submodule = mod.__name__.split('.')[-1] for name, cls in inspect.getmembers(mod, inspect.isclass): if name in _excluded_estimators: continue gen_estimators.append(make_estimator(cls, submodule=submodule)) if name not in _generic_only_estimators: if name not in _regressor_only_estimators: gen_estimators.append(make_classifier(cls, submodule=submodule)) if name not in _classifier_only_estimators: gen_estimators.append(make_regressor(cls, submodule=submodule)) for mod in [transforms]: submodule = mod.__name__.split('.')[-1] for name, cls in inspect.getmembers(mod, inspect.isclass): if name in _excluded_estimators: continue
def pytest_collection_modifyitems(items, config):
    """
    This function is called upon during the pytest test collection phase
    and allows for modification of the test items within the list
    """
    collect_only = config.getoption("--collect-only")
    cassandra_dir = config.getoption("--cassandra-dir")
    cassandra_version = config.getoption("--cassandra-version")
    # A build to test against is mandatory unless we're only collecting.
    if not collect_only and cassandra_dir is None:
        if cassandra_version is None:
            raise Exception(
                "Required dtest arguments were missing! You must provide either --cassandra-dir "
                "or --cassandra-version. Refer to the documentation or invoke the help with --help."
            )

    # Either cassandra_version or cassandra_dir is defined, so figure out the version
    CASSANDRA_VERSION = cassandra_version or get_version_from_build(
        cassandra_dir)

    # Check that use_off_heap_memtables is supported in this c* version
    # NOTE(review): lexicographic string comparison — only correct while
    # minor versions stay single-digit (e.g. "3.10" < "3.4"); confirm the
    # supported version range before relying on this.
    if config.getoption("--use-off-heap-memtables") and (
            "3.0" <= CASSANDRA_VERSION < "3.4"):
        raise Exception(
            "The selected Cassandra version %s doesn't support the provided option "
            "--use-off-heap-memtables, see https://issues.apache.org/jira/browse/CASSANDRA-9472 "
            "for details" % CASSANDRA_VERSION)

    selected_items = []
    deselected_items = []

    # Probe once up front; used for every resource_intensive item below.
    sufficient_system_resources_resource_intensive = sufficient_system_resources_for_resource_intensive_tests(
    )
    logger.debug("has sufficient resources? %s" %
                 sufficient_system_resources_resource_intensive)

    for item in items:
        deselect_test = False

        # resource_intensive tests run only when forced, or when neither
        # skipped explicitly nor starved of system resources.
        if item.get_closest_marker("resource_intensive") and not collect_only:
            force_resource_intensive = config.getoption(
                "--force-resource-intensive-tests")
            skip_resource_intensive = config.getoption(
                "--skip-resource-intensive-tests")
            if not force_resource_intensive:
                if skip_resource_intensive:
                    deselect_test = True
                    logger.info(
                        "SKIP: Deselecting test %s as test marked resource_intensive. To force execution of "
                        "this test re-run with the --force-resource-intensive-tests command line argument"
                        % item.name)
                if not sufficient_system_resources_resource_intensive:
                    deselect_test = True
                    logger.info(
                        "SKIP: Deselecting resource_intensive test %s due to insufficient system resources"
                        % item.name)

        # vnodes markers must agree with the --use-vnodes flag, in both
        # directions.
        if item.get_closest_marker("no_vnodes"):
            if config.getoption("--use-vnodes"):
                deselect_test = True
                logger.info(
                    "SKIP: Deselecting test %s as the test requires vnodes to be disabled. To run this test, "
                    "re-run without the --use-vnodes command line argument" %
                    item.name)

        if item.get_closest_marker("vnodes"):
            if not config.getoption("--use-vnodes"):
                deselect_test = True
                logger.info(
                    "SKIP: Deselecting test %s as the test requires vnodes to be enabled. To run this test, "
                    "re-run with the --use-vnodes command line argument" %
                    item.name)

        # Upgrade tests are gated behind --execute-upgrade-tests, whether the
        # mark sits on the item itself or is applied module-wide through a
        # class-level `pytestmark` attribute.
        for test_item_class in inspect.getmembers(item.module,
                                                  inspect.isclass):
            if not hasattr(test_item_class[1], "pytestmark"):
                continue

            for module_pytest_mark in test_item_class[1].pytestmark:
                if module_pytest_mark.name == "upgrade_test":
                    if not config.getoption("--execute-upgrade-tests"):
                        deselect_test = True

        if item.get_closest_marker("upgrade_test"):
            if not config.getoption("--execute-upgrade-tests"):
                deselect_test = True

        # no_offheap_memtables tests can't run with off-heap memtables on.
        if item.get_closest_marker("no_offheap_memtables"):
            if config.getoption("use_off_heap_memtables"):
                deselect_test = True

        # deselect cqlsh tests that depend on fixing a driver behavior
        if item.get_closest_marker("depends_driver"):
            deselect_test = True

        if deselect_test:
            deselected_items.append(item)
        else:
            selected_items.append(item)

    # Report the deselection to pytest and replace the collection in place.
    config.hook.pytest_deselected(items=deselected_items)
    items[:] = selected_items