Example #1
0
File: page.py Project: aih/QuoteBin
    def get_css_js(self):
        """Collect the CSS and JS resource lists from every Container class
        referenced by this page's attributes.

        Scans ``self.__dict__``: list-valued attributes are searched element
        by element, all other attributes are checked directly.  Returns a
        pair ``(css, js)`` with duplicates removed via ``self.unique_list``.
        """
        # NOTE: a large block of commented-out dead code (JS concatenation /
        # minification to a temp file) was removed here.
        css = []
        js = []
        for entry in self.__dict__.values():
            if isinstance(entry, list):
                for item in entry:
                    # isinstance() covers metaclass-style registration,
                    # issubclass() the plain inheritance case.
                    if inspect.isclass(item) and (isinstance(item, Container) or issubclass(item, Container)):
                        css.extend(item.get_css())
                        js.extend(item.get_js())
                continue

            if inspect.isclass(entry) and (isinstance(entry, Container) or issubclass(entry, Container)):
                css.extend(entry.get_css())
                js.extend(entry.get_js())
        return self.unique_list(css), self.unique_list(js)
    def getmethods(self,modulePath,Class) :
        '''
         This will get the list of methods in given module or class.
         It accepts the module path and class name. If there is no
         class name then it has be mentioned as None.
        '''
        methodList = []
        moduleList = modulePath.split("/")
        newModule = ".".join([moduleList[len(moduleList) - 2],moduleList[len(moduleList) - 1]])
        print "Message : Method list is being obatined , Please wait ..."
        try :
            if Class :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(), [Class], -1)
                ClassList = [x.__name__ for x in Module.__dict__.values() if inspect.isclass(x)]
                self.ClassList = ClassList
                Class = vars(Module)[Class]
                methodList = [x.__name__ for x in Class.__dict__.values() if inspect.isfunction(x)]
            else :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(),[moduleList[len(moduleList) - 2]], -1)
                methodList = [x.__name__ for x in Module.__dict__.values() if inspect.isfunction(x)]
                ClassList = [x.__name__ for x in Module.__dict__.values() if inspect.isclass(x)]
                self.ClassList = ClassList
        except :
            print "Error : " +str(sys.exc_info()[1])


        self.method = methodList
        return self.method
Example #3
0
def print_classes():
    """Write a report of the classes, methods and functions found in the
    module-level ``jb`` module to the module-level file object ``f1``.

    For every class, its members one level down are listed as well.
    The original body mixed tabs and spaces (a TabError under Python 3)
    and contained stale comment fragments; both were cleaned up.
    """
    for name, obj in inspect.getmembers(jb):
        if inspect.isclass(obj):
            f1.write('\n')
            f1.write("Class      Name ->  " + name)
            f1.write('\n')
            # One level of nesting: report the members of the class too.
            for sub_name, sub_obj in inspect.getmembers(obj):
                if inspect.isclass(sub_obj):
                    f1.write("      " + "SubClass      Name ->  " + sub_name)
                    f1.write('\n')
                if inspect.ismethod(sub_obj):
                    f1.write("      " + "SubMethod     Name ->  " + sub_name)
                    f1.write('\n')
                if inspect.isfunction(sub_obj):
                    f1.write("      " + "SubFunction   Name ->  " + sub_name)
                    f1.write('\n')
        if inspect.ismethod(obj):
            f1.write('')
            f1.write('\n')
            f1.write("Method     Name ->  " + name)
            f1.write('\n')
        if inspect.isfunction(obj):
            f1.write('')
            f1.write('\n')
            f1.write("Function   Name ->  " + name)
            f1.write('\n')
Example #4
0
    def __init__(self, MetaDataClass, QMetaDataClass, referencer, cdmArchive, thumbnailManager):
        '''
        Construct the meta data base service for the provided classes.

        @param MetaDataClass: class
            A class that extends MetaData meta class.
        @param QMetaDataClass: class
            A class that extends QMetaData API class.
        @param referencer: IMetaDataReferencer
            The referencer to provide the references in the meta data.
        @param cdmArchive: ICDM
            The CDM used for current media archive type
        @param thumbnailManager: IThumbnailManager
            The thumbnail manager used to manage the current media archive type
        '''
        assert isclass(MetaDataClass) and issubclass(MetaDataClass, MetaDataMapped), \
        'Invalid meta data class %s' % MetaDataClass
        assert isclass(QMetaDataClass) and issubclass(QMetaDataClass, QMetaData), \
        'Invalid meta data query class %s' % QMetaDataClass
        assert isinstance(referencer, IMetaDataReferencer), 'Invalid referencer %s' % referencer
        # BUG FIX: the failure message referenced self.searchProvider, an
        # attribute that never exists on this class, so a failing assert
        # raised AttributeError instead of reporting the invalid CDM.
        assert isinstance(cdmArchive, ICDM), 'Invalid CDM %s' % cdmArchive
        assert isinstance(thumbnailManager, IThumbnailManager), 'Invalid video meta data service %s' % thumbnailManager

        self.MetaData = MetaDataClass
        self.QMetaData = QMetaDataClass
        self.referencer = referencer
        self.cdmArchive = cdmArchive
        self.thumbnailManager = thumbnailManager
	def _executeOperation(self, operation, method=''):
		"""Private method that executes operations of web service.

		Dispatches on the operation's declared ``_input``/``_output``
		metadata: complex-typed inputs are parsed from the request body
		into an object, otherwise plain parameters are extracted and the
		operation is invoked with them (as one list, or unpacked).
		"""
		typesinput = getattr(operation, '_input')
		args = getattr(operation, '_args')

		if inspect.isclass(typesinput) and issubclass(typesinput, complextypes.ComplexType):
			# Complex input: build the typed object from the request body.
			obj = self._parseComplexType(typesinput, self._request.getBody()[0], method=method)
			response = operation(obj)
		else:
			params = self._parseParams(self._request.getBody()[0], typesinput, args)
			if hasattr(operation, '_inputArray') and getattr(operation, '_inputArray'):
				# Array input: the operation takes the whole list.
				response = operation(params)
			else:
				response = operation(*params)

		is_array = None
		if hasattr(operation, '_outputArray') and getattr(operation, '_outputArray'):
			is_array = getattr(operation, '_outputArray')

		typesoutput = getattr(operation, '_output')
		if inspect.isclass(typesoutput) and issubclass(typesoutput, complextypes.ComplexType):
			return self._createReturnsComplexType(response)
		return self._createReturns(response, is_array)
Example #6
0
    def build_view_info(self, view):
        """Assemble an ordered mapping describing *view* for introspection:
        its name, factory, defining module/location, template and context.
        """
        info = self.info
        action = self.action

        # The factory is the configured context class when it is a class,
        # otherwise the first positional argument of the registration action.
        factory = info.context if inspect.isclass(info.context) else action.args[0]

        if inspect.isclass(factory):
            factory_name = '%s.%s' % (factory.__module__, factory.__name__)
        else:
            factory_name = factory.__name__

        context_name = '%s.%s' % (self.context.__class__.__module__,
                                  self.context.__class__.__name__)

        data = OrderedDict()
        data['name'] = self.view_name
        data['view-factory'] = factory_name
        data['python-module'] = info.module.__name__
        data['python-module-location'] = info.module.__file__
        data['python-module-line'] = info.codeinfo[1]
        data['template'] = 'unset'
        data['context'] = context_name
        data['context-path'] = self.request.resource_url(self.context)

        # Overwrite the placeholder when the view carries a template.
        if hasattr(view, 'template'):
            data['template'] = str(view.template)

        return data
Example #7
0
    def transform_params_to_database(self, params):
        """
        Takes a dictionary of parameters and recursively translates them into parameters appropriate for sending over
        Rexpro.

        :param params: The parameters to be sent to the function
        :type params: dict
        :rtype: dict

        """
        from mogwai.models.element import BaseElement
        from mogwai.models import Edge, Vertex
        from mogwai.properties import DateTime, Decimal, UUID

        recurse = self.transform_params_to_database

        # Containers are translated element-by-element.
        if isinstance(params, dict):
            return dict((key, recurse(value)) for key, value in iteritems(params))
        if isinstance(params, array_types):
            return [recurse(item) for item in params]
        # Graph elements are referenced by their id.
        if isinstance(params, BaseElement):
            return params._id
        if inspect.isclass(params):
            # Edge/Vertex classes are referenced by label / element type.
            if issubclass(params, Edge):
                return params.label
            if issubclass(params, Vertex):
                return params.element_type
            return params
        # Scalar types with dedicated database encodings.
        if isinstance(params, datetime):
            return DateTime().to_database(params)
        if isinstance(params, _UUID):
            return UUID().to_database(params)
        if isinstance(params, _Decimal):
            return Decimal().to_database(params)
        return params
Example #8
0
def help(item):
    # If this item belongs to the GPS module, extract its documentation
    # from the module GPS_doc
    if inspect.getmodule(item) == GPS:
        class_name = None

        if inspect.isclass(item):
            class_name = item.__name__
        elif inspect.isclass(item):
            class_name = item.__class__.__name__
        else:
            # item might be a method of a class. No choice here but to iterate
            # over all classes in GPS and attempt to find the parent.
            item_name = item.__name__

            for class_name in dir(GPS):
                klass = getattr(GPS, class_name)
                if inspect.isclass(klass):
                    if (hasattr(klass, item_name)
                            and getattr(klass, item_name) == item):
                        class_name = klass.__name__ + '.' + item_name
                        break

        if class_name:
            print helpdoc(class_name)
            return

    # fallback on the default help function
    default_help(item)
Example #9
0
    def _create_check_conan(self, conan_file, consumer, conan_file_path, output, filename):
        """ Check the integrity of a given conanfile: instantiate the single
        ConanFile subclass defined in *conan_file* and register every
        Generator subclass it defines.

        Raises ConanException when the module defines zero or more than one
        ConanFile subclass, or (when not a consumer) when the recipe lacks
        name/version.
        """
        result = None
        for name, attr in conan_file.__dict__.items():
            # Skip private names only.  The original tested `"_" in name`,
            # which wrongly skipped public classes such as `My_Recipe`; the
            # parallel _parse_module implementation uses the leading
            # underscore test.
            if name.startswith("_"):
                continue
            if (inspect.isclass(attr) and issubclass(attr, ConanFile) and attr != ConanFile and
                    attr.__dict__["__module__"] == filename):
                if result is None:
                    # Actual instantiation of ConanFile object
                    result = attr(output, self._runner,
                                  self._settings.copy(), os.path.dirname(conan_file_path))
                else:
                    raise ConanException("More than 1 conanfile in the file")
            if (inspect.isclass(attr) and issubclass(attr, Generator) and attr != Generator and
                    attr.__dict__["__module__"] == filename):
                _save_generator(attr.__name__, attr)

        if result is None:
            raise ConanException("No subclass of ConanFile")

        # check name and version were specified
        if not consumer:
            if not hasattr(result, "name") or not result.name:
                raise ConanException("conanfile didn't specify name")
            if not hasattr(result, "version") or not result.version:
                raise ConanException("conanfile didn't specify version")

        return result
Example #10
0
def import_classes(name, currmodule):
    # type: (unicode, unicode) -> Any
    """Import a class using its fully-qualified *name*."""
    # Try the name relative to *currmodule* first, then as an absolute path.
    target = try_import(currmodule + '.' + name) if currmodule else None
    if target is None:
        target = try_import(name)
    if target is None:
        raise InheritanceException(
            'Could not import class or module %r specified for '
            'inheritance diagram' % name)

    if inspect.isclass(target):
        # A class: the result is just that class.
        return [target]
    if inspect.ismodule(target):
        # A module: collect every class defined in that very module.
        return [cls for cls in target.__dict__.values()
                if inspect.isclass(cls) and cls.__module__ == target.__name__]
    raise InheritanceException('%r specified for inheritance diagram is '
                               'not a class or module' % name)
def deduce_swagger_type(python_type_or_object, nested_type=None):
    """Map a python type (or an instance of a restful field) to the
    corresponding swagger type description dictionary.
    """
    import inspect

    # Classes are matched with issubclass(), instances with isinstance().
    if inspect.isclass(python_type_or_object):
        predicate = issubclass
    else:
        predicate = isinstance

    flat_types = (str,
                  fields.String,
                  fields.FormattedString,
                  fields.Url,
                  int,
                  fields.Integer,
                  float,
                  fields.Float,
                  fields.Arbitrary,
                  fields.Fixed,
                  bool,
                  fields.Boolean,
                  fields.DateTime)
    if predicate(python_type_or_object, flat_types):
        return {'type': deduce_swagger_type_flat(python_type_or_object)}

    if predicate(python_type_or_object, fields.List):
        # A bare List class carries no item information.
        if inspect.isclass(python_type_or_object):
            return {'type': 'array'}
        return {'type': 'array',
                'items': {'$ref': deduce_swagger_type_flat(
                    python_type_or_object.container, nested_type)}}

    if predicate(python_type_or_object, fields.Nested):
        return {'type': nested_type}

    return {'type': 'null'}
Example #12
0
 def setUp(self):
     """Bind the Native/NativeError attributes of the module under test and
     sanity-check that they are classes with the expected base."""
     super().setUp()
     self.Native = self.module.Native
     self.NativeError = self.module.NativeError
     # The module must expose real classes, and its error type must be a
     # ValueError subclass so callers can catch it generically.
     self.assertTrue(inspect.isclass(self.Native))
     self.assertTrue(inspect.isclass(self.NativeError))
     self.assertTrue(issubclass(self.NativeError, ValueError))
Example #13
0
    def _load_from_dir(self):
        """Discover plugins in ``self.source``: execute every ``*.py`` file
        there and instantiate each ``Plugin`` subclass defined directly in it.

        Files are first executed with :func:`runpy.run_path`; when that
        fails, the legacy ``imp.load_source`` path is tried as a fallback.
        Returns a dict mapping plugin name to plugin instance.
        """
        plugins = {}
        pyfiles = glob.glob(os.path.join(self.source, '*.py'))

        for filename in pyfiles:
            try:
                globs = runpy.run_path(filename)
                for name, o in globs.items():
                    # Don't try to register plugins imported into this module
                    # from somewhere else
                    # (run_path executes under the pseudo-module '<run_path>')
                    if inspect.isclass(o) and issubclass(
                            o, Plugin) and o.__module__ == '<run_path>':
                        p = o()
                        plugins[p.name] = p
                # If no exceptions, continue to next filename
                continue
            except Exception as ex:
                logger.warning('When importing {}:\n{}'.format(filename, ex))

            # Fallback path: load the file as a module named after its path.
            import imp
            base = os.path.splitext(filename)[0]
            mod = imp.load_source(base, filename)
            for name in mod.__dict__:
                obj = getattr(mod, name)
                # Don't try to register plugins imported into this module
                # from somewhere else
                if inspect.isclass(obj) and issubclass(
                        obj, Plugin) and obj.__module__ == base:
                    p = obj()
                    plugins[p.name] = p

        return plugins
Example #14
0
    def process_members(self, package_name, mod):
        """
        Process all members of the package or module passed.

        For classes defined inside *package_name*, source/doc/comments are
        captured; for every other member only its content is fetched.
        """
        name = mod.__name__

        for k, m in inspect.getmembers(mod):
            self.log.debug("in %s processing element %s" % (mod.__name__, k))
            # Non-class member originating from inside the package: record
            # it keyed by its defining module.
            if not inspect.isclass(m) and hasattr(m, '__module__') and m.__module__ and m.__module__.startswith(package_name):
                key = "%s.%s" % (m.__module__, k)
                self.fetch_item_content(key, m)

            elif inspect.isclass(m) and m.__module__.startswith(package_name):
                key = "%s.%s" % (mod.__name__, k)
                try:
                    # Source may be unavailable (e.g. C extensions) -- then
                    # the key is stored with empty source instead.
                    item_content = inspect.getsource(m)
                    self.artifact.output_data.append("%s:doc" % key, inspect.getdoc(m))
                    self.artifact.output_data.append("%s:comments" % key, inspect.getcomments(m))
                    self.add_source_for_key(key, item_content)
                except IOError:
                    self.log.debug("can't get source for %s" % key)
                    self.add_source_for_key(key, "")

                # Recurse one level into the class members as well.
                try:
                    for ck, cm in inspect.getmembers(m):
                        key = "%s.%s.%s" % (name, k, ck)
                        self.fetch_item_content(key, cm)
                except AttributeError:
                    pass

            else:
                # Anything else (builtins, data, foreign members).
                key = "%s.%s" % (name, k)
                self.fetch_item_content(key, m)
Example #15
0
    def __init__(self, Entity, QEntity=None):
        '''
        Construct the entity support for the provided model class and query class.
        
        @param Entity: class
            The mapped entity model class.
        @param QEntity: class|None
            The query mapped class if there is one.
        '''
        # Precondition checks in assert form (stripped under -O), following
        # the project's convention.
        assert isclass(Entity), 'Invalid class %s' % Entity
        assert issubclass(Entity, api.Entity), 'Invalid entity class %s' % Entity
        assert isinstance(Entity, MappedSupport), 'Invalid mapped class %s' % Entity
        # typeFor resolves the API model type of the mapped class.
        self.modelType = typeFor(Entity)
        assert isinstance(self.modelType, TypeModel), 'Invalid model class %s' % Entity

        self.model = self.modelType.container
        self.Entity = Entity

        if QEntity is not None:
            assert isclass(QEntity), 'Invalid class %s' % QEntity
            assert issubclass(QEntity, api.QEntity), 'Invalid query entity class %s' % QEntity
            self.queryType = typeFor(QEntity)
            assert isinstance(self.queryType, TypeQuery), 'Invalid query class %s' % QEntity
            self.query = self.queryType.query
        else:
            # No query support configured.
            self.query = self.queryType = None
        self.QEntity = QEntity
Example #16
0
    def _should_get_logged(entity_name):  # pylint: disable=
        """ Checks if current call stack of current entity should be logged or not.

        Arguments:
            entity_name - Name of the current entity
        Returns:
            True if the current call stack is to logged, False otherwise
        """
        # Guard clauses FIRST.  The original computed the HALT_TRACKING
        # membership tests before these guards, so when the top of
        # HALT_TRACKING was None and entity_name was a class it crashed with
        # TypeError on tuple(None) before ever reaching the None check.
        # if top of STACK_BOOK is None
        if HALT_TRACKING and HALT_TRACKING[-1] is None:
            return False
        # if call stack is empty
        if not temp_call_stack:
            return False

        if HALT_TRACKING:
            halted = tuple(HALT_TRACKING[-1])
            if inspect.isclass(entity_name):
                # Classes are halted by subclass relationship.
                in_halt = issubclass(entity_name, halted)
            else:
                # Functions are halted by (name, module) identity.
                in_halt = any((entity_name.__name__ == x.__name__ and
                               entity_name.__module__ == x.__module__)
                              for x in halted)
            if in_halt:
                return False

        # Log only call stacks not already recorded for this entity.
        return temp_call_stack not in STACK_BOOK[entity_name]
Example #17
0
def analyse(obj):
    """Recursively build a documentation tree for *obj* (a module or class).

    Returns a dict with keys name/doc/routines/classes/data/bases.
    (Python 2 code: relies on ``types.IntType``/``types.StringType``.)
    """
    members = obj.__dict__
    if inspect.isclass(obj):
        main_doc = preprocess_routine(obj.__name__, get_doc(members))
        bases = [x.__name__ for x in obj.__bases__]
    else:
        main_doc = split_para(get_doc(members))
        bases = []
    routines = {}
    classes = {}
    data = {}
    for name, m in members.items():
        # Skip dunder members.
        if name.startswith('__'): continue
        # Skip objects whose MRO head is not themselves (inherited aliases).
        try:
            mro = list(inspect.getmro(m))
            if mro[0] != m: continue
        except AttributeError: pass
        if inspect.isroutine(m):
            # NOTE(review): if the __doc__ lookup raised KeyError, `doc`
            # would be unbound on the next line -- confirm that cannot
            # happen for the objects passed here.
            try: doc = m.__doc__
            except KeyError: pass
            if not doc: doc = 'FIXME'
            routines[name] = preprocess_routine(name, doc)
            continue
        if inspect.isclass(m):
            # Nested class: analyse recursively.
            classes[name] = analyse(m)
            continue
        t = type(m)
        if t == types.IntType or t == types.StringType:
            data[name] = repr(m)
        else:
            data[name] = m.__doc__
    return {'name': obj.__name__, 'doc': main_doc, 'routines': routines,
            'classes': classes, 'data': data, 'bases': bases}
Example #18
0
    def add_transform(self, fromsys, tosys, transform):
        """
        Register *transform* as the edge from ``fromsys`` to ``tosys`` in
        the transformation graph.

        Parameters
        ----------
        fromsys : class
            Coordinate frame class the transformation starts from.
        tosys : class
            Coordinate frame class the transformation ends at.
        transform : CoordinateTransform or similar callable
            Callable performing the transformation.  Typically a
            `CoordinateTransform` object, although any callable accepting
            the same signature works.

        Raises
        ------
        TypeError
            If ``fromsys`` or ``tosys`` are not classes or ``transform`` is
            not callable.
        """
        for sys_cls, message in ((fromsys, 'fromsys must be a class'),
                                 (tosys, 'tosys must be a class')):
            if not inspect.isclass(sys_cls):
                raise TypeError(message)
        if not six.callable(transform):
            raise TypeError('transform must be callable')

        # Register the edge and drop any cached composite paths.
        self._graph[fromsys][tosys] = transform
        self.invalidate_cache()
Example #19
0
def _rejigger_module(old, new, ignores):
    """Mighty morphin power modules

    Update module *old* in place to match freshly imported *new*: copy or
    replace attributes, recursively rejigger classes/functions that come
    from the module's own file, and drop attributes that no longer exist.
    (Python 2 code: uses ``iteritems``.)
    """
    __internal_swaprefs_ignore__ = "rejigger_module"
    oldVars = vars(old)
    newVars = vars(new)
    # Never follow references into the old module dict itself.
    ignores += (id(oldVars),)
    old.__doc__ = new.__doc__

    # Get filename used by python code
    filename = new.__file__

    for name, value in newVars.iteritems():
        if name in oldVars:
            oldValue = oldVars[name]
            if oldValue is value:
                continue

            # Only rewrite objects actually defined in this module's file.
            if _from_file(filename, value):
                if inspect.isclass(value):
                    _rejigger_class(oldValue, value, ignores)
                    
                elif inspect.isfunction(value):
                    _rejigger_func(oldValue, value, ignores)
        
        setattr(old, name, value)

    # Attributes that vanished in the new module: remove them and clean up
    # any lingering references.
    for name in oldVars.keys():
        if name not in newVars:
            value = getattr(old, name)
            delattr(old, name)
            if _from_file(filename, value):
                if inspect.isclass(value) or inspect.isfunction(value):
                    _remove_refs(value, ignores)
    
    _swap_refs(old, new, ignores)
Example #20
0
def _parse_module(conanfile_module, filename):
    """ Parses a python in-memory module, to extract the classes, mainly the main
    class defining the Recipe, but also process possible existing generators
    @param conanfile_module: the module to be processed
    @param consumer: if this is a root node in the hierarchy, the consumer project
    @return: the main ConanFile class from the module
    """
    result = None
    for name, attr in conanfile_module.__dict__.items():
        # Ignore private names and anything that is not a class.
        if name.startswith("_") or not inspect.isclass(attr):
            continue
        if (issubclass(attr, ConanFile) and attr != ConanFile and
                attr.__dict__["__module__"] == filename):
            if result is None:
                result = attr
            else:
                raise ConanException("More than 1 conanfile in the file")
        if (issubclass(attr, Generator) and attr != Generator and
                attr.__dict__["__module__"] == filename):
            registered_generators.add(attr.__name__, attr)

    if result is None:
        raise ConanException("No subclass of ConanFile")

    return result
Example #21
0
    def __init__(self, fromsys, tosys, priority=1, register_graph=None):
        """Store the transform endpoints and optionally register on a graph.

        ``fromsys``/``tosys`` must be classes; ``priority`` is coerced to
        float.  When ``register_graph`` is given, the transform registers
        itself on that graph (which re-validates on insertion).
        """
        if not inspect.isclass(fromsys):
            raise TypeError('fromsys must be a class')
        if not inspect.isclass(tosys):
            raise TypeError('tosys must be a class')

        self.fromsys = fromsys
        self.tosys = tosys
        self.priority = float(priority)

        if register_graph:
            # this will do the type-checking when it adds to the graph
            self.register(register_graph)
        # (The original re-checked isclass(fromsys)/isclass(tosys) in an else
        # branch here; both were already validated above, so that branch was
        # unreachable dead code and has been removed.)

        self.overlapping_frame_attr_names = overlap = []
        if (hasattr(fromsys, 'get_frame_attr_names') and
                hasattr(tosys, 'get_frame_attr_names')):
            # the if statement is there so that non-frame things might be
            # usable if it makes sense
            for from_nm in fromsys.get_frame_attr_names():
                if from_nm in tosys.get_frame_attr_names():
                    overlap.append(from_nm)
Example #22
0
    def __init__(self, *args, **kwargs):
        """Initialise a column from optional positional arguments.

        Positional forms accepted: an optional name string followed by an
        optional type (a ``Type`` subclass or an instance of one).
        (Python 2 code: uses ``basestring`` and ``u''`` literals.)
        """
        args = list(args)

        self.table = None
        try:
            if isinstance(args[0], basestring):
                self.name = args.pop(0)
            else:
                self.name = None

            # Precedence note: this condition reads as
            #   (isclass(a) and issubclass(a, Type))
            #   or (isclass(type(a)) and issubclass(type(a), Type))
            # i.e. accept either a Type subclass or a Type instance.
            if inspect.isclass(args[0]) and issubclass(args[0], Type)\
                    or inspect.isclass(type(args[0]))\
                    and issubclass(type(args[0]), Type):
                self.type_ = args.pop(0)
            else:
                self.type_ = None
        except IndexError:
            # No argument at the inspected position.
            # NOTE(review): a lone name argument also lands here via the
            # IndexError from the second args[0] access -- confirm intended.
            raise TypeError
        else:
            # Leftover positional arguments are rejected.
            if len(args) > 0:
                raise TypeError()

        self.primary_key = kwargs.get(u'primary_key', False)
        self.nullable = kwargs.get(u'nullable', True)
        self.auto_increment = kwargs.get(u'auto_increment', False)
Example #23
0
def _assert_valid_type_signature(type_sig, type_sig_name):
  """Checks that the given type signature is valid.

  Valid type signatures are either a single Python class, or a dictionary
  mapping string names to Python classes.

  Throws a well formatted exception when invalid.

  Args:
    type_sig: Type signature to validate.
    type_sig_name: Variable name of the type signature. This is used in
        exception descriptions.

  Raises:
    InvalidTypeSignatureException: If `type_sig` is not valid.
  """
  if not isinstance(type_sig, dict):
    # Non-dict signatures must be a bare class.
    if not inspect.isclass(type_sig):
      raise InvalidTypeSignatureException(
          '%s %s must be a Python class.' % (type_sig_name, type_sig))
    return

  # Dict signatures: every key must be a string, every value a class.
  for key, value in type_sig.items():
    if not isinstance(key, basestring):
      raise InvalidTypeSignatureException(
          '%s key %s must be a string.' % (type_sig_name, key))
    if not inspect.isclass(value):
      raise InvalidTypeSignatureException(
          '%s %s at key %s must be a Python class.' % (type_sig_name, value, key))
Example #24
0
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP and FTP.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        default_classes.append(HTTPSHandler)

    def _overridden(default):
        # A default is superseded when the caller supplied a subclass of it
        # (as a class) or an instance of it.
        for check in handlers:
            if inspect.isclass(check):
                if issubclass(check, default):
                    return True
            elif isinstance(check, default):
                return True
        return False

    for klass in default_classes:
        if not _overridden(klass):
            opener.add_handler(klass())

    # Caller-supplied handlers go in last; classes are instantiated.
    for handler in handlers:
        opener.add_handler(handler() if inspect.isclass(handler) else handler)
    return opener
Example #25
0
    def transform_params_to_database(self, params):
        """
        Takes a dictionary of parameters and recursively translates them into
        parameters appropriate for sending over Rexster.

        :param params: The parameters to be sent to the function
        :type params: dict

        """
        import inspect
        from datetime import datetime
        from decimal import Decimal as _Decimal
        from uuid import UUID as _UUID
        from thunderdome.models import BaseElement, Edge, Vertex
        from thunderdome.properties import DateTime, Decimal, UUID

        recurse = self.transform_params_to_database

        # Containers are translated element-by-element.
        if isinstance(params, dict):
            return dict((key, recurse(value)) for key, value in params.iteritems())
        if isinstance(params, list):
            return [recurse(item) for item in params]
        # Graph elements are referenced by their eid.
        if isinstance(params, BaseElement):
            return params.eid
        if inspect.isclass(params):
            # Edge/Vertex classes are referenced by label / element type.
            if issubclass(params, Edge):
                return params.label
            if issubclass(params, Vertex):
                return params.element_type
            return params
        # Scalar types with dedicated database encodings.
        if isinstance(params, datetime):
            return DateTime().to_database(params)
        if isinstance(params, _UUID):
            return UUID().to_database(params)
        if isinstance(params, _Decimal):
            return Decimal().to_database(params)
        return params
Example #26
0
def create_class(class_name, dynamic_imports):
    """Dynamically creates a class.

    It is tried if the class can be created by the already given imports.
    If not the list of the dynamically loaded classes is used.

    """
    try:
        # Fast path: the class is already visible in this module's globals.
        new_class = globals()[class_name]
        if not inspect.isclass(new_class):
            raise TypeError('Not a class!')
        return new_class
    except (KeyError, TypeError):
        for dynamic_class in dynamic_imports:
            # Dynamic classes can be provided directly as a Class instance,
            # for example as `MyCustomParameter`,
            # or as a string describing where to import the class from,
            # for instance as `'mypackage.mymodule.MyCustomParameter'`.
            if inspect.isclass(dynamic_class):
                if class_name == dynamic_class.__name__:
                    return dynamic_class
            else:
                # The class name is always the last in an import string,
                # e.g. `'mypackage.mymodule.MyCustomParameter'`
                if class_name == dynamic_class.split('.')[-1]:
                    return load_class(dynamic_class)
        raise ImportError('Could not create the class named `%s`.' % class_name)
Example #27
0
def _check_for_unknown_fields(body, structure, path):
    """Check `body` for any keys not present in `structure`.

    This only checks the first level of keys.  Any keys from :class:`dict`s in
    the `body`\ 's values will not be checked.

    """
    type_keys = tuple([key for key in structure if isclass(key)])
    existing_fields = set([key for key in body if not isclass(key)])
    unknown_fields = existing_fields.difference(structure.keys())
    # If there are valid types for a key filter out unknown fields that match a
    # type.
    if type_keys:
        unknown_fields = [key for key in unknown_fields
                          if not isinstance(key, type_keys)]

    if unknown_fields:
        unknown_fields = ', '.join([repr(field) for field in unknown_fields])
        if path:
            err = ('Encountered field(s), in subdocument at {0},'
                   ' not present in structure: {1}'.format(
                       path, unknown_fields))
        else:
            err = 'Encountered field(s) not present in structure: {0}'.format(
                unknown_fields)

        raise ValidationError(err)
Example #28
0
 def get_scope_objects(names):
     """
     Looks for the names defined with dir() in an objects and divides
     them into different object types.
     """
     classes, funcs, stmts, members = {}, {}, {}, {}
     for attr_name in names:
         if '__' in attr_name and attr_name not in mixin_funcs:
             continue
         try:
             # getattr may return a builtin_function_or_method here
             attr = getattr(scope, attr_name)
         except AttributeError:
             # happens e.g. in properties of
             # PyQt4.QtGui.QStyleOptionComboBox.currentText
             # -> just set it to None
             members[attr_name] = None
             continue
         if inspect.isclass(scope) and is_in_base_classes(scope, attr_name, attr):
             # inherited attributes are reported on the base class instead
             continue
         if (inspect.isbuiltin(attr) or inspect.ismethod(attr)
                 or inspect.ismethoddescriptor(attr)):
             funcs[attr_name] = attr
         elif inspect.isclass(attr):
             classes[attr_name] = attr
         elif inspect.ismemberdescriptor(attr):
             members[attr_name] = attr
         else:
             stmts[attr_name] = attr
     return classes, funcs, stmts, members
Example #29
0
def _make_constraint(c):
    """Build a constraint object from shorthand notation `c`.

    Accepted shorthands:
      * ``None``            -> matches anything (``_any()``)
      * 1-item list         -> ``sequenceof()`` of its element
      * longer list         -> ``tupleof()`` of its elements
      * 1-item dict         -> ``mappingof(key, value)``
      * tuple of classes    -> type constraint over those types
      * any other tuple     -> value constraint (enumerated values)
      * a class             -> type constraint
      * ``_constraint``     -> used as-is
      * any other callable  -> ``transform()`` constraint
    """
    if c is None:
        return _any()
    elif isinstance(c, list):
        if len(c) == 1:
            # single-item list: sequenceof() constraint
            return sequenceof(c[0])
        else:
            # list: tupleof() constraint
            return tupleof(*c)
    elif isinstance(c, dict):
        assert len(c) == 1
        # single-item dict: mappingof() constraint
        return mappingof(list(c.keys())[0], list(c.values())[0])
    elif isinstance(c, tuple):
        if all(inspect.isclass(cc) for cc in c):
            # multiple types: type constraint
            return _type_constraint(c, True)
        else:
            # any other tuple: value constraint
            return _value_constraint(c)
    elif inspect.isclass(c):
        # single type: type constraint
        return _type_constraint(c)
    elif isinstance(c, _constraint):
        # constraint object
        return c
    elif callable(c):
        # function or other callable object.
        # NOTE: the original used `collections.Callable`, an alias that was
        # removed in Python 3.10; the builtin callable() is equivalent.
        return transform(c)
    else:
        assert False
Example #30
0
    def list(self, attribute="run"):
        '''
        Finds all modules with the specified attribute.

        @attribute - The desired module attribute.

        Returns a list of modules that contain the specified attribute, in the order they should be executed.
        '''
        import binwalk.modules
        modules = {}

        # Built-in modules shipped with binwalk.
        for (name, module) in inspect.getmembers(binwalk.modules):
            if inspect.isclass(module) and hasattr(module, attribute):
                modules[module] = module.PRIORITY

        # user-defined modules
        import imp
        user_modules = binwalk.core.settings.Settings().user.modules
        for file_name in os.listdir(user_modules):
            if not file_name.endswith('.py'):
                continue
            module_name = file_name[:-3]
            try:
                user_module = imp.load_source(module_name, os.path.join(user_modules, file_name))
            except KeyboardInterrupt as e:
                raise e
            except Exception as e:
                binwalk.core.common.warning("Error loading module '%s': %s" % (file_name, str(e)))
                # Skip this file: without this `continue`, `user_module` would
                # be undefined (first iteration) or stale (a previously loaded
                # module) in the getmembers() call below.
                continue

            for (name, module) in inspect.getmembers(user_module):
                if inspect.isclass(module) and hasattr(module, attribute):
                    modules[module] = module.PRIORITY

        # Highest PRIORITY first.
        return sorted(modules, key=modules.get, reverse=True)
Example #31
0
    def inspect_call(
        cls,
        node: ast.Call,
        gctx: EvalMainContext,
        mod: ModuleType,
        var_names: Set[LocalVar],
        call_stack: List[CanonicalPath],
    ) -> Union[FunctionIndirectInteractions, DDSPath, None]:
        """Analyze one ``ast.Call`` node found while introspecting a function.

        Parameters
        ----------
        node : the call expression being inspected.
        gctx : evaluation context used to resolve names to objects.
        mod : the module in which the call appears.
        var_names : names bound locally in the enclosing function; calls whose
            head matches one of these are skipped (cannot be resolved
            statically).
        call_stack : canonical paths of functions currently being
            introspected, used to detect circular / (co-)recursive calls.

        Returns
        -------
        ``None`` when the call is ignored (head is a local variable or an
        external object), a ``DDSPath`` for ``dds.load(path)`` calls, or the
        result of recursively introspecting the callee (for ``dds.keep`` and
        plain calls to authorized functions).

        Raises
        ------
        DDSException on unsupported callable types, on ``dds.eval`` nested
        inside an eval, and on circular call chains.
        """
        local_path = LocalDepPath(
            PurePosixPath("/".join(_function_name(node.func))))
        # _logger.debug(f"inspect_call: local_path: %s", local_path)
        # We may do sub-method calls on an object -> filter out based on the name of the object
        if str(local_path.parts[0]) in var_names:
            # _logger.debug(
            #     f"inspect_call: local_path: %s is rejected (head in vars)", local_path
            # )
            return None

        # _logger.debug(f"inspect_call:local_path:{local_path} mod:{mod}\n %s", pformat(node))
        z: ObjectRetrievalType = ObjectRetrieval.retrieve_object(
            local_path, mod, gctx)
        # _logger.debug(f"inspect_call:local_path:{local_path} mod:{mod} z:{z}")
        if z is None or isinstance(z, ExternalObject):
            # _logger.debug(f"inspect_call: local_path: %s is rejected", local_path)
            return None
        assert isinstance(z, AuthorizedObject)
        caller_fun, caller_fun_path = z.object_val, z.resolved_path
        # Only plain functions and classes are supported as callees.
        if not isinstance(caller_fun,
                          FunctionType) and not inspect.isclass(caller_fun):
            raise DDSException(
                f"Expected FunctionType or class for {caller_fun_path}, got {type(caller_fun)}",
                DDSErrorCode.UNSUPPORTED_CALLABLE_TYPE,
            )

        # Check if this is a call we should do something about.
        if caller_fun_path == CPU.from_list(["dds", "keep"]):
            # Call to the keep function:
            # - bring the path
            # - bring the callee
            # - parse the arguments
            # - introspect the callee
            if len(node.args) < 2:
                raise DDSException(
                    f"Wrong number of args: expected 2+, got {node.args}")
            store_path = InspectFunction._retrieve_store_path(
                node.args[0], mod, gctx, local_path)
            called_path_ast = node.args[1]
            # The callee must be referenced by a simple name (ast.Name);
            # lambdas, attributes and other expressions are rejected.
            if isinstance(called_path_ast, ast.Name):
                called_path_symbol = node.args[1].id  # type: ignore
            else:
                raise DDSException(
                    f"Introspection of {local_path} failed: cannot use nested callables of"
                    f" type {called_path_ast}. Only "
                    f"regular function names are allowed for now. Suggestion: if you are "
                    f"using a complex callable such as a method, wrap it inside a top-level "
                    f"function.",
                    DDSErrorCode.UNSUPPORTED_CALLABLE_TYPE,
                )
            called_local_path = LocalDepPath(PurePosixPath(called_path_symbol))
            called_z: ObjectRetrievalType = ObjectRetrieval.retrieve_object(
                called_local_path, mod, gctx)
            if not called_z or isinstance(called_z, ExternalObject):
                # Not sure what to do yet in this case.
                raise DDSException(
                    f"Introspection of {local_path} failed: cannot access called function"
                    f" {called_local_path}. The function {called_local_path} was expected "
                    f"to be found in module {mod}, but could not be retrieved. The usual reason is"
                    f"that that this object is not a regular top-level function. "
                    f"Suggestion: ensure that this function is a top-level function.",
                    DDSErrorCode.UNSUPPORTED_CALLABLE_TYPE,
                )
            assert isinstance(called_z, AuthorizedObject)
            called_fun, call_fun_path = called_z.object_val, called_z.resolved_path
            # Reject (co-)recursion before descending into the callee.
            if call_fun_path in call_stack:
                raise DDSException(
                    f"Detected circular function calls or (co-)recursive calls."
                    f"This is currently not supported. Change your code to split the "
                    f"recursive section into a separate function. "
                    f"Function: {call_fun_path}"
                    f"Call stack: {' '.join([str(p) for p in call_stack])}",
                    DDSErrorCode.CIRCULAR_CALL,
                )
            new_call_stack = call_stack + [call_fun_path]
            # For now, accept the constant arguments. This is enough for some basic objects.
            inner_intro = _introspect(called_fun, gctx, new_call_stack)
            inner_intro = inner_intro._replace(store_path=store_path)
            return inner_intro
        if caller_fun_path == CPU.from_list(["dds", "load"]):
            # Evaluation call: get the argument and returns the function interaction for this call.
            if len(node.args) != 1:
                raise DDSException(
                    f"Wrong number of args: expected 1, got {node.args}")
            store_path = InspectFunction._retrieve_store_path(
                node.args[0], mod, gctx, local_path)
            _logger.debug(f"inspect_call:eval: store_path: {store_path}")
            return store_path

        # dds.eval may not be nested inside code that is already being evaluated.
        if caller_fun_path == CPU.from_list(["dds", "eval"]):
            raise DDSException(
                f"Cannot process {local_path}: this function is calling dds.eval, which"
                f" is not allowed inside other eval calls. Suggestion: remove the "
                f"call to dds.eval inside {local_path}",
                DDSErrorCode.EVAL_IN_EVAL,
            )

        if caller_fun_path in call_stack:
            raise DDSException(
                f"Detected circular function calls or (co-)recursive calls."
                f"This is currently not supported. Change your code to split the "
                f"recursive section into a separate function. "
                f"Function: {caller_fun_path}"
                f"Call stack: {' '.join([str(p) for p in call_stack])}",
                DDSErrorCode.CIRCULAR_CALL,
            )
        # Normal function call.
        new_call_stack = call_stack + [caller_fun_path]
        return _introspect(caller_fun, gctx, new_call_stack)
Example #32
0
def _is_proper_charge_subclass(attr):
    return inspect.isclass(attr) and issubclass(attr, Charge) and attr != Charge
Example #33
0
 def is_class(self):
     """Return True when the wrapped object (`self.obj`) is a class."""
     wrapped = self.obj
     return inspect.isclass(wrapped)
Example #34
0
 def actual(evaluator, params):
     """Instantiate when wrapping a class, otherwise execute the function."""
     if not inspect.isclass(self.obj):
         return list(self._execute_function(evaluator, params))
     # Import is deferred to the class branch, matching jedi's lazy style.
     from jedi.evaluate.representation import Instance
     return [Instance(evaluator, self, params)]
Example #35
0
def factory(name):
    """Return the class named `name` defined in this module, or None."""
    this_module = sys.modules[__name__]
    for member_name, member in inspect.getmembers(this_module):
        if inspect.isclass(member) and member_name == name:
            return member
    return None
Example #36
0
def scan(name, o, prefix=""):
    """Scan the docstring of `o` for documentation directives and, when a
    section is declared, append a reST directive plus the docstring body to
    that section's `line_buffer`.

    Recognized directives inside the docstring:
      :doc: <section> [<type>]  -- target section and optional directive type
                                   (defaults to "function").
      :args: <text>             -- explicit argument list (skips inspection).
      :name: <name>             -- only document when `name` matches.

    Recurses into the attributes of classes.  Returns None.
    """

    doc_type = "function"

    # The section it's going into.
    section = None

    # The formatted arguments.
    args = None

    # Get the function's docstring.
    doc = inspect.getdoc(o)

    if not doc:
        return

    # Break up the doc string, scan it for specials.
    lines = []

    for l in doc.split("\n"):
        m = re.match(r':doc: *(\w+) *(\w+)?', l)
        if m:
            section = m.group(1)

            if m.group(2):
                doc_type = m.group(2)

            continue

        m = re.match(r':args: *(.*)', l)
        if m:
            args = m.group(1)
            continue

        m = re.match(r':name: *(\S+)', l)
        if m:
            if name != m.group(1):
                return
            continue

        lines.append(l)

    if section is None:
        return

    if args is None:

        # Get the arguments.
        if inspect.isclass(o):
            init = getattr(o, "__init__", None)
            if not init:
                return

            init_doc = inspect.getdoc(init)

            if init_doc and not init_doc.startswith("x.__init__("):
                lines.append("")
                lines.extend(init_doc.split("\n"))

            try:
                args = inspect.getargspec(init)
            except Exception:
                # Uninspectable __init__ (e.g. C-implemented slot wrapper).
                # Was a bare `except:`, which also swallowed KeyboardInterrupt
                # and SystemExit; Exception keeps the best-effort behavior
                # without hiding those.
                args = None

        elif inspect.isfunction(o):
            args = inspect.getargspec(o)

        elif inspect.ismethod(o):
            args = inspect.getargspec(o)

        else:
            print("Warning: %s has section but not args." % name)

            return

        # Format the arguments.
        if args is not None:

            args = inspect.formatargspec(*args)
            args = args.replace("(self, ", "(")
        else:
            args = "()"

    # Put it into the line buffer.
    lb = line_buffer[section]

    lb.append(prefix + ".. %s:: %s%s" % (doc_type, name, args))

    for l in lines:
        lb.append(prefix + "    " + l)

    lb.append(prefix + "")

    # Classes document their members one indent level deeper.
    if inspect.isclass(o):
        for i in dir(o):
            scan(i, getattr(o, i), prefix + "    ")
Example #37
0
    def update(self, data_changed=True, **kwargs):
        """Redraw the image from the current data.

        Parameters
        ----------
        data_changed : bool, default True
            When True the data is re-read via ``self._update_data()``.
        **kwargs
            ``optimize_contrast`` (bool) is popped and used locally; on the
            first draw the remaining keys are forwarded to ``imshow``
            (``cmap`` is also inspected for the diverging-colormap check).
        """
        if data_changed:
            # When working with lazy signals the following may reread the data
            # from disk unnecessarily, for example when updating the image just
            # to recompute the histogram to adjust the contrast. In those cases
            # pass `data_changed=False` to skip the reread.
            _logger.debug("Updating image slowly because `data_changed=True`")
            self._update_data()
        data = self._current_data
        optimize_contrast = kwargs.pop("optimize_contrast", False)
        if rgb_tools.is_rgbx(data):
            # RGB(A) data carries its own colors: disable the colorbar and
            # convert the structured array to a plain plot-friendly array.
            self.colorbar = False
            data = rgb_tools.rgbx2regular_array(data, plot_friendly=True)
            data = self._current_data = data
            self._is_rgb = True
        ims = self.ax.images
        # update extent: axis values are pixel centres, so pad by half a
        # pixel on each side to align pixel edges with the axes frame.
        self._extent = (self.xaxis.axis[0] - self.xaxis.scale / 2.,
                        self.xaxis.axis[-1] + self.xaxis.scale / 2.,
                        self.yaxis.axis[-1] + self.yaxis.scale / 2.,
                        self.yaxis.axis[0] - self.yaxis.scale / 2.)

        # Turn on centre_colormap if a diverging colormap is used.
        if not self._is_rgb and self.centre_colormap == "auto":
            if "cmap" in kwargs:
                cmap = kwargs["cmap"]
            elif ims:
                cmap = ims[0].get_cmap().name
            else:
                cmap = plt.cm.get_cmap().name
            if cmap in utils.MPL_DIVERGING_COLORMAPS:
                self.centre_colormap = True
            else:
                self.centre_colormap = False
        redraw_colorbar = False

        for marker in self.ax_markers:
            marker.update()

        if not self._is_rgb:

            def format_coord(x, y):
                # Status-bar formatter: shows the intensity under the cursor
                # when it lies inside the axes and the value is finite.
                try:
                    col = self.xaxis.value2index(x)
                except ValueError:  # out of axes limits
                    col = -1
                try:
                    row = self.yaxis.value2index(y)
                except ValueError:
                    row = -1
                if col >= 0 and row >= 0:
                    z = data[row, col]
                    if np.isfinite(z):
                        return f'x={x:1.4g}, y={y:1.4g}, intensity={z:1.4g}'
                return f'x={x:1.4g}, y={y:1.4g}'

            self.ax.format_coord = format_coord

            old_vmin, old_vmax = self.vmin, self.vmax
            self.optimize_contrast(data, optimize_contrast)
            # Use _vmin_auto and _vmax_auto if optimize_contrast is True
            if optimize_contrast:
                vmin, vmax = self._vmin_auto, self._vmax_auto
            else:
                vmin, vmax = self.vmin, self.vmax
            # If there is an image, any of the contrast bounds have changed and
            # the new contrast bounds are not the same redraw the colorbar.
            if (ims and (old_vmin != vmin or old_vmax != vmax)
                    and vmin != vmax):
                redraw_colorbar = True
                ims[0].autoscale()
            if self.centre_colormap:
                vmin, vmax = utils.centre_colormap_values(vmin, vmax)
            else:
                # no-op kept for symmetry with the branch above
                vmin, vmax = vmin, vmax

            if self.norm == 'auto' and self.gamma != 1.0:
                self.norm = 'power'
            norm = copy.copy(self.norm)
            if norm == 'power':
                # with auto norm, we use the power norm when gamma differs from its
                # default value.
                norm = PowerNorm(self.gamma, vmin=vmin, vmax=vmax)
            elif norm == 'log':
                if np.nanmax(data) <= 0:
                    raise ValueError(
                        'All displayed data are <= 0 and can not '
                        'be plotted using `norm="log"`. '
                        'Use `norm="symlog"` to plot on a log scale.')
                if np.nanmin(data) <= 0:
                    # clamp vmin to the smallest strictly positive value
                    vmin = np.nanmin(np.where(data > 0, data, np.inf))

                norm = LogNorm(vmin=vmin, vmax=vmax)
            elif norm == 'symlog':
                sym_log_kwargs = {
                    'linthresh': self.linthresh,
                    'linscale': self.linscale,
                    'vmin': vmin,
                    'vmax': vmax
                }
                if LooseVersion(matplotlib.__version__) >= LooseVersion("3.2"):
                    # `base` keyword only exists from matplotlib 3.2 on
                    sym_log_kwargs['base'] = 10
                norm = SymLogNorm(**sym_log_kwargs)
            elif inspect.isclass(norm) and issubclass(norm, Normalize):
                # a Normalize subclass was passed: instantiate it
                norm = norm(vmin=vmin, vmax=vmax)
            elif norm not in ['auto', 'linear']:
                raise ValueError(
                    "`norm` paramater should be 'auto', 'linear', "
                    "'log', 'symlog' or a matplotlib Normalize  "
                    "instance or subclass.")
            else:
                # set back to matplotlib default
                norm = None
        redraw_colorbar = redraw_colorbar and self.colorbar

        if self.plot_indices is True:
            self._text.set_text(self.axes_manager.indices)
        if self.no_nans:
            data = np.nan_to_num(data)

        if ims:  # the images has already been drawn previously
            ims[0].set_data(data)
            self.ax.set_xlim(self._extent[:2])
            self.ax.set_ylim(self._extent[2:])
            ims[0].set_extent(self._extent)
            self._calculate_aspect()
            self.ax.set_aspect(self._aspect)
            if not self._is_rgb:
                ims[0].set_norm(norm)
                ims[0].norm.vmax, ims[0].norm.vmin = vmax, vmin
            if redraw_colorbar:
                # ims[0].autoscale()
                self._colorbar.draw_all()
                self._colorbar.solids.set_animated(
                    self.figure.canvas.supports_blit)
            else:
                ims[0].changed()
            if self.figure.canvas.supports_blit:
                self._update_animated()
            else:
                self.figure.canvas.draw_idle()
        else:  # no signal have been drawn yet
            new_args = {
                'interpolation': 'nearest',
                'extent': self._extent,
                'aspect': self._aspect,
                'animated': self.figure.canvas.supports_blit,
            }
            if not self._is_rgb:
                if norm is None:
                    new_args.update({'vmin': vmin, 'vmax': vmax})
                else:
                    new_args['norm'] = norm
            new_args.update(kwargs)
            self.ax.imshow(data, **new_args)
            self.figure.canvas.draw_idle()

        if self.axes_ticks == 'off':
            self.ax.set_axis_off()
Example #38
0
def safe_patch(autologging_integration,
               destination,
               function_name,
               patch_function,
               manage_run=False):
    """
    Patches the specified `function_name` on the specified `destination` class for autologging
    purposes, preceding its implementation with an error-safe copy of the specified patch
    `patch_function` with the following error handling behavior:
        - Exceptions thrown from the underlying / original function
          (`<destination>.<function_name>`) are propagated to the caller.
        - Exceptions thrown from other parts of the patched implementation (`patch_function`)
          are caught and logged as warnings.
    :param autologging_integration: The name of the autologging integration associated with the
                                    patch.
    :param destination: The Python class on which the patch is being defined.
    :param function_name: The name of the function to patch on the specified `destination` class.
    :param patch_function: The patched function code to apply. This is either a `PatchFunction`
                           class definition or a function object. If it is a function object, the
                           first argument should be reserved for an `original` method argument
                           representing the underlying / original function. Subsequent arguments
                           should be identical to those of the original function being patched.
    :param manage_run: If `True`, applies the `with_managed_run` wrapper to the specified
                       `patch_function`, which automatically creates & terminates an MLflow
                       active run during patch code execution if necessary. If `False`,
                       does not apply the `with_managed_run` wrapper to the specified
                       `patch_function`.
    """
    from mlflow.utils.autologging_utils import get_autologging_config, autologging_is_disabled

    if manage_run:
        patch_function = with_managed_run(
            autologging_integration,
            patch_function,
            tags={MLFLOW_AUTOLOGGING: autologging_integration},
        )

    patch_is_class = inspect.isclass(patch_function)
    if patch_is_class:
        assert issubclass(patch_function, PatchFunction)
    else:
        assert callable(patch_function)

    original_fn = gorilla.get_original_attribute(
        destination, function_name, bypass_descriptor_protocol=False)
    # Retrieve raw attribute while bypassing the descriptor protocol
    raw_original_obj = gorilla.get_original_attribute(
        destination, function_name, bypass_descriptor_protocol=True)
    if original_fn != raw_original_obj:
        raise RuntimeError(
            f"Unsupport patch on {str(destination)}.{function_name}")
    elif isinstance(original_fn, property):
        is_property_method = True

        # For property decorated methods (a kind of method delegation), e.g.
        # class A:
        #   @property
        #   def f1(self):
        #     ...
        #     return delegated_f1
        #
        # suppose `a1` is an instance of class `A`,
        # `A.f1.fget` will get the original `def f1(self)` method,
        # and `A.f1.fget(a1)` will be equivalent to `a1.f1()` and
        # its return value will be the `delegated_f1` function.
        # So using the `property.fget` we can construct the (delegated) "original_fn"
        def original(self, *args, **kwargs):
            # the `original_fn.fget` will get the original method decorated by `property`
            # the `original_fn.fget(self)` will get the delegated function returned by the
            # property decorated method.
            bound_delegate_method = original_fn.fget(self)
            return bound_delegate_method(*args, **kwargs)

    else:
        original = original_fn
        is_property_method = False

    def safe_patch_function(*args, **kwargs):
        """
        A safe wrapper around the specified `patch_function` implementation designed to
        handle exceptions thrown during the execution of `patch_function`. This wrapper
        distinguishes exceptions thrown from the underlying / original function
        (`<destination>.<function_name>`) from exceptions thrown from other parts of
        `patch_function`. This distinction is made by passing an augmented version of the
        underlying / original function to `patch_function` that uses nonlocal state to track
        whether or not it has been executed and whether or not it threw an exception.
        Exceptions thrown from the underlying / original function are propagated to the caller,
        while exceptions thrown from other parts of `patch_function` are caught and logged as
        warnings.
        """
        # Reroute warnings encountered during the patch function implementation to an MLflow event
        # logger, and enforce silent mode if applicable (i.e. if the corresponding autologging
        # integration was called with `silent=True`), hiding MLflow event logging statements and
        # hiding all warnings in the autologging preamble and postamble (i.e. the code surrounding
        # the user's original / underlying ML function). Non-MLflow warnings are enabled during the
        # execution of the original / underlying ML function
        #
        # Note that we've opted *not* to apply this context manager as a decorator on
        # `safe_patch_function` because the context-manager-as-decorator pattern uses
        # `contextlib.ContextDecorator`, which creates generator expressions that cannot be pickled
        # during model serialization by ML frameworks such as scikit-learn
        is_silent_mode = get_autologging_config(autologging_integration,
                                                "silent", False)
        with set_mlflow_events_and_warnings_behavior_globally(
                # MLflow warnings emitted during autologging training sessions are likely not
                # actionable and result from the autologging implementation invoking another MLflow
                # API. Accordingly, we reroute these warnings to the MLflow event logger with level
                # WARNING For reference, see recommended warning and event logging behaviors from
                # https://docs.python.org/3/howto/logging.html#when-to-use-logging
                reroute_warnings=True,
                disable_event_logs=is_silent_mode,
                disable_warnings=is_silent_mode,
        ), set_non_mlflow_warnings_behavior_for_current_thread(
                # non-MLflow Warnings emitted during the autologging preamble (before the original /
                # underlying ML function is called) and postamble (after the original / underlying ML
                # function is called) are likely not actionable and result from the autologging
                # implementation invoking an API from a dependent library. Accordingly, we reroute
                # these warnings to the MLflow event logger with level WARNING. For reference, see
                # recommended warning and event logging behaviors from
                # https://docs.python.org/3/howto/logging.html#when-to-use-logging
                reroute_warnings=True,
                disable_warnings=is_silent_mode,
        ):

            if is_testing():
                preexisting_run_for_testing = mlflow.active_run()

            # Whether or not to exclude autologged content from user-created fluent runs
            # (i.e. runs created manually via `mlflow.start_run()`)
            exclusive = get_autologging_config(autologging_integration,
                                               "exclusive", False)
            user_created_fluent_run_is_active = (
                mlflow.active_run()
                and not _AutologgingSessionManager.active_session())
            active_session_failed = (
                _AutologgingSessionManager.active_session() is not None and
                _AutologgingSessionManager.active_session().state == "failed")

            if (active_session_failed
                    or autologging_is_disabled(autologging_integration)
                    or (user_created_fluent_run_is_active and exclusive)
                    or mlflow.utils.autologging_utils.
                    _AUTOLOGGING_GLOBALLY_DISABLED):
                # If the autologging integration associated with this patch is disabled,
                # or if the current autologging integration is in exclusive mode and a user-created
                # fluent run is active, call the original function and return. Restore the original
                # warning behavior during original function execution, since autologging is being
                # skipped
                with set_non_mlflow_warnings_behavior_for_current_thread(
                        disable_warnings=False,
                        reroute_warnings=False,
                ):
                    return original(*args, **kwargs)

            # Whether or not the original / underlying function has been called during the
            # execution of patched code
            original_has_been_called = False
            # The value returned by the call to the original / underlying function during
            # the execution of patched code
            original_result = None
            # Whether or not an exception was raised from within the original / underlying function
            # during the execution of patched code
            failed_during_original = False
            # The active MLflow run (if any) associated with patch code execution
            patch_function_run_for_testing = None
            # The exception raised during executing patching function
            patch_function_exception = None

            def try_log_autologging_event(log_fn, *args):
                try:
                    log_fn(*args)
                except Exception as e:
                    _logger.debug(
                        "Failed to log autologging event via '%s'. Exception: %s",
                        log_fn,
                        e,
                    )

            def call_original_fn_with_event_logging(original_fn, og_args,
                                                    og_kwargs):
                try:
                    try_log_autologging_event(
                        AutologgingEventLogger.get_logger().
                        log_original_function_start,
                        session,
                        destination,
                        function_name,
                        og_args,
                        og_kwargs,
                    )
                    original_fn_result = original_fn(*og_args, **og_kwargs)

                    try_log_autologging_event(
                        AutologgingEventLogger.get_logger().
                        log_original_function_success,
                        session,
                        destination,
                        function_name,
                        og_args,
                        og_kwargs,
                    )
                    return original_fn_result
                except Exception as original_fn_e:
                    try_log_autologging_event(
                        AutologgingEventLogger.get_logger().
                        log_original_function_error,
                        session,
                        destination,
                        function_name,
                        og_args,
                        og_kwargs,
                        original_fn_e,
                    )

                    nonlocal failed_during_original
                    failed_during_original = True
                    raise

            with _AutologgingSessionManager.start_session(
                    autologging_integration) as session:
                try:

                    def call_original(*og_args, **og_kwargs):
                        def _original_fn(*_og_args, **_og_kwargs):
                            if is_testing():
                                _validate_args(args, kwargs, og_args,
                                               og_kwargs)
                                # By the time `original` is called by the patch implementation, we
                                # assume that either: 1. the patch implementation has already
                                # created an MLflow run or 2. the patch code will not create an
                                # MLflow run during the current execution. Here, we capture a
                                # reference to the active run, which we will use later on to
                                # determine whether or not the patch implementation created
                                # a run and perform validation if necessary
                                nonlocal patch_function_run_for_testing
                                patch_function_run_for_testing = mlflow.active_run(
                                )

                            nonlocal original_has_been_called
                            original_has_been_called = True

                            nonlocal original_result
                            # Show all non-MLflow warnings as normal (i.e. not as event logs)
                            # during original function execution, even if silent mode is enabled
                            # (`silent=True`), since these warnings originate from the ML framework
                            # or one of its dependencies and are likely relevant to the caller
                            with set_non_mlflow_warnings_behavior_for_current_thread(
                                    disable_warnings=False,
                                    reroute_warnings=False,
                            ):
                                original_result = original(
                                    *_og_args, **_og_kwargs)
                                return original_result

                        return call_original_fn_with_event_logging(
                            _original_fn, og_args, og_kwargs)

                    # Apply the name, docstring, and signature of `original` to `call_original`.
                    # This is important because several autologging patch implementations inspect
                    # the signature of the `original` argument during execution
                    call_original = update_wrapper_extended(
                        call_original, original)

                    try_log_autologging_event(
                        AutologgingEventLogger.get_logger().
                        log_patch_function_start,
                        session,
                        destination,
                        function_name,
                        args,
                        kwargs,
                    )

                    if patch_is_class:
                        patch_function.call(call_original, *args, **kwargs)
                    else:
                        patch_function(call_original, *args, **kwargs)

                    session.state = "succeeded"

                    try_log_autologging_event(
                        AutologgingEventLogger.get_logger().
                        log_patch_function_success,
                        session,
                        destination,
                        function_name,
                        args,
                        kwargs,
                    )
                except Exception as e:
                    session.state = "failed"
                    patch_function_exception = e
                    # Exceptions thrown during execution of the original function should be
                    # propagated to the caller. Additionally, exceptions encountered during test
                    # mode should be reraised to detect bugs in autologging implementations
                    if failed_during_original or is_testing():
                        raise

                if is_testing() and not preexisting_run_for_testing:
                    # If an MLflow run was created during the execution of patch code, verify that
                    # it is no longer active and that it contains expected autologging tags
                    assert not mlflow.active_run(), (
                        "Autologging integration %s leaked an active run" %
                        autologging_integration)
                    if patch_function_run_for_testing:
                        _validate_autologging_run(
                            autologging_integration,
                            patch_function_run_for_testing.info.run_id)
                try:
                    if original_has_been_called:
                        return original_result
                    else:
                        return call_original_fn_with_event_logging(
                            original, args, kwargs)
                finally:
                    # If original function succeeds, but `patch_function_exception` exists,
                    # it represent patching code unexpected failure, so we call
                    # `log_patch_function_error` in this case.
                    # If original function failed, we don't call `log_patch_function_error`
                    # even if `patch_function_exception` exists, because original function failure
                    # means there's some error in user code (e.g. user provide wrong arguments)
                    if patch_function_exception is not None and not failed_during_original:
                        try_log_autologging_event(
                            AutologgingEventLogger.get_logger().
                            log_patch_function_error,
                            session,
                            destination,
                            function_name,
                            args,
                            kwargs,
                            patch_function_exception,
                        )

                        _logger.warning(
                            "Encountered unexpected error during %s autologging: %s",
                            autologging_integration,
                            patch_function_exception,
                        )

    if is_property_method:
        # Create a patched function (also property decorated)
        # like:
        #
        # class A:
        # @property
        # def get_bound_safe_patch_fn(self):
        #   original_fn.fget(self) # do availability check
        #   return bound_safe_patch_fn
        #
        # Suppose `a1` is instance of class A,
        # then `a1.get_bound_safe_patch_fn(*args, **kwargs)` will be equivalent to
        # `bound_safe_patch_fn(*args, **kwargs)`
        def get_bound_safe_patch_fn(self):
            # This `original_fn.fget` call is for availability check, if it raise error
            # then `hasattr(obj, {func_name})` will return False
            # so it mimic the original property behavior.
            original_fn.fget(self)

            def bound_safe_patch_fn(*args, **kwargs):
                return safe_patch_function(self, *args, **kwargs)

            # Make bound method `instance.target_method` keep the same doc and signature
            bound_safe_patch_fn = update_wrapper_extended(
                bound_safe_patch_fn, original_fn.fget)
            # Here return the bound safe patch function because user call property decorated
            # method will like `instance.property_decorated_method(...)`, and internally it will
            # call the `bound_safe_patch_fn`, the argument list don't include the `self` argument,
            # so return bound function here.
            return bound_safe_patch_fn

        # Make unbound method `class.target_method` keep the same doc and signature
        get_bound_safe_patch_fn = update_wrapper_extended(
            get_bound_safe_patch_fn, original_fn.fget)
        safe_patch_obj = property(get_bound_safe_patch_fn)
    else:
        safe_patch_obj = update_wrapper_extended(safe_patch_function, original)

    new_patch = _wrap_patch(destination, function_name, safe_patch_obj)
    _store_patch(autologging_integration, new_patch)
Example #39
0
    def __post_init__(self):
        """Validate and resolve the `setting`, `method` and `benchmark` fields.

        - Resolves string names into their corresponding Setting / Method
          classes (each has a unique name).
        - If `benchmark` is set, resolves it (either a file path or a key of
          `setting_presets`), refuses ambiguous command-line arguments, and
          loads the Setting from that file.
        - Finally checks that the chosen Method is applicable to the Setting.

        Raises:
            RuntimeError: If neither `setting` nor `method` is set, if the
                benchmark can't be found, if command-line args would have been
                silently ignored, or if the method isn't applicable to the
                setting.
            NotImplementedError: If a benchmark is given without a method.
        """
        if not (self.setting or self.method):
            raise RuntimeError("One of `setting` or `method` must be set!")

        # All settings have a unique name.
        if isinstance(self.setting, str):
            self.setting = get_class_with_name(self.setting, all_settings)

        # Each Method also has a unique name.
        if isinstance(self.method, str):
            self.method = get_class_with_name(self.method, all_methods)

        if self.benchmark:
            # If the provided benchmark isn't a path, try to get the value from
            # the `setting_presets` dict. If it isn't in the dict, raise an
            # error.
            if not Path(self.benchmark).is_file():
                if self.benchmark in setting_presets:
                    self.benchmark = setting_presets[self.benchmark]
                else:
                    raise RuntimeError(
                        f"Could not find benchmark '{self.benchmark}': it "
                        f"is neither a path to a file nor a key of the "
                        f"`setting_presets` dictionary. \n\n"
                        f"Available presets: \n"
                        + "\n".join(
                            f"- {preset_name}: \t{preset_file.relative_to(os.getcwd())}"
                            for preset_name, preset_file in setting_presets.items()
                        )
                    )
            # Creating an experiment for the given setting, loaded from the
            # config file.
            # TODO: IDEA: Do the same thing for loading the Method?
            logger.info(
                f"Will load the options for the setting from the file "
                f"at path {self.benchmark}."
            )
            drop_extras = True
            if self.setting is None:
                # NOTE: `logger.warning` — `logger.warn` is deprecated.
                logger.warning(
                    UserWarning(
                        f"You didn't specify which setting to use, so this will "
                        f"try to infer the correct type of setting to use from the "
                        f"contents of the file, which might not work!\n (Consider "
                        f"running this with the `--setting` option instead.)"
                    )
                )
                # Find the first type of setting that fits the given file.
                drop_extras = False
                self.setting = Setting

            # Raise an error if any of the args in sys.argv would have been used
            # up by the Setting, just to prevent any ambiguities.
            try:
                _, unused_args = self.setting.from_known_args()
            except ImportError as exc:
                # NOTE: An ImportError can occur here because of a missing OpenGL
                # dependency, since when no arguments are passed, the default RL setting
                # is created (cartpole with pixel observations), which requires a render
                # wrapper to be added (which itself uses pyglet, which uses OpenGL).
                logger.warning(
                    RuntimeWarning(f"Unable to check for unused args: {exc}")
                )
                # In this case, we just pretend that no arguments would have been used.
                unused_args = sys.argv[1:]

            ignored_args = list(set(sys.argv[1:]) - set(unused_args))

            if ignored_args:
                # TODO: This could also be triggered if there were arguments
                # in the method with the same name as some from the Setting.
                raise RuntimeError(
                    f"Cannot pass command-line arguments for the Setting when "
                    f"loading a preset, since these arguments would have been "
                    f"ignored when creating the setting of type {self.setting} "
                    f"anyway: {ignored_args}"
                )

            assert isclass(self.setting) and issubclass(self.setting, Setting)
            # Actually load the setting from the file.
            self.setting = self.setting.load(
                path=self.benchmark, drop_extra_fields=drop_extras
            )

            if self.method is None:
                raise NotImplementedError(
                    f"For now, you need to specify a Method to use using the "
                    f"`--method` argument when loading the setting from a file."
                )

        if self.setting is not None and self.method is not None:
            if not self.method.is_applicable(self.setting):
                raise RuntimeError(
                    f"Method {self.method} isn't applicable to "
                    f"setting {self.setting}!"
                )

        # Sanity checks: each field is either unset, an instance, or a class.
        assert (
            self.setting is None
            or isinstance(self.setting, Setting)
            or issubclass(self.setting, Setting)
        )
        assert (
            self.method is None
            or isinstance(self.method, Method)
            or issubclass(self.method, Method)
        )
Example #40
0
    def _compile(arg):
        """Compile `arg` for torch.jit.

        Accepts three kinds of argument:
          - a class: returns a *copy* of the class with the CompiledFunction
            mixin inserted (see the long NB below for why a copy, not a
            subclass);
          - a Module instance: unsupported, raises TypeError;
          - any other callable: wrapped directly in a CompiledFunction.

        NOTE(review): `nderivs`, `optimize` and `enabled` are closure
        variables from the enclosing (not visible) scope.
        """
        if inspect.isclass(arg):
            # NB: It might seem natural to create a subclass here, rather than
            # make a copy of the class to insert the mixin.  Unfortunately, this
            # will break many user classes.  Suppose you have:
            #
            #     @torch.jit.compile
            #     class Foo(Module):
            #         def __init__(self):
            #             super(Foo, self).__init__() # Python 2 syntax!
            #
            # within the class definition, 'Foo' refers to the *decorated*
            # class, not the undecorated class.  This is bad juju if the
            # decorator returns a subclass, since super(Foo, self) is going to
            # refer to the *undecorated* Foo (and thus you have an infinite
            # loop.)  Python 3's argument-less super() does not have this
            # problem, but in general we cannot ask users to rewrite their code.
            #
            # If we create a *copy* of the class (unrelated to the class the
            # user passed in), this problem goes away, because the class
            # __init__ is a part of is indeed Foo.

            old_init = arg.__init__
            # Python 2 has a concept of unbound methods, which are returned when
            # you take a method form a class. They behave just like regular functions,
            # but check the type of the first argument (self). We don't want this here,
            # because self in our __init__ will be an instance of this new class.
            # Python 3 already returns a plain function, so nothing has to be done.
            if sys.version_info[0] == 2:
                old_init = old_init.im_func

            def __init__(self, *args, **kwargs):
                # Initialize the CompiledFunction side first, then run the
                # user's original __init__.
                torch._C.CompiledFunction.__init__(self, nderivs, optimize,
                                                   enabled, self.forward,
                                                   arg.__name__)
                try:
                    old_init(self, *args, **kwargs)
                except TypeError as e:
                    # If this fails here, the user probably didn't use this as a class decorator
                    if "super" in str(e):
                        raise_from(
                            TypeError(
                                "torch.jit.compile must be used as a class decorator; "
                                "using it on an already defined class is not valid."
                                "\n\nOriginal error: {}".format(str(e))), e)
                    else:
                        raise
                # NOTE: This can't be done in CompiledFunction constructor,
                # because self.parameters() isn't well defined by then
                # (Module constructor hasn't run yet).
                self.set_captured_vars(list(self.parameters()))

            # Build the copied class: same dict as the original, but with our
            # __init__ and CompiledFunction's __call__ spliced in.
            new_dict = dict(arg.__dict__)
            new_dict['__init__'] = __init__
            new_dict['__call__'] = torch._C.CompiledFunction.__call__
            # NOTE: we don't need to override casting methods, because we only capture
            # parameters, and they mutate their data in-place.
            return type(arg.__name__,
                        arg.__bases__ + (torch._C.CompiledFunction, ),
                        new_dict)
        elif isinstance(arg, Module):
            # It requires work to compile module instances, because you would
            # like the resulting compiled module to look just like the uncompiled
            # version; actually achieving this requires a bit of fanciness.
            # So for now, we just only support the class mechanism.
            raise TypeError("Compiling model instances is not supported.  "
                            "Use @torch.jit.compile on a class instead.")
        elif callable(arg):
            compiled_fn = torch._C.CompiledFunction(nderivs, optimize, enabled,
                                                    arg, arg.__name__)
            return compiled_fn
        else:
            raise TypeError("Cannot handle arg with type {}".format(type(arg)))
def positive_int(value):
    """Parse *value* as a strictly positive int (argparse ``type=`` helper).

    Raises argparse.ArgumentTypeError for zero or negative values, so that
    argparse reports a clean usage error instead of a traceback.
    """
    parsed = int(value)
    if parsed > 0:
        return parsed
    raise argparse.ArgumentTypeError("%s is an invalid positive int value" % value)


def instantiate_agent_from_params(args, class_name, param_prefix, label):
    """Instantiate the agent class named by ``args[class_name]``.

    Agent parameters are picked out of *args* by key shape
    ``<param_prefix><type>.<name>`` where ``<type>`` is one of ``i``/``f``/
    ``b``/``s`` (int/float/bool/str); the ``<prefix><type>.`` head is
    stripped to recover the bare parameter name.

    :param args: Mapping of parsed command-line options.
    :param class_name: Key in *args* holding the agent class name.
    :param param_prefix: Prefix marking this agent's params (e.g. "a1").
    :param label: Label passed as the agent's first constructor argument.
    :return: The constructed agent instance.
    """
    agent_cls = globals()[args[class_name]]
    # BUGFIX: the original pattern `(i|f|b|s_\.)` required a literal "s_."
    # for string params and no dot at all for i/f/b; `([ifbs])\.` matches
    # the documented "<prefix><type>.<name>" shape for all four types.
    param_re = re.compile(rf'^{re.escape(param_prefix)}([ifbs])\.')
    # Generalized from the hard-coded `key[4:]`: prefix + 1 type char + dot
    # (identical for the historical 2-character prefixes).
    head_len = len(param_prefix) + 2
    agent_params = {
        key[head_len:]: val for key, val in args.items() if param_re.match(key)
    }
    return agent_cls(label, **agent_params)


# Names of every module-level class that subclasses Game (resp. Agent),
# excluding the base class name itself.
# NOTE(review): "avaliable" is a typo for "available", but the names are kept
# as-is since other code may reference these module-level variables.
avaliable_games = [cls for cls in globals() if cls != 'Game' and inspect.isclass(globals()[cls]) and issubclass(globals()[cls], Game)]
avaliable_agents = [cls for cls in globals() if cls != 'Agent' and inspect.isclass(globals()[cls]) and issubclass(globals()[cls], Agent)]

parser = argparse.ArgumentParser(
    formatter_class=argparse.RawDescriptionHelpFormatter,
    epilog="""You can also add class parameters in format: --<class><param_type>.<param_name>=<param_value>

They will be passed to corresponding class at initialization.

<class> allows the following values:
a1 - first agent class
a2 - second agent class
g - game class.

<param_type> allows the following values:
f - float
Example #42
0
def with_managed_run(autologging_integration, patch_function, tags=None):
    """
    Given a `patch_function`, returns an `augmented_patch_function` that wraps the execution of
    `patch_function` with an active MLflow run. The following properties apply:

        - An MLflow run is only created if there is no active run present when the
          patch function is executed

        - If an active run is created by the `augmented_patch_function`, it is terminated
          with the `FINISHED` state at the end of function execution

        - If an active run is created by the `augmented_patch_function`, it is terminated
          with the `FAILED` if an unhandled exception is thrown during function execution

    Note that, if nested runs or non-fluent runs are created by `patch_function`, `patch_function`
    is responsible for terminating them by the time it terminates
    (or in the event of an exception).

    :param autologging_integration: The autologging integration associated
                                    with the `patch_function`.
    :param patch_function: A `PatchFunction` class definition or a function object
                           compatible with `safe_patch`.
    :param tags: A dictionary of string tags to set on each managed run created during the
                 execution of `patch_function`.
    """
    def _start_managed_run():
        # Start a fluent run and announce it so users can find the autologged data.
        started_run = mlflow.start_run(tags=tags)
        _logger.info(
            "Created MLflow autologging run with ID '%s', which will track hyperparameters,"
            " performance metrics, model artifacts, and lineage information for the"
            " current %s workflow",
            started_run.info.run_id,
            autologging_integration,
        )
        return started_run

    def _terminate_run(status):
        # Best-effort termination of the fluent run with the given RunStatus.
        try_mlflow_log(mlflow.end_run, RunStatus.to_string(status))

    if not inspect.isclass(patch_function):
        # Function-style patch: wrap it in a closure that manages the run.

        def patch_with_managed_run(original, *args, **kwargs):
            managed_run = None
            if not mlflow.active_run():
                managed_run = try_mlflow_log(_start_managed_run)

            try:
                result = patch_function(original, *args, **kwargs)
            except (Exception, KeyboardInterrupt):
                # In addition to standard Python exceptions, handle keyboard interrupts to ensure
                # that runs are terminated if a user prematurely interrupts training execution
                # (e.g. via sigint / ctrl-c)
                if managed_run:
                    _terminate_run(RunStatus.FAILED)
                raise
            if managed_run:
                _terminate_run(RunStatus.FINISHED)
            return result

        return patch_with_managed_run

    # Class-style patch (a PatchFunction subclass): mix run management into a
    # subclass so `_patch_implementation` / `_on_exception` handle the run.
    class PatchWithManagedRun(patch_function):
        def __init__(self):
            super(PatchWithManagedRun, self).__init__()
            self.managed_run = None

        def _patch_implementation(self, original, *args, **kwargs):
            if not mlflow.active_run():
                self.managed_run = try_mlflow_log(_start_managed_run)

            outcome = super(PatchWithManagedRun, self)._patch_implementation(
                original, *args, **kwargs)

            if self.managed_run:
                _terminate_run(RunStatus.FINISHED)

            return outcome

        def _on_exception(self, e):
            if self.managed_run:
                _terminate_run(RunStatus.FAILED)
            super(PatchWithManagedRun, self)._on_exception(e)

    return PatchWithManagedRun
Example #43
0
 def __call__(self, obj):
     """Dispatch `obj` to the class decorator when it is a class, otherwise
     to the function decorator."""
     decorate = self._cls_decorator if inspect.isclass(obj) else self._func_decorator
     return decorate(obj)
Example #44
0
    def load(self) -> None:
        """Materialize this configuration into live objects.

        Builds the SSL context, encodes response headers, resolves the
        HTTP/WS protocol and lifespan classes from their string shorthands,
        imports the ASGI application (exiting the process on failure),
        auto-detects the ASGI interface version, and wraps the app in the
        configured middleware stack. Must be called exactly once.
        """
        assert not self.loaded

        if self.is_ssl:
            assert self.ssl_certfile
            self.ssl: Optional[ssl.SSLContext] = create_ssl_context(
                keyfile=self.ssl_keyfile,
                certfile=self.ssl_certfile,
                password=self.ssl_keyfile_password,
                ssl_version=self.ssl_version,
                cert_reqs=self.ssl_cert_reqs,
                ca_certs=self.ssl_ca_certs,
                ciphers=self.ssl_ciphers,
            )
        else:
            self.ssl = None

        # Header names/values are encoded as latin-1, per the wire format.
        encoded_headers = [
            (key.lower().encode("latin1"), value.encode("latin1"))
            for key, value in self.headers
        ]
        # Prepend a default "server: uvicorn" header unless the user supplied
        # their own Server header or disabled the server header entirely.
        self.encoded_headers = (
            [(b"server", b"uvicorn")] + encoded_headers
            if b"server" not in dict(encoded_headers) and self.server_header
            else encoded_headers
        )

        # `http` / `ws` may be shorthand strings (looked up in the
        # HTTP_PROTOCOLS / WS_PROTOCOLS tables) or already-imported classes.
        if isinstance(self.http, str):
            http_protocol_class = import_from_string(HTTP_PROTOCOLS[self.http])
            self.http_protocol_class: Type[asyncio.Protocol] = http_protocol_class
        else:
            self.http_protocol_class = self.http

        if isinstance(self.ws, str):
            ws_protocol_class = import_from_string(WS_PROTOCOLS[self.ws])
            self.ws_protocol_class: Optional[Type[asyncio.Protocol]] = ws_protocol_class
        else:
            self.ws_protocol_class = self.ws

        self.lifespan_class = import_from_string(LIFESPAN[self.lifespan])

        # Import the application object; an import failure is fatal.
        try:
            self.loaded_app = import_from_string(self.app)
        except ImportFromStringError as exc:
            logger.error("Error loading ASGI app. %s" % exc)
            sys.exit(1)

        # Try calling the imported object with no arguments: if it is an app
        # *factory*, the call succeeds and yields the real app. A TypeError
        # means it was a plain app; that is only fatal when --factory was
        # explicitly requested. If the call succeeds without --factory, warn
        # that a factory was auto-detected.
        try:
            self.loaded_app = self.loaded_app()
        except TypeError as exc:
            if self.factory:
                logger.error("Error loading ASGI app factory: %s", exc)
                sys.exit(1)
        else:
            if not self.factory:
                logger.warning(
                    "ASGI app factory detected. Using it, "
                    "but please consider setting the --factory flag explicitly."
                )

        # Auto-detect ASGI2 vs ASGI3: an ASGI3 app is a coroutine callable
        # (or, for a class, awaitable) taking (scope, receive, send).
        if self.interface == "auto":
            if inspect.isclass(self.loaded_app):
                use_asgi_3 = hasattr(self.loaded_app, "__await__")
            elif inspect.isfunction(self.loaded_app):
                use_asgi_3 = asyncio.iscoroutinefunction(self.loaded_app)
            else:
                call = getattr(self.loaded_app, "__call__", None)
                use_asgi_3 = asyncio.iscoroutinefunction(call)
            self.interface = "asgi3" if use_asgi_3 else "asgi2"

        # Adapt WSGI / ASGI2 apps to the ASGI3 interface used internally.
        if self.interface == "wsgi":
            self.loaded_app = WSGIMiddleware(self.loaded_app)
            self.ws_protocol_class = None
        elif self.interface == "asgi2":
            self.loaded_app = ASGI2Middleware(self.loaded_app)

        # Optional middleware layers, innermost first.
        if self.debug:
            self.loaded_app = DebugMiddleware(self.loaded_app)
        if logger.level <= TRACE_LOG_LEVEL:
            self.loaded_app = MessageLoggerMiddleware(self.loaded_app)
        if self.proxy_headers:
            self.loaded_app = ProxyHeadersMiddleware(
                self.loaded_app, trusted_hosts=self.forwarded_allow_ips
            )

        self.loaded = True
def use_np_shape(func):
    """A decorator wrapping a function or class with activated NumPy-shape semantics.
    When `func` is a function, this ensures that the execution of the function is scoped with NumPy
    shape semantics, such as the support for zero-dim and zero size tensors. When
    `func` is a class, it ensures that all the methods, static functions, and properties
    of the class are executed with the NumPy shape semantics.

    Example::
        import mxnet as mx
        @mx.use_np_shape
        def scalar_one():
            return mx.nd.ones(())
        print(scalar_one())

        @np.use_np_shape
        class ScalarTensor(object):
            def __init__(self, val=None):
                if val is None:
                    val = ScalarTensor.random().value
                self._scalar = mx.nd.ones(()) * val

            def __repr__(self):
                print("Is __repr__ in np_shape semantics? {}!".format(str(np.is_np_shape())))
                return str(self._scalar.asnumpy())

            @staticmethod
            def random():
                val = mx.nd.random.uniform().asnumpy().item()
                return ScalarTensor(val)

            @property
            def value(self):
                print("Is value property in np_shape semantics? {}!".format(str(np.is_np_shape())))
                return self._scalar.asnumpy().item()


        print("Is global scope of np_shape activated? {}!".format(str(np.is_np_shape())))
        scalar_tensor = ScalarTensor()
        print(scalar_tensor)

    Parameters
    ----------
    func : a user-provided callable function or class to be scoped by the NumPy-shape semantics.

    Returns
    -------
    Function or class
        A function or class wrapped in the NumPy-shape scope.
    """

    if inspect.isclass(func):
        # Recursively wrap every function, method, and property of the class
        # so each executes inside the np_shape scope. The class is mutated in
        # place and returned.
        for name, method in inspect.getmembers(
                func,
                predicate=lambda f: inspect.isfunction(f) or inspect.ismethod(
                    f) or isinstance(f, property)):
            if isinstance(method, property):
                # Rebuild the property with a wrapped getter.
                # NOTE(review): the getter wrapped here is `method.__get__`
                # (the descriptor-protocol method of the property object),
                # and the deleter passed is `method.__delattr__` rather than
                # `method.__delete__` — confirm both against the intended
                # property semantics.
                setattr(
                    func, name,
                    property(use_np_shape(method.__get__), method.__set__,
                             method.__delattr__, method.__doc__))
            else:
                setattr(func, name, use_np_shape(method))
        return func
    elif callable(func):

        @functools.wraps(func)
        def _with_np_shape(*args, **kwargs):
            # Activate NumPy-shape semantics only for the duration of the call.
            with np_shape(active=True):
                return func(*args, **kwargs)

        return _with_np_shape
    else:
        raise TypeError(
            'use_np_shape can only decorate classes and callable objects, '
            'while received a {}'.format(str(type(func))))
Example #46
0
    def _load_fitter_info(self):
        """
        Load fitter info from pytc.

        Discovers every class in `pytc.fitters` and builds, per fitter: a Qt
        frame with a form layout of option inputs (pre-populated with the
        fitter's keyword-argument defaults), a radio button wired to
        `_select_fit`, and parallel bookkeeping lists mapping names, classes,
        widgets, and defaults to one another.
        """

        # get list of Fitter subclasses, sorted by name
        objects = []
        for name, obj in inspect.getmembers(pytc.fitters):
            if inspect.isclass(obj):
                objects.append((name, obj))
        # Sort on the name only: a bare tuple sort would fall through to
        # comparing the class objects on a name tie, which raises TypeError.
        objects.sort(key=lambda entry: entry[0])

        self._fitter_classes = []
        self._fitter_vars = []
        self._fitter_widgets = []
        self._fitter_options = []
        self._fitter_names = []
        self._fitter_radio_buttons = []
        self._fitter_defaults = []

        # For every Fitter subclass...
        for name, obj in objects:

            self._fitter_classes.append(obj)

            # Make new widget holding a form layout for this fitter's options
            self._fitter_widgets.append(QW.QFrame())
            self._fitter_options.append(
                QW.QFormLayout(self._fitter_widgets[-1]))

            # Add name and radio button to widget
            self._fitter_names.append(name.replace("Fitter", ""))
            self._fitter_radio_buttons.append(
                QW.QRadioButton(self._fitter_names[-1]))
            self._fitter_radio_buttons[-1].toggled.connect(self._select_fit)

            # Figure out arguments for this Fitter subclass.
            # BUGFIX: `inspect.getargspec` was removed in Python 3.11;
            # `getfullargspec` exposes the same `.args` / `.defaults`.
            args = inspect.getfullargspec(obj)
            if len(args.args) == 1 and args.defaults is None:
                # Only `self`: no configurable options.
                self._fitter_defaults.append({})
            else:
                # Map each keyword argument (skipping `self`) to its default.
                self._fitter_defaults.append({
                    arg: param
                    for arg, param in zip(args.args[1:], args.defaults)
                })

            fitter_keys = sorted(self._fitter_defaults[-1].keys())

            # Append one labeled input row per fit option
            self._fitter_vars.append({})
            for n in fitter_keys:

                label_name = str(n).replace("_", " ") + ": "
                label = QW.QLabel(label_name.title(), self)
                entry = InputWidget(self._fitter_defaults[-1][n])

                self._fitter_vars[-1][n] = entry
                self._fitter_options[-1].addRow(label, entry)

        # map from name back to index in lists above
        self._fitter_name_to_index = {
            v: i for i, v in enumerate(self._fitter_names)
        }
Example #47
0
    def _find_edit_target(shell, args, opts, last_call):
        """Utility method used by magic_edit to find what to edit.

        Resolves *args* (a filename, an input-history range, a variable or
        macro name, or — with the -p option — the previous edit target) into
        a concrete file to open.

        Parameters
        ----------
        shell : the interactive shell instance (provides user_ns,
            extract_input_lines, mktempfile, displayhook).
        args : str argument string from the magic command.
        opts : dict-like of parsed option flags ('p', 'r', 'n').
        last_call : two-element mutable sequence remembering the previous
            call's prompt count and argument (mutated in place).

        Returns
        -------
        (filename, lineno, use_temp) on success; None when the argument
        cannot be resolved (a warning is emitted in that case).
        Raises MacroToEdit when the target turns out to be a Macro.
        """

        def make_filename(arg):
            "Make a filename from the given args"
            arg = unquote_filename(arg)
            try:
                filename = get_py_filename(arg)
            except IOError:
                # If it ends with .py but doesn't already exist, assume we want
                # a new file.
                if arg.endswith('.py'):
                    filename = arg
                else:
                    filename = None
            return filename

        # Set a few locals from the options for convenience:
        opts_prev = 'p' in opts   # -p: re-edit the previous target
        opts_raw = 'r' in opts    # -r: use raw (untranslated) input history

        # custom exceptions
        class DataIsObject(Exception): pass

        # Default line number value
        lineno = opts.get('n',None)

        if opts_prev:
            # Try the output variable of the previous call first; fall back to
            # the previously edited argument if it is gone from user_ns.
            args = '_%s' % last_call[0]
            if not shell.user_ns.has_key(args):
                args = last_call[1]

        # use last_call to remember the state of the previous call, but don't
        # let it be clobbered by successive '-p' calls.
        try:
            last_call[0] = shell.displayhook.prompt_count
            if not opts_prev:
                last_call[1] = args
        except:
            pass

        # by default this is done with temp files, except when the given
        # arg is a filename
        use_temp = True

        data = ''

        # First, see if the arguments should be a filename.
        filename = make_filename(args)
        if filename:
            use_temp = False
        elif args:
            # Mode where user specifies ranges of lines, like in %macro.
            data = shell.extract_input_lines(args, opts_raw)
            if not data:
                try:
                    # Load the parameter given as a variable. If not a string,
                    # process it as an object instead (below)

                    #print '*** args',args,'type',type(args)  # dbg
                    data = eval(args, shell.user_ns)
                    if not isinstance(data, basestring):
                        raise DataIsObject

                except (NameError,SyntaxError):
                    # given argument is not a variable, try as a filename
                    filename = make_filename(args)
                    if filename is None:
                        warn("Argument given (%s) can't be found as a variable "
                             "or as a filename." % args)
                        return
                    use_temp = False

                except DataIsObject:
                    # macros have a special edit function
                    if isinstance(data, Macro):
                        raise MacroToEdit(data)

                    # For objects, try to edit the file where they are defined
                    filename = find_file(data)
                    if filename:
                        if 'fakemodule' in filename.lower() and \
                            inspect.isclass(data):
                            # class created by %edit? Try to find source
                            # by looking for method definitions instead, the
                            # __module__ in those classes is FakeModule.
                            attrs = [getattr(data, aname) for aname in dir(data)]
                            for attr in attrs:
                                if not inspect.ismethod(attr):
                                    continue
                                filename = find_file(attr)
                                if filename and \
                                  'fakemodule' not in filename.lower():
                                    # change the attribute to be the edit
                                    # target instead
                                    data = attr
                                    break

                        datafile = 1
                    if filename is None:
                        filename = make_filename(args)
                        datafile = 1
                        warn('Could not find file where `%s` is defined.\n'
                             'Opening a file named `%s`' % (args, filename))
                    # Now, make sure we can actually read the source (if it was
                    # in a temp file it's gone by now).
                    if datafile:
                        if lineno is None:
                            lineno = find_source_lines(data)
                        if lineno is None:
                            filename = make_filename(args)
                            if filename is None:
                                warn('The file `%s` where `%s` was defined '
                                     'cannot be read.' % (filename, data))
                                return
                    use_temp = False

        if use_temp:
            # No resolvable file: put the captured data in a temp file.
            filename = shell.mktempfile(data)
            print 'IPython will make a temporary file named:',filename

        return filename, lineno, use_temp
def use_np_array(func):
    """A decorator wrapping Gluon `Block`s and all its methods, properties, and static functions
    with the semantics of NumPy-array, which means that where ndarrays are created,
    `mxnet.numpy.ndarray`s should be created, instead of legacy ndarrays of type `mx.nd.NDArray`.
    For example, at the time when a parameter is created in a `Block`, an `mxnet.numpy.ndarray`
    is created if it's decorated with this decorator.

    Example::
        import mxnet as mx
        from mxnet import gluon, np


        class TestHybridBlock1(gluon.HybridBlock):
            def __init__(self):
                super(TestHybridBlock1, self).__init__()
                self.w = self.params.get('w', shape=(2, 2))

            def hybrid_forward(self, F, x, w):
                return F.dot(x, w)


        x = mx.nd.ones((2, 2))
        net1 = TestHybridBlock1()
        net1.initialize()
        out = net1.forward(x)
        for _, v in net1.collect_params().items():
            assert type(v.data()) is mx.nd.NDArray
        assert type(out) is mx.nd.NDArray


        @np.use_np_array
        class TestHybridBlock2(gluon.HybridBlock):
            def __init__(self):
                super(TestHybridBlock2, self).__init__()
                self.w = self.params.get('w', shape=(2, 2))

            def hybrid_forward(self, F, x, w):
                return F.np.dot(x, w)


        x = np.ones((2, 2))
        net2 = TestHybridBlock2()
        net2.initialize()
        out = net2.forward(x)
        for _, v in net2.collect_params().items():
            print(type(v.data()))
            assert type(v.data()) is np.ndarray
        assert type(out) is np.ndarray

    Parameters
    ----------
    func : a user-provided callable function or class to be scoped by the NumPy-array semantics.

    Returns
    -------
    Function or class
        A function or class wrapped in the NumPy-array scope.
    """
    if inspect.isclass(func):
        # Wrap every function/method and property of the class so that all of
        # them execute inside the NumPy-array semantics scope.
        for name, method in inspect.getmembers(
                func,
                predicate=lambda f: inspect.isfunction(f) or inspect.ismethod(
                    f) or isinstance(f, property)):
            if isinstance(method, property):
                # Rebuild the property with a scoped getter.  Bug fix: the
                # deleter must be the descriptor-protocol `__delete__`, not
                # `__delattr__` — `__delattr__` deletes attributes *of the
                # property object itself* and made `del obj.prop` raise
                # TypeError instead of delegating to the original deleter.
                setattr(
                    func, name,
                    property(use_np_array(method.__get__), method.__set__,
                             method.__delete__, method.__doc__))
            else:
                setattr(func, name, use_np_array(method))
        return func
    elif callable(func):

        @functools.wraps(func)
        def _with_np_array(*args, **kwargs):
            # Activate NumPy-array semantics only for the duration of the call.
            with np_array(active=True):
                return func(*args, **kwargs)

        return _with_np_array
    else:
        raise TypeError(
            'use_np_array can only decorate classes and callable objects, '
            'while received a {}'.format(str(type(func))))
Example #49
0
def _lookfor_generate_cache(module, import_modules, regenerate):
    """
    Generate docstring cache for given module.

    Parameters
    ----------
    module : str, None, module
        Module for which to generate docstring cache
    import_modules : bool
        Whether to import sub-modules in packages.
    regenerate : bool
        Re-generate the docstring cache

    Returns
    -------
    cache : dict {obj_full_name: (docstring, kind, index), ...}
        Docstring cache for the module, either cached one (regenerate=False)
        or newly generated.

    """
    global _lookfor_caches
    # Local import to speed up numpy's import time.
    import inspect

    if sys.version_info[0] >= 3:
        # In Python3 stderr, stdout are text files.
        from io import StringIO
    else:
        from StringIO import StringIO

    if module is None:
        module = "numpy"

    if isinstance(module, str):
        # Resolve a dotted module name; an unimportable name yields an
        # empty cache rather than raising.
        try:
            __import__(module)
        except ImportError:
            return {}
        module = sys.modules[module]
    elif isinstance(module, list) or isinstance(module, tuple):
        # Multiple modules: merge their individual caches.
        cache = {}
        for mod in module:
            cache.update(_lookfor_generate_cache(mod, import_modules,
                                                 regenerate))
        return cache

    # Caches are keyed by object identity of the module.
    if id(module) in _lookfor_caches and not regenerate:
        return _lookfor_caches[id(module)]

    # walk items and collect docstrings
    cache = {}
    _lookfor_caches[id(module)] = cache
    seen = {}
    index = 0
    # Breadth-first walk over the module's members; `index` preserves
    # discovery order so callers can rank results.
    stack = [(module.__name__, module)]
    while stack:
        name, item = stack.pop(0)
        if id(item) in seen:
            continue
        seen[id(item)] = True

        index += 1
        kind = "object"

        if inspect.ismodule(item):
            kind = "module"
            try:
                _all = item.__all__
            except AttributeError:
                _all = None

            # import sub-packages
            if import_modules and hasattr(item, '__path__'):
                for pth in item.__path__:
                    for mod_path in os.listdir(pth):
                        this_py = os.path.join(pth, mod_path)
                        init_py = os.path.join(pth, mod_path, '__init__.py')
                        if (os.path.isfile(this_py) and
                                mod_path.endswith('.py')):
                            to_import = mod_path[:-3]
                        elif os.path.isfile(init_py):
                            to_import = mod_path
                        else:
                            continue
                        if to_import == '__init__':
                            continue

                        try:
                            # Silence any output produced at import time.
                            old_stdout = sys.stdout
                            old_stderr = sys.stderr
                            try:
                                sys.stdout = StringIO()
                                sys.stderr = StringIO()
                                __import__("%s.%s" % (name, to_import))
                            finally:
                                sys.stdout = old_stdout
                                sys.stderr = old_stderr
                        # Catch SystemExit, too
                        except BaseException:
                            continue

            for n, v in _getmembers(item):
                try:
                    item_name = getattr(v, '__name__', "%s.%s" % (name, n))
                    mod_name = getattr(v, '__module__', None)
                except NameError:
                    # ref. SWIG's global cvars
                    #    NameError: Unknown C global variable
                    item_name = "%s.%s" % (name, n)
                    mod_name = None
                if '.' not in item_name and mod_name:
                    item_name = "%s.%s" % (mod_name, item_name)

                if not item_name.startswith(name + '.'):
                    # don't crawl "foreign" objects
                    if isinstance(v, ufunc):
                        # ... unless they are ufuncs
                        pass
                    else:
                        continue
                elif not (inspect.ismodule(v) or _all is None or n in _all):
                    continue
                stack.append(("%s.%s" % (name, n), v))
        elif inspect.isclass(item):
            kind = "class"
            # Class members are crawled too, so methods get cache entries.
            for n, v in _getmembers(item):
                stack.append(("%s.%s" % (name, n), v))
        elif hasattr(item, "__call__"):
            kind = "func"

        try:
            doc = inspect.getdoc(item)
        except NameError:
            # ref SWIG's NameError: Unknown C global variable
            doc = None
        if doc is not None:
            cache[name] = (doc, kind, index)

    return cache
Example #50
0
def module_to_dict(module, exclude=()):
    """Return a mapping of attribute name -> class for the classes in *module*.

    Parameters
    ----------
    module : module object whose attributes are scanned via ``dir``.
    exclude : container of names and/or class objects to omit; a class is
        skipped when either its name or the class itself is in *exclude*.
        (Default changed from a mutable ``[]`` to an immutable ``()``;
        behavior is identical since it is only used for membership tests.)

    Returns
    -------
    dict mapping name -> class.
    """
    result = {}
    for name in dir(module):
        # Look the attribute up once instead of three times per name.
        value = getattr(module, name)
        if isclass(value) and name not in exclude and value not in exclude:
            result[name] = value
    return result
Example #51
0
    def run(self):
        """Build the document nodes for an embedded-code directive.

        Validates the ``layout`` option, fetches the referenced source
        (script, or a test-case method), optionally rewrites and executes
        it, and returns docutils nodes for each requested layout section
        (code, output, interleave, plot).

        Raises a docutils directive error (level 2 warning) when the source
        cannot be obtained or when execution fails, and SphinxError for
        invalid layout combinations or a missing plot file.
        """
        # Module-level counter used to give each generated plot a unique name.
        global _plot_count

        #
        # error checking
        #
        allowed_layouts = set(['code', 'output', 'interleave', 'plot'])

        if 'layout' in self.options:
            layout = [s.strip() for s in self.options['layout'].split(',')]
        else:
            layout = ['code']

        if len(layout) > len(set(layout)):
            raise SphinxError("No duplicate layout entries allowed.")

        bad = [n for n in layout if n not in allowed_layouts]
        if bad:
            raise SphinxError("The following layout options are invalid: %s" %
                              bad)

        if 'interleave' in layout and ('code' in layout or 'output' in layout):
            raise SphinxError(
                "The interleave option is mutually exclusive to the code "
                "and output options.")

        #
        # Get the source code
        #
        path = self.arguments[0]
        try:
            source, indent, module, class_ = get_source_code(path)
        except Exception as err:
            # Generally means the source couldn't be inspected or imported.
            # Raise as a Directive warning (level 2 in docutils).
            # This way, the sphinx build does not terminate if, for example, you are building on
            # an environment where mpi or pyoptsparse are missing.
            raise self.directive_error(2, str(err))

        #
        # script, test and/or plot?
        #
        is_script = path.endswith('.py')

        is_test = class_ is not None and inspect.isclass(
            class_) and issubclass(class_, unittest.TestCase)

        shows_plot = re.compile('|'.join(plotting_functions)).search(source)

        if 'plot' in layout:
            plot_dir = os.getcwd()
            plot_fname = 'doc_plot_%d.png' % _plot_count
            _plot_count += 1

            plot_file_abs = os.path.join(os.path.abspath(plot_dir), plot_fname)
            if os.path.isfile(plot_file_abs):
                # remove any existing plot file
                os.remove(plot_file_abs)

        #
        # Modify the source prior to running
        #
        if 'strip-docstrings' in self.options:
            source = remove_docstrings(source)

        if is_test:
            # Turn a unittest method into a standalone script: convert asserts
            # to prints and splice in setUp/tearDown around the test body.
            try:
                source = replace_asserts_with_prints(
                    dedent(strip_header(source)))
                source = remove_initial_empty_lines(source)

                class_name = class_.__name__
                method_name = path.rsplit('.', 1)[1]

                # make 'self' available to test code (as an instance of the test case)
                self_code = "from %s import %s\nself = %s('%s')\n" % \
                            (module.__name__, class_name, class_name, method_name)

                # get setUp and tearDown but don't duplicate if it is the method being tested
                setup_code = '' if method_name == 'setUp' else dedent(
                    strip_header(
                        remove_docstrings(
                            inspect.getsource(getattr(class_, 'setUp')))))

                teardown_code = '' if method_name == 'tearDown' else dedent(
                    strip_header(
                        remove_docstrings(
                            inspect.getsource(getattr(class_, 'tearDown')))))

                # for interleaving, we need to mark input/output blocks
                if 'interleave' in layout:
                    source = insert_output_start_stop_indicators(source)

                code_to_run = '\n'.join(
                    [self_code, setup_code, source, teardown_code]).strip()
            except Exception:
                err = traceback.format_exc()
                raise SphinxError("Problem with embed of " + path + ": \n" +
                                  str(err))
        else:
            if indent > 0:
                source = dedent(source)
            if 'interleave' in layout:
                source = insert_output_start_stop_indicators(source)
            code_to_run = source[:]

        #
        # Run the code (if necessary)
        #
        skipped = failed = False

        if 'output' in layout or 'interleave' in layout or 'plot' in layout:

            imports_not_required = 'imports-not-required' in self.options

            if shows_plot:
                # import matplotlib AFTER __future__ (if it's there)
                mpl_import = "\nimport matplotlib\nmatplotlib.use('Agg')\n"
                idx = code_to_run.find("from __future__")
                idx = code_to_run.find('\n', idx) if idx >= 0 else 0
                code_to_run = code_to_run[:idx] + mpl_import + code_to_run[idx:]

                if 'plot' in layout:
                    code_to_run = code_to_run + (
                        '\nmatplotlib.pyplot.savefig("%s")' % plot_file_abs)

            skipped, failed, run_outputs = run_code(
                code_to_run,
                path,
                module=module,
                cls=class_,
                imports_not_required=imports_not_required,
                shows_plot=shows_plot)

        #
        # Handle output
        #
        if failed:
            # Failed cases raised as a Directive warning (level 2 in docutils).
            # This way, the sphinx build does not terminate if, for example, you are building on
            # an environment where mpi or pyoptsparse are missing.
            raise self.directive_error(2, run_outputs)
        elif skipped:
            io_nodes = [get_skip_output_node(run_outputs)]
        else:
            if 'output' in layout:
                output_blocks = run_outputs if isinstance(
                    run_outputs, list) else [run_outputs]

            elif 'interleave' in layout:
                if is_test:
                    # Slice away the injected self/setUp/tearDown wrappers so
                    # only the test body itself is split into blocks.
                    start = len(self_code) + len(setup_code)
                    end = len(code_to_run) - len(teardown_code)
                    input_blocks = split_source_into_input_blocks(
                        code_to_run[start:end])
                else:
                    input_blocks = split_source_into_input_blocks(code_to_run)

                output_blocks = extract_output_blocks(run_outputs)

                # Merge any input blocks for which there is no corresponding output
                # with subsequent input blocks that do have output
                input_blocks = consolidate_input_blocks(
                    input_blocks, output_blocks)

            if 'plot' in layout:
                if not os.path.isfile(plot_file_abs):
                    raise SphinxError("Can't find plot file '%s'" %
                                      plot_file_abs)

                directive_dir = os.path.relpath(
                    os.getcwd(),
                    os.path.dirname(self.state.document.settings._source))
                # this filename must NOT contain an absolute path, else the Figure will not
                # be able to find the image file in the generated html dir.
                plot_file = os.path.join(directive_dir, plot_fname)

                # create plot node
                fig = images.Figure(self.name, [plot_file], self.options,
                                    self.content, self.lineno,
                                    self.content_offset, self.block_text,
                                    self.state, self.state_machine)
                plot_nodes = fig.run()

        #
        # create a list of document nodes to return based on layout
        #
        doc_nodes = []
        skip_fail_shown = False
        for opt in layout:
            if opt == 'code':
                # we want the body of code to be formatted and code highlighted
                body = nodes.literal_block(source, source)
                body['language'] = 'python'
                doc_nodes.append(body)
            elif skipped:
                if not skip_fail_shown:
                    doc_nodes.extend(io_nodes)
                    skip_fail_shown = True
            else:
                if opt == 'interleave':
                    doc_nodes.extend(
                        get_interleaved_io_nodes(input_blocks, output_blocks))
                elif opt == 'output':
                    doc_nodes.append(get_output_block_node(output_blocks))
                else:  # plot
                    doc_nodes.extend(plot_nodes)

        return doc_nodes
    def test_simpleChainingTest(self):
        """Check epsilon-rule removal on a chained grammar: the transformed
        grammar must contain the expected generated rules, each recording the
        rule it was derived from and the index that was replaced, and the
        original epsilon rules must be gone."""
        g = Grammar(terminals=[0, 1],
                    nonterminals=[S, A, B, C],
                    rules=[Rules],
                    start_symbol=S)
        com = ContextFree.remove_rules_with_epsilon(g)
        self.assertEqual(len(com.rules()), 12)

        # Rules the transformation is expected to have generated.
        class RuleNewStoBC(Rule):
            rule = ([S], [B, C])

        class RuleNewStoAC(Rule):
            rule = ([S], [A, C])

        class RuleNewStoAB(Rule):
            rule = ([S], [A, B])

        class RuleNewAto0(Rule):
            rule = ([A], [0])

        class RuleNewStoB(Rule):
            rule = ([S], [B])

        class RuleNewStoC(Rule):
            rule = ([S], [C])

        class RuleNewStoA(Rule):
            rule = ([S], [A])

        class RuleNewStoEPS(Rule):
            rule = ([S], [EPS])

        self.assertTrue(
            com.have_rule([
                RuleNewStoBC, RuleNewStoAC, RuleNewStoAB, RuleNewAto0,
                RuleNewStoB, RuleNewStoC, RuleNewStoA, RuleNewStoEPS
            ]))

        def assert_epsilon_removed(new_rule, original_rule, replace_index):
            # Every generated rule must be a class derived from
            # EpsilonRemovedRule and must record its provenance.
            created = com.get_rule(new_rule)
            self.assertTrue(isclass(created))
            self.assertTrue(
                issubclass(created, ContextFree.EpsilonRemovedRule))
            self.assertEqual(created.from_rule.rule, original_rule)
            self.assertEqual(created.replace_index, replace_index)

        assert_epsilon_removed(RuleNewStoBC, ([S], [A, B, C]), 0)
        assert_epsilon_removed(RuleNewStoAC, ([S], [A, B, C]), 1)
        assert_epsilon_removed(RuleNewStoAB, ([S], [A, B, C]), 2)
        assert_epsilon_removed(RuleNewAto0, ([A], [0, A]), 1)
        assert_epsilon_removed(RuleNewStoA, ([S], [A, C]), 1)
        assert_epsilon_removed(RuleNewStoB, ([S], [B, C]), 1)
        assert_epsilon_removed(RuleNewStoC, ([S], [B, C]), 0)
        assert_epsilon_removed(RuleNewStoEPS, ([S], [C]), 0)

        # The original epsilon productions must have been removed.
        class RuleOldAtoEps(Rule):
            rule = ([A], [EPS])

        class RuleOldBtoEps(Rule):
            rule = ([B], [EPS])

        class RuleOldCtoEps(Rule):
            rule = ([C], [EPS])

        self.assertFalse(com.have_rule(RuleOldAtoEps))
        self.assertFalse(com.have_rule(RuleOldBtoEps))
        self.assertFalse(com.have_rule(RuleOldCtoEps))
 def select(obj):
     "Accept classes derived from base_class (excluding the base itself when strict)."
     if not inspect.isclass(obj):
         return False
     if not issubclass(obj, base_class):
         return False
     # NOTE(review): `base_class` and `strict` are closure variables from the
     # enclosing scope (not visible in this chunk).
     return (not strict) or obj != base_class
Example #54
0
def info(object=None, maxwidth=76, output=sys.stdout, toplevel='numpy'):
    """
    Get help information for a function, class, or module.

    Parameters
    ----------
    object : object or str, optional
        Input object or name to get information about. If `object` is a
        numpy object, its docstring is given. If it is a string, available
        modules are searched for matching objects.  If None, information
        about `info` itself is returned.
    maxwidth : int, optional
        Printing width.
    output : file like object, optional
        File like object that the output is written to, default is
        ``stdout``.  The object has to be opened in 'w' or 'a' mode.
    toplevel : str, optional
        Start search at this level.

    See Also
    --------
    source, lookfor

    Notes
    -----
    When used interactively with an object, ``np.info(obj)`` is equivalent
    to ``help(obj)`` on the Python prompt or ``obj?`` on the IPython
    prompt.

    Examples
    --------
    >>> np.info(np.polyval) # doctest: +SKIP
       polyval(p, x)
         Evaluate the polynomial p at x.
         ...

    When using a string for `object` it is possible to get multiple results.

    >>> np.info('fft') # doctest: +SKIP
         *** Found in numpy ***
    Core FFT routines
    ...
         *** Found in numpy.fft ***
     fft(a, n=None, axis=-1)
    ...
         *** Repeat reference found in numpy.fft.fftpack ***
         *** Total of 3 references found. ***

    """
    global _namedict, _dictlist
    # Local import to speed up numpy's import time.
    import pydoc
    import inspect

    # Unwrap ppimport lazy-import proxies before dispatching on type.
    if (hasattr(object, '_ppimport_importer') or
           hasattr(object, '_ppimport_module')):
        object = object._ppimport_module
    elif hasattr(object, '_ppimport_attr'):
        object = object._ppimport_attr

    if object is None:
        info(info)
    elif isinstance(object, ndarray):
        _info(object, output=output)
    elif isinstance(object, str):
        if _namedict is None:
            _namedict, _dictlist = _makenamedict(toplevel)
        numfound = 0
        objlist = []
        for namestr in _dictlist:
            try:
                obj = _namedict[namestr][object]
                if id(obj) in objlist:
                    print("\n     "
                          "*** Repeat reference found in %s *** " % namestr,
                          file=output
                          )
                else:
                    objlist.append(id(obj))
                    print("     *** Found in %s ***" % namestr, file=output)
                    info(obj)
                    print("-"*maxwidth, file=output)
                numfound += 1
            except KeyError:
                pass
        if numfound == 0:
            print("Help for %s not found." % object, file=output)
        else:
            print("\n     "
                  "*** Total of %d references found. ***" % numfound,
                  file=output
                  )

    elif inspect.isfunction(object):
        name = object.__name__
        arguments = formatargspec(*getargspec(object))

        if len(name+arguments) > maxwidth:
            argstr = _split_line(name, arguments, maxwidth)
        else:
            argstr = name + arguments

        print(" " + argstr + "\n", file=output)
        print(inspect.getdoc(object), file=output)

    elif inspect.isclass(object):
        name = object.__name__
        arguments = "()"
        try:
            if hasattr(object, '__init__'):
                arguments = formatargspec(
                        *getargspec(object.__init__.__func__)
                        )
                # Drop `self` from the displayed argument list.
                arglist = arguments.split(', ')
                if len(arglist) > 1:
                    arglist[1] = "("+arglist[1]
                    arguments = ", ".join(arglist[1:])
        except Exception:
            pass

        if len(name+arguments) > maxwidth:
            argstr = _split_line(name, arguments, maxwidth)
        else:
            argstr = name + arguments

        print(" " + argstr + "\n", file=output)
        doc1 = inspect.getdoc(object)
        if doc1 is None:
            if hasattr(object, '__init__'):
                print(inspect.getdoc(object.__init__), file=output)
        else:
            print(inspect.getdoc(object), file=output)

        methods = pydoc.allmethods(object)
        if methods != []:
            print("\n\nMethods:\n", file=output)
            for meth in methods:
                if meth[0] == '_':
                    continue
                thisobj = getattr(object, meth, None)
                # Bug fix: previously `methstr` was only assigned when
                # `thisobj` was not None, so a None attribute raised
                # NameError (first iteration) or printed a stale summary
                # from the previous method.  Default it explicitly.
                methstr = "None"
                if thisobj is not None:
                    methstr, other = pydoc.splitdoc(
                            inspect.getdoc(thisobj) or "None"
                            )
                print("  %s  --  %s" % (meth, methstr), file=output)

    elif (sys.version_info[0] < 3
            and isinstance(object, types.InstanceType)):
        # check for __call__ method
        # types.InstanceType is the type of the instances of oldstyle classes
        print("Instance of class: ", object.__class__.__name__, file=output)
        print(file=output)
        if hasattr(object, '__call__'):
            arguments = formatargspec(
                    *getargspec(object.__call__.__func__)
                    )
            arglist = arguments.split(', ')
            if len(arglist) > 1:
                arglist[1] = "("+arglist[1]
                arguments = ", ".join(arglist[1:])
            else:
                arguments = "()"

            if hasattr(object, 'name'):
                name = "%s" % object.name
            else:
                name = "<name>"
            if len(name+arguments) > maxwidth:
                argstr = _split_line(name, arguments, maxwidth)
            else:
                argstr = name + arguments

            print(" " + argstr + "\n", file=output)
            doc = inspect.getdoc(object.__call__)
            if doc is not None:
                print(inspect.getdoc(object.__call__), file=output)
            print(inspect.getdoc(object), file=output)

        else:
            print(inspect.getdoc(object), file=output)

    elif inspect.ismethod(object):
        name = object.__name__
        arguments = formatargspec(
                *getargspec(object.__func__)
                )
        arglist = arguments.split(', ')
        if len(arglist) > 1:
            arglist[1] = "("+arglist[1]
            arguments = ", ".join(arglist[1:])
        else:
            arguments = "()"

        if len(name+arguments) > maxwidth:
            argstr = _split_line(name, arguments, maxwidth)
        else:
            argstr = name + arguments

        print(" " + argstr + "\n", file=output)
        print(inspect.getdoc(object), file=output)

    elif hasattr(object, '__doc__'):
        print(inspect.getdoc(object), file=output)
Example #55
0
# we are using the same data set repeatedly, so create a cache external to the equations
externalCache = pyeq3.dataCache()
reducedDataCache = {}

#####################################################
# this value is used to make the example run faster #
#####################################################
smoothnessControl = 3

##########################
# fit named equations here
# Walk every equation class in every pyeq3.Models_2D submodule.
for submodule in inspect.getmembers(pyeq3.Models_2D):
    if inspect.ismodule(submodule[1]):
        for equationClass in inspect.getmembers(submodule[1]):
            if inspect.isclass(equationClass[1]):

                # special classes
                # Skip equation classes that require user input or selection
                # (splines, user-defined polynomials/functions, etc.).
                if equationClass[1].splineFlag or \
                   equationClass[1].userSelectablePolynomialFlag or \
                   equationClass[1].userCustomizablePolynomialFlag or \
                   equationClass[1].userSelectablePolyfunctionalFlag or \
                   equationClass[1].userSelectableRationalFlag or \
                   equationClass[1].userDefinedFunctionFlag:
                    continue

                # Fit both the plain form and (when supported) the offset form.
                # NOTE(review): the loop body appears truncated in this copy —
                # the actual fitting work is not visible here.
                for extendedVersion in ['Default', 'Offset']:

                    if (extendedVersion == 'Offset') and (
                            equationClass[1].autoGenerateOffsetForm == False):
                        continue
import inspect
import io
import os
import sys
import warnings
from json import loads

# Bokeh imports
import bokeh.models as models
from bokeh.core.json_encoder import serialize_json
from bokeh.model import Model
from bokeh.util.warnings import BokehDeprecationWarning

# Destination directory for the generated output, taken from the command line.
dest_dir = sys.argv[1]

# All classes exported by bokeh.models (models and mixins alike).
classes = [member for name, member in inspect.getmembers(models) if inspect.isclass(member)]
# The root of the Bokeh model hierarchy, located by name.
model_class = next(klass for klass in classes if klass.__name__ == 'Model')

# getclasstree returns a list which contains [ (class, parentClass), [(subClassOfClass, class), ...]]
# where the subclass list is omitted if there are no subclasses.
# If you say unique=True then mixins will be registered as leaves so don't use unique=True,
# and expect to have duplicates in the result of leaves()
all_tree = inspect.getclasstree(classes, unique=False)

def leaves(tree, underneath):
    if len(tree) == 0:
        return []
    elif len(tree) > 1 and isinstance(tree[1], list):
        subs = tree[1]
        if underneath is None or tree[0][0] != underneath:
            return leaves(subs, underneath) + leaves(tree[2:], underneath)
Example #57
0
def _find_context(broker):
    """Return the instance registered under an ExecutionContext subclass.

    Scans ``broker.instances`` and yields the value of the first entry whose
    key is a class deriving from ``ExecutionContext``.  Falls off the end
    (returning None implicitly) when no such entry exists.
    """
    for key, instance in broker.instances.items():
        if not inspect.isclass(key):
            continue
        if issubclass(key, ExecutionContext):
            return instance
Example #58
0
def get_source(object, source_file):
    """Return ``(lines, lnum)``: the source lines of *object*'s file and the
    0-based index of the line where *object* is defined.

    Modeled on ``inspect.findsource``.  *source_file* may name the file
    explicitly; when falsy, the file is derived via ``inspect.getfile``.
    Raises OSError when the source cannot be located.
    """
    import inspect, linecache, re
    file = source_file
    if file:
        # Invalidate cache if needed.
        linecache.checkcache(file)
    else:
        file = inspect.getfile(object)
        # Allow filenames in form of "<something>" to pass through.
        # `doctest` monkeypatches `linecache` module to enable
        # inspection, so let `linecache.getlines` to be called.
        if not (file.startswith('<') and file.endswith('>')):
            raise OSError('source code not available')

    # `linescache` is a module-level cache defined elsewhere in this file.
    # NOTE(review): a fresh read is never stored back into `linescache` —
    # confirm whether skipping the cache write is intentional.
    if file not in linescache:
        with open(file, 'r', encoding='utf-8') as f:
            data = f.read()
            lines = data.split("\n")

    else:
        lines = linescache[file]

    # A module's source begins at the top of its file.
    if inspect.ismodule(object):
        return lines, 0

    if inspect.isclass(object):
        name = object.__name__
        pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
        # make some effort to find the best matching class definition:
        # use the one with the least indentation, which is the one
        # that's most probably not inside a function definition.
        candidates = []
        for i in range(len(lines)):
            match = pat.match(lines[i])
            if match:
                # if it's at toplevel, it's already the best one
                if lines[i][0] == 'c':
                    return lines, i
                # else add whitespace to candidate list
                candidates.append((match.group(1), i))
        if candidates:
            # this will sort by whitespace, and by line number,
            # less whitespace first
            candidates.sort()
            return lines, candidates[0][1]
        else:
            raise OSError('could not find class definition')

    # Unwrap to the underlying code object:
    # method -> function -> code; traceback -> frame -> code.
    if inspect.ismethod(object):
        object = object.__func__
    if inspect.isfunction(object):
        object = object.__code__
    if inspect.istraceback(object):
        object = object.tb_frame
    if inspect.isframe(object):
        object = object.f_code
    if inspect.iscode(object):
        if not hasattr(object, 'co_firstlineno'):
            raise OSError('could not find function definition')
        lnum = object.co_firstlineno - 1
        # Walk upward from the code object's first line so that any
        # decorator lines above the def/async def/lambda are included.
        pat = re.compile(r'^(\s*def\s)|(\s*async\s+def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
        while lnum > 0:
            if pat.match(lines[lnum]): break
            lnum = lnum - 1
        return lines, lnum
    raise OSError('could not find code object')
Example #59
0
    def test_readdate(self):
        """Exercise toolkit.readDate against a table of sample inputs.

        Each case is (text, expected, american, lax).  *expected* is the
        datetime the text should parse to, None for an illegal date that lax
        parsing swallows, or an Exception subclass that strict (lax=False)
        parsing must raise.
        """
        cases = (
            ("22 maart 1980", datetime.datetime(1980, 3, 22, 0, 0, 0), False, True),
            ("22 mrt 1980", datetime.datetime(1980, 3, 22, 0, 0, 0), False, True),
            ("22/3/1980", datetime.datetime(1980, 3, 22, 0, 0, 0), False, True),
            ("1980-3-22", datetime.datetime(1980, 3, 22, 0, 0, 0), False, True),
            ("1980-3-22T01:00:05", datetime.datetime(1980, 3, 22, 1, 0, 5), False, True),
            ("1980-3-22 01:00", datetime.datetime(1980, 3, 22, 1, 0, 0), False, True),
            ("1980-3-22 01:00 PM", datetime.datetime(1980, 3, 22, 13, 0, 0), False, True),
            # surplus :00 field -> time falls back to midnight
            ("1980-3-22 01:00:00:00", datetime.datetime(1980, 3, 22, 0, 0, 0), False, True),
            # illegal date, lax --> None
            ("1980-13-22 01:00:00:00", None, False, True),
            # illegal date, strict --> Error
            ("1980-13-22 01:00:00", ValueError, False, False),
            # illegal time, strict --> Error
            ("1980-3-22 27:00:00", ValueError, False, False),
            # illegal time, strict --> Error
            ("1980-3-22 23:00:00:00", ValueError, False, False),
            # twitter (??)
            ("Sun Sep 29 18:21:12 +0000 2013", datetime.datetime(2013, 9, 29, 18, 21, 12), False, False),
            ("1/1/98", datetime.datetime(1998, 1, 1, 0, 0, 0), False, True),
            ("1/1/04", datetime.datetime(2004, 1, 1, 0, 0, 0), False, True),
            ("31/12/72", datetime.datetime(1972, 12, 31, 0, 0, 0), False, True),
            ("12/31/72", datetime.datetime(1972, 12, 31, 0, 0, 0), True, True),
            # same string read day-first vs. american month-first
            ("1/2/1972", datetime.datetime(1972, 2, 1, 0, 0, 0), False, True),
            ("1/2/1972", datetime.datetime(1972, 1, 2, 0, 0, 0), True, True),
            ("1/2/1972", datetime.datetime(1972, 1, 2, 0, 0, 0), True, True),
            ("30.09.2008", datetime.datetime(2008, 9, 30, 0, 0, 0), False, False),
            ("31. Januar 2009", datetime.datetime(2009, 1, 31, 0, 0, 0), False, True),
            ("December 31, 2009 Thursday", datetime.datetime(2009, 12, 31, 0, 0, 0), False, False),
            (u'30 ao\xfbt 2002', datetime.datetime(2002, 8, 30, 0, 0, 0), False, False),
            ('31. Maerz 2003', datetime.datetime(2003, 3, 31, 0, 0, 0), False, False),
            ('September 1, 2008 Monday 12:44 PM AEST', datetime.datetime(2008, 9, 1, 12, 44), False, False),
            ('23aug2013', datetime.datetime(2013, 8, 23, 0, 0, 0), False, False),
        )

        for text, expected, american, lax in cases:
            if inspect.isclass(expected) and issubclass(expected, Exception):
                # Strict parsing must reject the malformed value outright.
                self.assertRaises(expected,
                                  toolkit.readDate,
                                  text,
                                  lax=False,
                                  american=american)
            else:
                parsed = toolkit.readDate(text, lax=lax, american=american)
                self.assertEqual(parsed, expected)
Example #60
0
    has_touched_functions, has_untouched_functions, is_touched_functions, is_untouched_functions, \
    when_harvested_functions, when_scared_functions

# All character-function modules gathered in one place so they can be
# indexed uniformly below.
modules_list = [
    become_possessed_functions, become_unpossessed_functions, idle_functions,
    has_touched_functions, has_untouched_functions, is_touched_functions,
    is_untouched_functions, when_harvested_functions, when_scared_functions
]

# Maps short module name -> {class name: class} for every concrete function
# class found in that module.
all_functions_dict = {}

for m in modules_list:
    # Collect the classes reachable from the module, skipping abstract
    # "...Function" base classes.  (Original used `not "Function" in name`
    # [E713] and an unused enumerate index; fixed here, behavior unchanged.)
    all_functions_dict[m.__name__[26:]] = {  # cuts off "gslib.character_functions." from module name
        attr.__name__: attr
        for attr in (getattr(m, element) for element in dir(m))
        if inspect.isclass(attr) and "Function" not in attr.__name__
    }


def load_function(owner, module_name, name, d):
    """Instantiate the named character-function class for *owner* and restore
    its saved state from the dict *d*.

    Looks the class up in the module-level ``all_functions_dict`` registry.
    """
    factory = all_functions_dict[module_name][name]
    instance = factory(owner)
    instance.load_from_dict(d)
    return instance