def __new__(cls, name, bases, spec):
    """Metaclass hook that substitutes parent docstrings into ``{pdoc}``
    placeholders throughout the class body.

    For every attribute in ``spec``, the first non-empty docstring found for
    the same attribute on a base class is formatted into the attribute's own
    docstring via ``str.format(pdoc=...)``.  The class docstring gets the
    same treatment, defaulting to the first base's docstring when absent.
    """
    for key, value in spec.items():
        # Unwrap classmethods so the underlying function's __doc__ is writable.
        if type(value) is classmethod:
            value = value.__func__
        # Default template simply inherits the parent doc verbatim.
        doc = getattr(value, "__doc__", "{pdoc}")
        pdoc = ""
        # First base (in declaration order) with a docstring for `key` wins.
        for b in bases:
            try:
                pdoc = getdoc(getattr(b, key))
            except (IndexError, AttributeError):
                pdoc = ""
            if pdoc:
                break
        try:
            value.__doc__ = doc.format(pdoc=pdoc)
        except AttributeError:
            # __doc__ is read-only (or None.format) for some objects; skip.
            pass
    if bases:
        pdoc = getattr(bases[0], "__doc__")
        if "__doc__" in spec and spec["__doc__"]:
            spec["__doc__"] = spec["__doc__"].format(pdoc=pdoc)
        else:
            spec["__doc__"] = pdoc
    return ABCMeta.__new__(cls, name, bases, spec)
def __new__(mcls, name, bases, namespace):
    """Build a namedtuple-backed class that wraps the first base exposing
    ``_fields``.

    If a base declares ``_fields``, an ``inner_<base>`` slot is prepended to
    this class's own fields and proxy properties are generated forwarding
    each base field (and each base property) to that inner instance.
    """
    my_fields = namespace.get('_fields')
    base_fields = None
    # Only the first base that defines _fields is wrapped.
    for base in bases:
        base_fields = getattr(base, '_fields', None)
        if base_fields is not None:
            break
    # Normalise the field spec (string or sequence) via namedtuple parsing.
    my_fields = list(namedtuple('_', my_fields or '')._fields)
    if base_fields:
        base_fields = list(namedtuple('_', base_fields)._fields)
        # Base-class properties are forwarded as well.
        base_fields.extend(k for k in dir(base)
                           if isinstance(getattr(base, k), property))
        inner_name = 'inner_%s' % base.__name__.lower()
        my_fields.insert(0, inner_name)
        for f in base_fields:
            # setdefault: an explicit definition in the class body wins.
            namespace.setdefault(
                f,
                property(operator.attrgetter('%s.%s' % (inner_name, f))))
    if my_fields:
        # Put the namedtuple base first so its tuple machinery backs the class.
        basetuple = namedtuple(name, my_fields)
        bases = (basetuple, ) + bases
        namespace.pop('_fields', None)
        namespace.setdefault('__doc__', basetuple.__doc__)
        namespace.setdefault('__slots__', ())
    return ABCMeta.__new__(mcls, name, bases, namespace)
def __new__(cls, name: str, bases: tuple, namespace: dict) -> Any:
    """Refuse to create concrete subclasses that still carry abstract class
    attributes or abstract classmethods.

    Raises:
        NotImplementedError: listing any remaining abstract attributes or
            abstract classmethods.
    """
    cls = _ABCMeta.__new__(cls, name, bases, namespace)
    # The root class (no bases) may declare abstracts freely.
    if not bases:
        return cls
    abstract_attributes = set()
    abstract_class_method = set()
    for name in dir(cls):
        attr = getattr(cls, name)
        if getattr(attr, "__is_abstract_attribute__", False):
            abstract_attributes.add(name)
        elif getattr(attr, "__isabstractmethod__", False) and inspect.ismethod(
            attr
        ):
            # A *bound* method on the class object itself is a classmethod.
            abstract_class_method.add(name)
    if abstract_attributes:
        raise NotImplementedError(
            "Can't create class {} with abstract class attributes: {}".format(
                cls.__name__, ", ".join(abstract_attributes)
            )
        )
    if abstract_class_method:
        raise NotImplementedError(
            "Can't create class {} with abstract classmethod: {}".format(
                cls.__name__, ", ".join(abstract_class_method)
            )
        )
    return cls
def __new__(mcls, name, bases, namespace, **kwargs):
    """Create a model-mapped class and register it in ``mcls.__mapping__``.

    ``__model__`` must name the mapped model; ``__mapped_properties__`` is a
    sequence of ModelMappedProperty instances describing the schema.
    Creating a second class for an already-mapped model returns the
    previously registered class instead of a new one.

    Raises:
        TypeError: when ``__model__`` is missing or a mapped property has
            the wrong type.
    """
    model = namespace.get('__model__')
    mapped_properties = namespace.get('__mapped_properties__')
    if not model:
        raise TypeError(
            f'Model-mapped class \'{name}\' attribute \'__model__\' must be set to mapped model.'
        )
    # One class per model: reuse the registered class when it exists.
    if mcls.__mapping__.get(model):
        return mcls.__mapping__.get(model)
    schema = []
    # NOTE(review): if __mapped_properties__ is absent this iterates None and
    # raises TypeError — confirm callers always provide it.
    for attribute in mapped_properties:
        if not isinstance(attribute, ModelMappedProperty):
            raise TypeError(
                f'Mapped property \'{attribute.name}\' should be of type \'MappedProperty\'.'
            )
        schema.append(attribute.name)
        if attribute.autogenerate:
            namespace[attribute.name] = attribute.generate_property()
    namespace['__schema__'] = tuple(schema)

    def fields(cls):
        # Expose the schema tuple as a classmethod on the mapped class.
        return cls.__schema__
    namespace['fields'] = classmethod(fields)
    type_ = ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
    mcls.__mapping__[model] = type_
    return type_
def __new__(cls, name, bases, attrs):
    """Create an aiida.orm plugin class: attach a module-scoped logger and
    derive its plugin/query type strings from the defining module path.

    Raises:
        InternalError: if the class is not defined in a module under
            ``aiida.orm.``.
    """
    newcls = ABCMeta.__new__(cls, name, bases, attrs)
    newcls._logger = logging.getLogger(
        '{:s}.{:s}'.format(attrs['__module__'], name))

    # Note: the reverse logic (from type_string to name that can
    # be passed to the plugin loader) is implemented in
    # aiida.common.pluginloader.
    prefix = "aiida.orm."
    if attrs['__module__'].startswith(prefix):
        # Strip aiida.orm.
        # Append a dot at the end, always
        newcls._plugin_type_string = "{}.{}.".format(
            attrs['__module__'][len(prefix):], name)
        # Make sure the pugin implementation match the import name.
        # If you have implementation.django.calculation.job, we remove
        # the first part to only get calculation.job.
        if newcls._plugin_type_string.startswith('implementation.'):
            newcls._plugin_type_string = \
                '.'.join(newcls._plugin_type_string.split('.')[2:])
        # The base Node class maps onto the empty type string.
        if newcls._plugin_type_string == 'node.Node.':
            newcls._plugin_type_string = ''
        newcls._query_type_string = get_query_type_string(
            newcls._plugin_type_string
        )
    else:
        raise InternalError("Class {} is not in a module under "
                            "aiida.orm. (module is {})".format(
                                name, attrs['__module__']))
    return newcls
def instance(mcls):
    """Return the singleton 'namedtuple' marker class, creating and caching
    it on the metaclass on first use."""
    try:
        cls = mcls.__instance
    except AttributeError:
        # First access: build the tuple-derived marker class and cache it.
        cls = ABCMeta.__new__(mcls, 'namedtuple', (tuple,), {})
        mcls.__instance = cls
    return cls
def __new__(cls, name, bases, dictionary):  # pylint: disable=I0021,arguments-differ
    """Node metaclass: validate the bases and expand named_child /
    named_children declarations into interned ``subnode_*`` slots.

    Raises:
        NuitkaNodeDesignError: for a non-string named_child, or a
            named_children tuple with fewer than two entries.
    """
    _checkBases(name, bases)
    if "__slots__" not in dictionary:
        dictionary["__slots__"] = ()
    if "named_child" in dictionary:
        named_child = dictionary["named_child"]
        if type(named_child) is not str:
            raise NuitkaNodeDesignError(
                name,
                "Class named_child attribute must be string not",
                type(named_child),
            )
        # One extra slot per declared child; interned for fast comparison.
        dictionary["__slots__"] += (intern("subnode_" + dictionary["named_child"]), )
    if "named_children" in dictionary:
        if len(dictionary["named_children"]) <= 1:
            raise NuitkaNodeDesignError(
                name,
                "Use ExpressionChildHaving for one child node classes")
        assert type(dictionary["named_children"]) is tuple
        dictionary["__slots__"] += tuple(
            intern("subnode_" + named_child)
            for named_child in dictionary["named_children"])
    # Not a method:
    if "checker" in dictionary:
        dictionary["checker"] = staticmethod(dictionary["checker"])
    # false alarm, pylint: disable=I0021,too-many-function-args
    return ABCMeta.__new__(cls, name, bases, dictionary)
def __new__(self, clsname, bases, ns, *args, **kwargs):
    """Collect protected function names from the bases and the class body,
    wrap __setattr__ so protected names cannot be reassigned, and reject a
    class body that redefines an inherited protected function.

    Raises:
        ProtectedFunction: when the body redefines a protected name.
    """
    ns['_pbc__protected_functions__'] = []
    # Inherit the protected-name list from every base that carries one.
    for base in bases:
        protected = getattr(base, '_pbc__protected_functions__', None)
        if protected is not None:
            ns['_pbc__protected_functions__'] += protected
    if '__setattr__' in ns:
        ns['__setattr__'] = _pbc_protect_setter(ns['__setattr__'])
    else:
        # No user-defined __setattr__: install a guarded default.
        # NOTE(review): the default delegates via setattr(), which re-enters
        # __setattr__ — confirm _pbc_protect_setter prevents unbounded
        # recursion (e.g. by writing through object.__setattr__).
        @_pbc_protect_setter
        def setattribute(self, name, value):
            setattr(self, name, value)
        ns['__setattr__'] = setattribute
    for name, obj in ns.items():
        # Redefining an inherited protected function is an error.
        if name in ns['_pbc__protected_functions__']:
            raise ProtectedFunction(name)
        if callable(obj):
            if getattr(obj, '_pbc__isprotected__', False) is True:
                ns['_pbc__protected_functions__'].append(name)
    return ABCMeta.__new__(self, clsname, bases, ns, *args, **kwargs)
def __new__(mcls,name,bases,namespace): # When a new class is created, that has this metaclass
    """Enforce the masking interface: concrete masks must define __call__,
    and 2D/3D masks must provide a plot(ax, transformation) method.

    Raises:
        TypeError: listing all missing methods and/or wrong plot signatures.
    """
    missingMethods = [] # Hold all methods missing
    wrongSignature = [] # Hold all methods having wrong signature
    if not name in ['MaskingObject','MultiMask']: # If MaskingObject, then just continue
        if not '__call__' in namespace: # Check if __call__ exists
            missingMethods.append('__call__')
        # NOTE(review): assumes every concrete mask body defines
        # 'dimensionality' — a missing key raises KeyError here; confirm.
        if namespace['dimensionality'] == '2D' or namespace['dimensionality'] == '3D':
            if 'plot' in namespace: # Check if plot has the right signature
                callFunction = namespace['plot']
                requiredNames = ['ax','transformation']
                notInSignature = requiredArguments(callFunction,requiredNames)
                if len(notInSignature)>0:
                    wrongSignature.append(['plot',requiredNames,notInSignature])
            else:
                missingMethods.append('plot')
    # Write error report
    ErrorMessage = []
    if len(missingMethods)>0:
        ErrorMessage.append("Can't instantiate abstract class {} with abstract methods\n".format(name)+', '.join(missingMethods))
    for entry in wrongSignature:
        methodName = entry[0]
        requiredNames = entry[1]
        notInSignature = entry[2]
        ErrorMessage.append('The "{}" method on "{}" must have all of the following arguments in call signature [{}]. Following are missing: [{}]'.format(methodName,name,', '.join(requiredNames),', '.join(notInSignature)))
    if len(ErrorMessage)>0:
        raise TypeError('\n'.join(ErrorMessage))
    return ABCMeta.__new__(mcls,name,bases,namespace)
def __new__(mcls, name, bases, namespace):
    """Create a class backed by a namedtuple built from its field spec.

    The spec may come from the class body or be inherited from a base; it is
    normalised into an OrderedDict stored as ``_fields`` before the
    namedtuple base is appended (after the other bases, so their methods
    take precedence).
    """
    def find_field_spec(namespace, bases):
        """Look for a _fields attribute in the namespace or one of the
        base classes.
        """
        field_spec = namespace.get('_fields')
        for base in bases:
            if field_spec is not None:
                break
            field_spec = getattr(base, '_fields', None)
        return field_spec

    def insert_namedtuple(name, bases, namespace):
        """Insert a namedtuple based on the given fields *after* the other
        base classes, so that calls to its methods can be intercepted.
        """
        field_names = list(namespace['_fields'])
        basetuple = namedtuple('{}Fields'.format(name), field_names)
        # NOTE(review): namedtuple._source was removed in Python 3.9; this
        # delete only works on older interpreters — confirm target version.
        del basetuple._source # is no longer accurate
        bases = bases + (basetuple,)
        namespace.setdefault('__doc__', basetuple.__doc__)
        namespace.setdefault('__slots__', ())
        return bases

    field_spec = find_field_spec(namespace, bases)
    # An abstractproperty spec means "fields still abstract": skip setup.
    if not isinstance(field_spec, abstractproperty):
        try:
            namespace['_fields'] = OrderedDict(field_spec)
        except ValueError:
            # Not key/value pairs: parse the string/sequence form instead.
            namespace['_fields'] = OrderedDict(parse_fields(field_spec))
        bases = insert_namedtuple(name, bases, namespace)
    return ABCMeta.__new__(mcls, name, bases, namespace)
def __new__(mcs, name, bases, members):
    """Create the class and, when the body declares a 'class_name', register
    it in the metaclass-level AST class registry."""
    new_cls = ABCMeta.__new__(mcs, name, bases, members)
    registered_name = members.get('class_name')
    if registered_name:
        mcs.classes[registered_name] = new_cls
        LOG.debug('Ast class registered: %r -> %r', registered_name, new_cls)
    return new_cls
def __new__(cls, name, bases, dictionary):  # pylint: disable=I0021,arguments-differ
    """Validate the base classes and guarantee an (empty) __slots__ entry
    before delegating class creation to ABCMeta."""
    _checkBases(name, bases)
    # Equivalent to the membership check: only inserts when absent.
    dictionary.setdefault("__slots__", ())
    return ABCMeta.__new__(cls, name, bases, dictionary)
def __new__(meta, classname, bases, classDict):
    """
    Create new class and copy all docstrings from base classes.
    """
    cls = ABCMeta.__new__(meta, classname, bases, classDict)
    for name in classDict:
        fn = classDict[name]
        # Only plain functions are processed (not classmethods/properties).
        if type(fn) != FunctionType:
            continue
        # Collect (base, doc) pairs along the MRO, most-derived first.
        docs = []
        for base in cls.mro():
            if not hasattr(base, name) or base is object:
                continue
            basefn = getattr(base, name)
            basedoc = getdoc(basefn)
            if basedoc:
                docs.append((base, basedoc))
        if len(docs) == 0:
            doc = None
        elif len(docs) == 1:
            doc = docs[0][1]
        else:
            # Several docs: start with this class's own (when present), then
            # append each overridden base doc under an "Overrides" header.
            doc = ""
            if docs[0][0] is cls:
                doc += docs[0][1]
                docs = docs[1:]
            for d in docs:
                doc += "\n\nOverrides %s.%s" % (d[0].__name__, name)
                doc += d[1]
            doc = doc.lstrip('\n')
        cls.__dict__[name].__doc__ = doc
    return cls
def __new__(mcs, name: str, bases: Tuple, namespace: Dict):
    """Generate set/get/del meta methods for every object type, delegating to
    the class's private _set_meta/_get_meta/_del_meta implementations, each
    marked as implementing the matching AbstractMetaStore method.

    Note: the generated closures reference only `self`, so the usual
    loop-variable late-binding pitfall does not apply here.
    """
    for tp in object_types:
        # set
        for method_name in (f'set_{tp}_meta', f'set_{tp}_chunk_meta'):
            @implements(getattr(AbstractMetaStore, method_name))
            async def _set(self, object_id: str, **meta):
                return self._set_meta(object_id, **meta)
            namespace[method_name] = _set
        # get
        for method_name in (f'get_{tp}_meta', f'get_{tp}_chunk_meta'):
            @implements(getattr(AbstractMetaStore, method_name))
            async def _get(self, object_id: str, fields: List[str] = None):
                return self._get_meta(object_id, fields=fields)
            namespace[method_name] = _get
        # del
        for method_name in (f'del_{tp}_meta', f'del_{tp}_chunk_meta'):
            @implements(getattr(AbstractMetaStore, method_name))
            async def _del(self, object_id: str):
                return self._del_meta(object_id)
            namespace[method_name] = _del
    return ABCMeta.__new__(mcs, name, bases, namespace)
def __new__(cls, cls_name, cls_parents, cls_dict):
    """Create a source class: record whether it is abstract, validate
    concrete classes, collect AbstractAttr descriptors into ``__attrs__``,
    and resolve the class's source name.
    """
    src_cls = ABCMeta.__new__(cls, cls_name, cls_parents, cls_dict)
    # 'NewBase' is the with_metaclass helper class: pass it through untouched.
    if cls_name.split('.')[-1] == 'NewBase':
        return src_cls
    is_abc = cls_dict.pop('__abstract__', False)
    set_abstract(src_cls, is_abc)
    # Re-read attributes from the created class so inherited ones count too.
    cls_attr_names = [nm for nm in dir(src_cls) if not nm.startswith('__')]
    cls_dict = dict([(name, getattr(src_cls, name)) for name in cls_attr_names])
    if not is_abstract(src_cls):
        cls.validate(src_cls)
    source_attrs_dict = {}
    for name, attr in cls_dict.items():
        if isinstance(attr, AbstractAttr):
            source_attrs_dict[name] = attr
    src_cls.__attrs__ = source_attrs_dict
    # Explicit __sourcename__ wins; otherwise derive one from the class.
    source_name = cls_dict.get('__sourcename__') or cls.get_source_name(src_cls)
    src_cls.__sourcename__ = source_name
    return src_cls
def __new__(cls, name, bases, attrs):
    # type: (Type[ManifestItemMeta], str, Tuple[ManifestItemMeta, ...], Dict[str, Any]) -> type
    """Create the class and register every concrete ManifestItem subclass in
    the global item_types map, keyed by its item_type string."""
    new_class = ABCMeta.__new__(cls, name, bases, attrs)
    # Abstract intermediates are not registered.
    if isabstract(new_class):
        return new_class
    assert issubclass(new_class, ManifestItem)
    assert isinstance(new_class.item_type, str)
    item_types[new_class.item_type] = new_class
    return new_class
def __new__(mcls, name, bases, namespace, **kwargs):
    """Alias legacy method names onto the standard protocol: a class body
    that defines get_data/size but not __iter__/__len__ gets the dunders
    filled in from the legacy implementations."""
    for dunder, legacy in (('__iter__', 'get_data'), ('__len__', 'size')):
        if dunder not in namespace and legacy in namespace:
            namespace[dunder] = namespace[legacy]
    return ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
def __new__(mcs, name, bases, namespace):  # pylint: disable=arguments-differ,protected-access,too-many-function-args
    """Create the ORM class, give it a '<module>.<name>' logger, and derive
    its plugin/query type strings from the defining module."""
    module = namespace['__module__']
    new_class = ABCMeta.__new__(mcs, name, bases, namespace)  # pylint: disable=too-many-function-args
    new_class._logger = logging.getLogger('{}.{}'.format(module, name))
    # Set the plugin type string and query type string based on the plugin type string
    new_class._plugin_type_string = get_type_string_from_class(module, name)
    new_class._query_type_string = get_query_type_from_type_string(new_class._plugin_type_string)
    return new_class
def __new__(cls, name, bases, attrs):
    """Create a registry-aware class.

    The class is created exactly once, registered in ``cls.REGISTRY`` under
    its ``_registry_name``, and given a default ``_unit_test = True`` when
    that attribute is absent.

    Raises:
        Exception: if neither the class body nor its bases define
            ``_registry_name``.
    """
    # Bug fix: the original created the class twice — once via type.__new__
    # (registered and patched) and once via ABCMeta.__new__ (returned) — so
    # the registry held a stale, different object and the _unit_test default
    # was lost on the class actually in use.  Create once and return it.
    new_cls = ABCMeta.__new__(cls, name, bases, attrs)
    if not hasattr(new_cls, '_registry_name'):
        raise Exception('Any class with ABCRegistry as metaclass has to '
                        'define the class attribute _registry_name')
    cls.REGISTRY[new_cls._registry_name] = new_cls
    if not hasattr(new_cls, '_unit_test'):
        new_cls._unit_test = True
    return new_cls
def __new__(cls, name, bases, namespace, **kwargs):
    """Create the class with a per-class logger and plugin/query type strings
    derived from its defining module."""
    module = namespace['__module__']
    new_class = ABCMeta.__new__(cls, name, bases, namespace, **kwargs)  # pylint: disable=too-many-function-args
    new_class._logger = logging.getLogger(f'{module}.{name}')  # pylint: disable=protected-access
    # Set the plugin type string and query type string based on the plugin type string
    new_class._plugin_type_string = get_type_string_from_class(module, name)  # pylint: disable=protected-access
    new_class._query_type_string = get_query_type_from_type_string(new_class._plugin_type_string)  # pylint: disable=protected-access
    return new_class
def __new__(mcls, name, bases, namespace):
    """Accumulate the freezable field list: every base's _fields tuple is
    prepended to this class's own __slots__ and stored as _fields."""
    combined = namespace.get('__slots__', ())
    for parent in bases:
        inherited = getattr(parent, '_fields', None)
        if inherited:
            combined = inherited + combined
    # All freezable objects must use __slots__ for attribute definition.
    namespace.setdefault('__slots__', ())
    namespace['_fields'] = combined
    return ABCMeta.__new__(mcls, name, bases, namespace)
def __new__(metacls, clsname, bases, namespace, name=None, **kwargs):
    """Create an applet class; a non-None ``name`` keyword stores the name
    in the class body and registers the class in the metaclass-wide registry.

    Raises:
        NameError: when an applet with the same name is already registered.
    """
    registering = name is not None
    if registering:
        if name in metacls.all_applets:
            raise NameError(f"Applet {name!r} already exists")
        namespace["name"] = name
    cls = ABCMeta.__new__(metacls, clsname, bases, namespace, **kwargs)
    if registering:
        metacls.all_applets[name] = cls
    return cls
def __new__(mcs: type, name: str, bases: Tuple[type], d: Dict[str, Any]):
    """Build both a relaxed (contract-free) and a contracted version of the
    class; the contracted class is returned, and the two versions reference
    each other via ``__relaxed__`` / ``__contracted__``.

    Under ``python -O`` (``__debug__`` false), and for the 'Contract' base
    class itself, a plain class is created with no wrapping.
    """
    if not __debug__ or name == "Contract":
        return type.__new__(mcs, name, bases, d)
    logger.info(f"Create contracted/relaxed for class `{name}`")
    # __paranoid__ defaults to strict checking unless the body disables it.
    paranoid = d.get("__paranoid__", True)
    logger.debug(f"Create relaxed version")
    # The relaxed class inherits from the relaxed counterparts of its bases.
    relaxed_bases = tuple([getattr(b, '__relaxed__', b) for b in bases])
    relaxed_class = ABCMeta.__new__(mcs, "relaxed_" + name, relaxed_bases, d)
    logger.debug(f"Relaxed version created: %s", relaxed_class)
    logger.debug(f"Create contracted version")
    # The contracted variant keeps the original bases and records its
    # relaxed twin in the namespace before being wrapped by the contractor.
    contracted_class = _Contractor(
        ABCMeta.__new__(mcs, "_contracted_" + name, bases, {
            **d,
            '__relaxed__': relaxed_class
        }), paranoid).wrap()
    logger.debug(f"Contracted version created: %s", contracted_class)
    relaxed_class.__contracted__ = contracted_class
    return contracted_class
def __new__(mcs, *args: Any, **kwargs: Any) -> "MetaAggregate":
    """Create an aggregate class as a dataclass, noting whether its own
    annotations declared an ``id`` field.

    A user-declared ``id`` annotation is removed (so the dataclass machinery
    does not generate the field) and the fact is recorded in
    ``_annotations_mention_id`` for later constructor logic.
    """
    try:
        # args[2] is the class namespace; drop a body-declared `id` field.
        args[2]["__annotations__"].pop("id")
    except KeyError:
        pass
    else:
        args[2]["_annotations_mention_id"] = True
    cls = ABCMeta.__new__(mcs, *args)
    # Turn the freshly created class into a dataclass in place.
    cls = dataclass(cls)
    return cast(MetaAggregate, cls)
def __new__(mcs, name, bases, namespace):
    """Create the class and replace its smt2_encode with a caching wrapper
    that routes encoding through encoder.cached_encode."""
    cls = ABCMeta.__new__(mcs, name, bases, namespace)
    # Bind the undecorated implementation locally: looking it up through
    # cls.smt2_encode inside the wrapper would recurse into the wrapper.
    original_encode = cls.smt2_encode

    @wraps(original_encode)
    def cached_smt2_encode(encodable, encoder):
        """Decorated smt2_encode"""
        return encoder.cached_encode(encodable, original_encode)

    cls.smt2_encode = cached_smt2_encode
    return cls
def __new__(mcs, *args, **kwargs):
    """Create the class and, when it declares required_extras, guard both
    __init__ and load_from_checkpoint with the `requires` decorator so that
    a helpful error is raised when optional dependencies are missing.
    """
    result = ABCMeta.__new__(mcs, *args, **kwargs)
    if result.required_extras is not None:
        result.__init__ = requires(result.required_extras)(result.__init__)
        load_from_checkpoint = getattr(result, "load_from_checkpoint", None)
        if load_from_checkpoint is not None:
            # Wrap the underlying function, then rebind as a classmethod.
            result.load_from_checkpoint = classmethod(
                requires(result.required_extras)(
                    result.load_from_checkpoint.__func__))
    return result
def __new__(cls, classname, bases, dict):
    """Record the constructor's argument names on the class as
    ``parameter_names``, falling back to the first base defining __init__
    when the class body does not.
    """
    __init__ = dict.get("__init__")
    if __init__ is None:
        for base in bases:
            __init__ = getattr(base, "__init__", None)
            if __init__ is not None:
                break
    # NOTE(review): with no bases at all, __init__ stays None and
    # getargs(None) is called — confirm that cannot happen for users of
    # this metaclass.
    dict["parameter_names"] = getargs(__init__)
    return ABCMeta.__new__(cls, classname, bases, dict)
def __new__(cls, name, supers, kwargs): t = ABCMeta.__new__(cls, name, supers, {}) # Force __subclass__ to be a classmethod # if not isinstance(t.__subclass__, classmethod): # t.__subclass__ = classmethod(_im_func(t.__subclass__)) if '__subclass__' in kwargs: sc = kwargs['__subclass__'] if not isinstance(sc, classmethod): kwargs['__subclass__'] = classmethod(sc) t.__subclass__(**kwargs) return t
def __new__(mcs, name, bases, namespace):
    # type: (typing.Type['RuleRegistry'], str, typing.Tuple[type, ...], typing.Dict[str, typing.Any]) -> type
    """Create a rule class, validate it, and register concrete rules in the
    global registry keyed by their error_code.

    The 'BaseRule' root class itself is exempt from validation and
    registration.
    """
    new_cls = ABCMeta.__new__(mcs, name, bases, namespace)  # type: typing.Type['BaseRule']
    mcs._prevent_metaclass_usage_from_not_BaseRule_extensions(new_cls)
    if name != 'BaseRule':
        mcs._validate_class_attributes(new_cls)
        mcs._prevent_rule_duplication(new_cls)
        RuleRegistry._REGISTRY[new_cls.error_code] = new_cls
    return new_cls
def __new__(mcls, name, bases, attrs):
    """Create a check class and register it under each file extension and
    each magic name it declares in the Style50 dispatch maps."""
    cls = ABCMeta.__new__(mcls, name, bases, attrs)
    try:
        # Register class as the check for each of its extensions.
        for ext in attrs.get("extensions", []):
            Style50.extension_map[ext] = cls
        # NOTE(review): a class without a `magic_names` attribute raises
        # AttributeError here, which is NOT caught — confirm every check
        # class defines it.
        for name in cls.magic_names:
            Style50.magic_map[name] = cls
    except TypeError:
        # If `extensions` property isn't iterable, skip it.
        pass
    return cls
def __new__(mcs, name, bases, namespace):
    """Create the ORM class, attach a '<module>.<name>' logger, and compute
    its plugin/query type strings."""
    module_name = namespace['__module__']
    created = ABCMeta.__new__(mcs, name, bases, namespace)
    created._logger = logging.getLogger('{}.{}'.format(module_name, name))
    # Set the plugin type string and query type string based on the plugin type string
    created._plugin_type_string = get_type_string_from_class(module_name, name)
    created._query_type_string = get_query_type_from_type_string(created._plugin_type_string)
    return created
def __new__(metacls, name, bases, namespace, **kwds):
    """Create a step class whose __call__ is wrapped to return a step
    result; when both NAME and DESCRIPTION are set, calls are additionally
    logged before/after execution."""
    result = ABCMeta.__new__(metacls, name, bases, dict(namespace))
    NAME = result.NAME
    DESCRIPTION = result.DESCRIPTION
    if NAME and DESCRIPTION:
        result.__call__ = log_before_after(NAME, DESCRIPTION)(
            to_step_result(result.__call__))
    else:
        result.__call__ = to_step_result(result.__call__)
    return result
def __new__(mcs, name: str, bases: Tuple, class_dict: Dict[str, Any]): result = ABCMeta.__new__(mcs, name, bases, class_dict) # Skip attaching for the base class if name == "TaskTester": return result # Attach forward test mcs.attach_test(result, "test_forward", _test_forward) # Attach fit test if "example_train_sample" in class_dict: mcs.attach_test(result, "test_fit", _test_fit) # Attach JIT tests if result.traceable: mcs.attach_test(result, "test_jit_trace", _test_jit_trace) if result.scriptable: mcs.attach_test(result, "test_jit_script", _test_jit_script) # Attach CLI test if result.cli_command is not None: mcs.attach_test(result, "test_cli", _test_cli) # Skip tests if dependencies not available for attribute_name, attribute_value in filter(lambda x: x[0].startswith("test"), inspect.getmembers(result)): setattr( result, attribute_name, pytest.mark.skipif(not result.is_testing, reason="Dependencies not available.")( _copy_func(attribute_value) ), ) # Attach error check tests mcs.attach_test( result, "test_load_from_checkpoint_dependency_error", _test_load_from_checkpoint_dependency_error ) mcs.attach_test(result, "test_init_dependency_error", _test_init_dependency_error) for dependency_test in ["test_load_from_checkpoint_dependency_error", "test_init_dependency_error"]: setattr( result, dependency_test, pytest.mark.skipif(result.is_available, reason="Dependencies available.")( _copy_func(getattr(result, dependency_test)) ), ) return result
def __new__(cls, name, bases, attr):
    """Ensure that subclasses do not carry over previous checks.

    If you import multiple modules in which MonitorDispatcher is subclassed,
    all checks are saved in the global MonitorDispatcher class; keep only
    the checks defined in this class body.
    """
    new_class = ABCMeta.__new__(cls, name, bases, attr)
    own_values = attr.values()
    retained = []
    for registered_check in new_class.checks:
        if registered_check in own_values:
            retained.append(registered_check)
    new_class.checks = retained
    return new_class
def __new__(mcls, name, bases, namespace):
    """Prepend a namedtuple base built from ``_fields`` (own or inherited),
    unless the field spec is still an abstractproperty placeholder."""
    fields = namespace.get('_fields')
    # Inherit the field spec from the first base that provides one.
    for base in bases:
        if fields is not None:
            break
        fields = getattr(base, '_fields', None)
    if not isinstance(fields, abstractproperty):
        basetuple = _namedtuple(name, fields)
        # Namedtuple goes first so its tuple machinery backs the class.
        bases = (basetuple, ) + bases
        namespace.pop('_fields', None)
        namespace.setdefault('__doc__', basetuple.__doc__)
        namespace.setdefault('__slots__', ())
    return ABCMeta.__new__(mcls, name, bases, namespace)
def __new__(mcls, name, bases, namespace):
    """Prepend a namedtuple base built from ``_fields`` (own or inherited),
    unless the field spec is still an abstractproperty placeholder."""
    fields = namespace.get('_fields')
    # Inherit the field spec from the first base that provides one.
    for base in bases:
        if fields is not None:
            break
        fields = getattr(base, '_fields', None)
    if not isinstance(fields, abstractproperty):
        basetuple = _namedtuple(name, fields)
        # Namedtuple goes first so its tuple machinery backs the class.
        bases = (basetuple,) + bases
        namespace.pop('_fields', None)
        namespace.setdefault('__doc__', basetuple.__doc__)
        namespace.setdefault('__slots__', ())
    return ABCMeta.__new__(mcls, name, bases, namespace)
def __new__(metacls, name, bases, namespace, **kwds):
    """Create a step class whose __call__ returns a step result; classes
    defining both NAME and DESCRIPTION also get before/after call logging."""
    result = ABCMeta.__new__(metacls, name, bases, dict(namespace))
    NAME = result.NAME
    DESCRIPTION = result.DESCRIPTION
    if NAME and DESCRIPTION:
        result.__call__ = log_before_after(NAME, DESCRIPTION)(
            to_step_result(result.__call__)
        )
    else:
        result.__call__ = to_step_result(result.__call__)
    return result
def __new__(metacls, name, bases, dct):
    """Merge generated schema attributes into the class body, create the
    class, point every field back at it (enabling self-references), and
    optionally auto-register the record type.
    """
    schema_attrs = metacls._get_schema_attributes(name=name, bases=bases, dct=dct)
    dct.update(schema_attrs)
    cls = ABCMeta.__new__(metacls, name, bases, dct)
    # allow self-references etc.
    # NOTE(review): dict.iteritems is Python 2 only — confirm target version.
    for field_name, field in cls._fields.iteritems():
        field.set_parent(cls)
    if metacls.auto_register:
        auto_store.add_record(cls, _bump_stack_level=True)
    return cls
def __new__(cls, name, bases, dictionary):  # pylint: disable=I0021,arguments-differ
    """Validate bases, guarantee a __slots__ entry, add an interned subnode
    slot for a declared named_child, and turn a 'checker' entry into a
    staticmethod."""
    _checkBases(name, bases)
    dictionary.setdefault("__slots__", ())
    if "named_child" in dictionary:
        dictionary["__slots__"] += (intern("subnode_" + dictionary["named_child"]),)
    # Not a method:
    if "checker" in dictionary:
        dictionary["checker"] = staticmethod(dictionary["checker"])
    # false alarm, pylint: disable=I0021,too-many-function-args
    return ABCMeta.__new__(cls, name, bases, dictionary)
def __new__(metacls, name, bases, dct):
    """Merge generated schema attributes into the class body, create the
    class, attach each field to it (allowing self-references), and
    optionally auto-register the record type.
    """
    schema_attrs = metacls._get_schema_attributes(
        name=name, bases=bases, dct=dct
    )
    dct.update(schema_attrs)
    cls = ABCMeta.__new__(metacls, name, bases, dct)
    # allow self-references etc.
    # NOTE(review): dict.iteritems is Python 2 only — confirm target version.
    for field_name, field in cls._fields.iteritems():
        field.set_parent(cls)
    if metacls.auto_register:
        auto_store.add_record(cls, _bump_stack_level=True)
    return cls
def __new__(meta, name, bases, clsdict):
    """Inherit docstrings: fill in a missing class docstring and missing
    attribute docstrings from the first base (walking each base's MRO) that
    provides one.
    """
    if not('__doc__' in clsdict and clsdict['__doc__']):
        # Class docstring: first non-empty one along the bases' MROs wins.
        for mro_cls in (mro_cls for base in bases for mro_cls in base.mro()):
            doc = mro_cls.__doc__
            if doc:
                clsdict['__doc__'] = doc
                break
    for attr, attribute in clsdict.items():
        if not attribute.__doc__:
            for mro_cls in (mro_cls for base in bases for mro_cls in base.mro()
                            if hasattr(mro_cls, attr)):
                doc=getattr(getattr(mro_cls, attr), '__doc__')
                if doc:
                    # NOTE(review): assigning __doc__ raises AttributeError
                    # for objects with read-only __doc__ (e.g. builtins) —
                    # confirm class bodies hold only functions/properties.
                    attribute.__doc__ = doc
                    break
    return ABCMeta.__new__(meta, name, bases, clsdict)
def __new__(cls, name, bases, attrs):
    """Create a field class: normalise its `messages` into a
    FieldErrorMessages instance and splice an optional `new` hook into
    __init__ (the hook runs after the regular constructor and is then
    removed from the class).
    """
    klass = ABCMeta.__new__(cls, name, bases, dict(attrs))
    if isinstance(attrs.get('messages'), FieldErrorMessages):
        klass.messages = attrs.get('messages')
    else:
        # A plain dict (or nothing) becomes keyword overrides for defaults.
        klass.messages = FieldErrorMessages(**attrs.get('messages', {}))
    if getattr(klass, 'new', None):
        constructor = klass.__init__
        new_constructor = klass.new

        def field_init(self, *args, **kwargs):
            # Run the original __init__, then the class's `new` hook.
            constructor(self, *args, **kwargs)
            new_constructor(self, *args, **kwargs)
        klass.__init__ = field_init
        delattr(klass, 'new')
    return klass
def __new__(mcls, name, bases, dct):
    """Create a context type, inheriting base_type/_context/_instances from
    a single Context parent; root context types start with fresh state.

    Raises:
        TypeError: when more than one base is itself a Context type.
    """
    base_types = [b for b in bases if isinstance(b, Context)]
    if len(base_types) > 1:
        raise TypeError("Cannot resolve inheritance of multiple context types")
    elif len(base_types) == 1:
        # Share the parent's instance list and context bindings.
        parent = base_types[0]
        base_type = parent.base_type
        base_context = parent._context
        instances = parent._instances
    else:
        # This is for a base context type.
        base_type = None
        base_context = None
        instances = []
    dct = mcls.__new_init(dct)
    cls = ABCMeta.__new__(mcls, name, bases, dct)
    cls._instances = instances
    cls.base_type = base_type
    cls._context = base_context
    return cls
def __new__(cls, name, bases, attrs):
    """Create a schema class: initialise all registry/callable containers,
    then run the multi-stage field discovery and processing pipeline."""
    cls, name, bases, attrs = cls.prepare_class(cls, name, bases, attrs)
    klass = ABCMeta.__new__(cls, name, bases, dict(attrs))
    # Fresh containers so nothing is shared with parent schema classes.
    klass._fields = {}
    klass._tags = {}
    klass._elements = {}
    klass._subfields = {}
    klass._pending_schemas = {}
    klass._load_keys = {}
    klass._schema_callables = SchemaCallableObject()
    klass._field_callables = FieldCallableObject()
    klass._config = DEFAULT_SCHEMA_OPTIONS
    # Pipeline order matters: bases → polymorphism → config → tags → fields.
    klass.handle_bases(bases)
    klass.handle_poly(cls, name, bases, attrs)
    klass.handle_config()
    klass.handle_tags()
    klass.find_fields()
    klass.process_fields()
    klass._schema_callables.find(klass)
    klass._field_callables.find(klass)
    return klass
def __new__(mcls, name, bases, namespace):
    """Create the class and flag it as a protocol when it either *is* the
    Protocol base class or lists Protocol as an explicit base."""
    new_class = ABCMeta.__new__(mcls, name, bases, namespace)
    # 'Protocol' must be an explicit base class in order for a class to
    # be a protocol.
    is_protocol_root = (name == u'Protocol')
    new_class._is_protocol = is_protocol_root or Protocol in bases
    return new_class
def __new__(cls, name, bases, attrs, **kwargs):
    """Create the class and register it in the global item_types map under
    its item_type key."""
    created = ABCMeta.__new__(cls, name, bases, attrs, **kwargs)
    item_types[created.item_type] = created
    return created
def __new__(a, b, c, d):
    """Debug-traced class creation (Python 2 print syntax).

    NOTE(review): the prints below look like debug leftovers dumping the
    metaclass arguments (a=metaclass, b=name, c=bases, d=namespace) on
    every class creation — consider removing them before release.
    """
    print a, "//", b, "//", c, "//", d
    print type(d), dir(d), d
    return ABCMeta.__new__(a, b, c, d)
def __new__(mcs, name, bases, attrs, *, is_interface=False):
    """Create the class with an ``__interface__`` attribute initialised to
    None.

    NOTE(review): the ``is_interface`` keyword is accepted but never used
    here — confirm whether ``__interface__`` should depend on it.
    """
    attrs['__interface__'] = None
    return ABCMeta.__new__(mcs, name, bases, attrs)
def __new__(mcs, name, bases, dict):
    """Populate the class body via _populate_dict before creating the class.

    Note: the parameter is named ``dict``, shadowing the builtin within
    this function.
    """
    _populate_dict(dict)
    return ABCMeta.__new__(mcs, name, bases, dict)