def __init__(cls, name, bases, attrs, *, interface=None):
    """Metaclass initializer: bind an interface to the newly created class.

    If *interface* is not passed it is inherited from a parent class.  The
    class is declared an implementer of the interface; every interface
    attribute tagged ``mode == 'static'`` must be supplied by the class
    body itself, while all other attributes get a ``ComponentAttribute``
    descriptor.

    :raises TypeError: if a static attribute is missing from the class body.
    """
    if interface is None:
        # Try to fetch it from a parent class
        interface = cls.interface
    zi.implementer(interface)(cls)
    cls.interface = interface
    # Slap on an attribute descriptor for every static attribute in the
    # interface.  (Derived attributes promise that they're computed by the
    # class via @property or some other mechanism.)
    for key in interface:
        attr = interface[key]
        if not isinstance(attr, zi.Attribute):
            continue
        mode = attr.queryTaggedValue('mode')
        if mode == 'static':
            if key in cls.__dict__:
                # TODO i have seen the light: there is a good reason to do
                # this. see HealthRender
                continue
            # BUG FIX: this raise fires when the class does NOT define the
            # static attribute; the old message claimed the opposite
            # ("defines static attribute") — stale from before the
            # ``continue`` above was introduced.
            raise TypeError("Implementation {!r} "
                            "does not define static attribute {!r}".format(
                                cls, key))
        else:
            setattr(cls, key, ComponentAttribute(attr))
def test_allow_unjellyable_class(t):
    """Classes marked as IUnjellyable pass the ``isClassAllowed`` check.

    NOTE(review): Python 2 only — ``types.ClassType`` (old-style classes)
    and the ``types.TypeType`` alias were removed in Python 3.
    """
    classes = (
        # Old-style class providing IUnjellyable.
        implementer(IUnjellyable)(types.ClassType("foo", (), {})),
        # New-style class providing IUnjellyable.
        implementer(IUnjellyable)(types.TypeType("bar", (), {})),
    )
    for cls in classes:
        t.assertTrue(t.options.isClassAllowed(cls))
def includeme(config):
    """
    Don't add this to your ``pyramid_includes``, but add the
    ``kotti_configure`` above to your ``kotti.configurators`` instead.

    :param config: Pyramid configurator object.
    :type config: :class:`pyramid.config.Configurator`
    """
    # Every view requires the 'view' permission unless it explicitly opts
    # out via ``NO_PERMISSION_REQUIRED`` (from ``pyramid.security``), which
    # makes a view reachable by entirely anonymous users regardless of the
    # default permission.
    config.set_default_permission('view')

    # Hook Image and File content types into the default workflow.
    for content_type in (Image, File):
        implementer(IDefaultWorkflow)(content_type)

    # Pull in the views shipped with this package.
    for dotted_name in ('kotti_backend.views.goto_frontend',):
        config.include(dotted_name)

    # Register this package's translations.
    config.add_translation_dirs('kotti_backend:locale')
def __init__(cls, name, bases, attrs, *, interface=None):
    """Metaclass initializer: bind an interface to the newly created class.

    If *interface* is not passed it is inherited from a parent class.  The
    class is declared an implementer of the interface; every interface
    attribute tagged ``mode == 'static'`` must be supplied by the class
    body itself, while all other attributes get a ``ComponentAttribute``
    descriptor.

    :raises TypeError: if a static attribute is missing from the class body.
    """
    if interface is None:
        # Try to fetch it from a parent class
        interface = cls.interface
    zi.implementer(interface)(cls)
    cls.interface = interface
    # Slap on an attribute descriptor for every static attribute in the
    # interface.  (Derived attributes promise that they're computed by the
    # class via @property or some other mechanism.)
    for key in interface:
        attr = interface[key]
        if not isinstance(attr, zi.Attribute):
            continue
        mode = attr.queryTaggedValue('mode')
        if mode == 'static':
            if key in cls.__dict__:
                # TODO i have seen the light: there is a good reason to do
                # this. see HealthRender
                continue
            # BUG FIX: this raise fires when the class does NOT define the
            # static attribute; the old message claimed the opposite
            # ("defines static attribute") — stale from before the
            # ``continue`` above was introduced.
            raise TypeError(
                "Implementation {!r} "
                "does not define static attribute {!r}"
                .format(cls, key)
            )
        else:
            setattr(cls, key, ComponentAttribute(attr))
def create(resource_type: typing.Text,
           fhir_release: FHIR_VERSION = FHIR_VERSION.DEFAULT):
    """Look up the FHIR model class for *resource_type* and make sure it
    is declared as an implementer of ``IModel``."""
    klass = lookup_fhir_class(resource_type, fhir_release)
    already_declared = IModel.implementedBy(klass)
    if not already_declared:
        implementer(IModel)(klass)
    return klass
def __call__(self, wrapped):
    """Decorator body: declare *wrapped* as implementing the content
    interface and defer content-type registration via venusian."""
    implementer(self.content_iface)(wrapped)

    def callback(context, name, ob):
        # Runs later, at config-scan time; by then ``info`` (bound below)
        # is available to the closure.
        config = context.config.with_package(info.module)
        config.add_content_type(self.content_iface, wrapped, **self.meta)

    info = self.venusian.attach(wrapped, callback, category='substanced')
    self.meta['_info'] = info.codeinfo  # fbo "action_method"
    return wrapped
def __init__(cls, name, bases, attrs):
    """Metaclass initializer: give each class its own dynamic interface.

    ``got`` remembers whether an interface existed before this call.
    """
    got = iface = get_interface(cls)
    if iface == cls._root:
        # No class-specific interface yet: create one named "I<ClassName>"
        # and declare cls as its implementer.
        cls._interface = iface = create_dynamic_interface("I" + name)
        implementer(iface)(cls)
    if got is None:
        # First class in the hierarchy: record its interface as the root.
        cls._root = iface
    return super(ConfiguredObjectMeta, cls).__init__(name, bases, attrs)
def _makeMailerStub(*args, **kw):
    """Build a stub IMailer that records sent messages instead of sending.

    BUG FIX: the original called ``implementer(IMailer)`` as a bare
    statement and discarded the result, so the stub never actually
    provided IMailer; the decorator form applies it to the class.
    """
    from zope.interface import implementer
    from repoze.sendmail.interfaces import IMailer

    @implementer(IMailer)
    class MailerStub(object):
        def __init__(self, *args, **kw):
            # Each (fromaddr, toaddrs, message) tuple is recorded here.
            self.sent_messages = []

        def send(self, fromaddr, toaddrs, message):
            self.sent_messages.append((fromaddr, toaddrs, message))

    return MailerStub(*args, **kw)
def __init__(self, descriptor, view_type=None):
    """Wrap *descriptor* as a named-template implementation.

    EAFP check that *descriptor* really is a descriptor (has ``__get__``).
    NOTE(review): ``implementer`` is applied to an *instance* here, not a
    class — confirm this is the intended declaration style.
    """
    try:
        descriptor.__get__
    except AttributeError:
        raise TypeError(
            "NamedTemplateImplementation must be passed a descriptor.")
    self.descriptor = descriptor
    interface.implementer(INamedTemplate)(self)
    if view_type is not None:
        # Optionally declare which view type this template adapts.
        component.adapter(view_type)(self)
def add_display_message(config, exception, message):
    """Register *message* as the IDisplayMessage adapter for *exception*.

    The registration is deferred through ``config.action`` so it takes
    part in Pyramid's conflict detection; the exception class's name is
    used as both the adapter name and the action discriminator.
    """
    implementer(IFailure)(exception)  # xxx: marks the exception class as IFailure

    def register_display_message():
        adapters = config.registry.adapters
        adapters.register([IFailure], IDisplayMessage, nameof(exception), message)

    discriminator = nameof(exception)
    # BUG FIX: typo "redable" -> "readable" in the introspection description.
    desc = "human readable message for {}".format(nameof(exception))
    introspectables = [
        config.introspectable('display_messages', discriminator, desc,
                              'display_message')
    ]
    config.action(discriminator, register_display_message,
                  introspectables=introspectables)
def __init__(self, descriptor, view_type=None):
    """Wrap *descriptor* as a named-template implementation.

    EAFP check that *descriptor* really is a descriptor (has ``__get__``).
    NOTE(review): ``implementer`` is applied to an *instance* here, not a
    class — confirm this is the intended declaration style.
    """
    try:
        descriptor.__get__
    except AttributeError:
        raise TypeError(
            "NamedTemplateImplementation must be passed a descriptor."
        )
    self.descriptor = descriptor
    interface.implementer(INamedTemplate)(self)
    if view_type is not None:
        # Optionally declare which view type this template adapts.
        component.adapter(view_type)(self)
@implementer(IClassifier)
class WurflClassifier(object):
    """Classify a user-agent string against the WURFL device database.

    BUG FIX: the interface was previously "declared" with a bare
    ``implementer(IClassifier)`` statement inside the class body, which is
    a no-op (its result was discarded); it must be a class decorator.
    """

    def __init__(self, conf=None):
        self.db, self.index = initialize_db(conf)

    def __call__(self, user_agent):
        """Return the matching WDevice for *user_agent*, or None."""
        if not user_agent:
            return None
        match = self.index.search(user_agent)
        if not match:
            return None
        node, matchstring, matchlen = match
        dev_id = node.value
        if dev_id is NOTSET:
            # Reject weak prefix matches: short match with low coverage.
            ratio = old_div(matchlen, len(user_agent))
            if matchlen < 18 and ratio < 0.8:
                return None
            # BUG FIX: ``next(list(...))`` raises TypeError (a list is not
            # an iterator); wrap the values in ``iter`` first.
            dev_id = next(iter(node.values()))
        device = WDevice.deserialize(self.db[dev_id])
        return device
def test_multi_handler_unregistration(self):
    """
    There was a bug where multiple handlers for the same required
    specification would all be removed when one of them was unregistered.
    """
    from zope import interface
    calls = []

    class I(interface.Interface):
        pass

    def factory1(event):
        calls.append(2)

    def factory2(event):
        calls.append(3)

    class Event(object):
        pass
    # Declare Event instances as providing I so both handlers fire.
    Event = interface.implementer(I)(Event)

    self.components.registerHandler(factory1, [I,])
    self.components.registerHandler(factory2, [I,])
    self.components.handle(Event())
    # Both handlers ran: 2 + 3.
    self.assertEqual(sum(calls), 5)
    self.assertTrue(self.components.unregisterHandler(factory1, [I,]))
    calls = []
    self.components.handle(Event())
    # Only factory2 remains registered.
    self.assertEqual(sum(calls), 3)
@implementer(IAuthChallenge)
class AuthChallenge(object):
    """Value object carrying a pending auth challenge.

    BUG FIX: the bare ``implementer(IAuthChallenge)`` statement inside the
    class body was a no-op; the interface must be applied as a decorator.
    """

    def __init__(self, auth_id, auth_domain, challenge):
        self.auth_id = auth_id
        self.auth_domain = auth_domain
        self.challenge = challenge
def NodeFactory(name, iface_name, base=base):
    """Create a node class called *name*, subclassing *base*, that
    implements a freshly created interface named *iface_name*."""
    from zope.interface.interface import InterfaceClass

    def __init__(self, parent=None):
        self.parent = parent

    node_cls = type(name, (base,), {"__init__": __init__})
    iface = InterfaceClass(iface_name)
    return implementer(iface)(node_cls)
@implementer(IAuthService)
class VanillaMasterClientService(service.MultiService):
    """TCP client service that talks to a vanilla master server for auth.

    BUG FIXES: the bare ``implementer(IAuthService)`` statement inside the
    class body was a no-op (decorator applied instead), and ``__init__``
    used a mutable default argument (``domains=[]``) shared across calls.
    """

    @staticmethod
    def build(punitive_model: PunitiveModel, host: str, port: int,
              register_port: int):
        """Alternate constructor wiring the protocol factory for *host*."""
        domains = ["localhost", ""]
        punitive_model_adapter = PunitiveModelAdapter(punitive_model)
        protocol_factory = MasterClientProtocolFactory(
            punitive_model_adapter, host, register_port)
        return VanillaMasterClientService(port, protocol_factory,
                                          interface=host, domains=domains)

    def __init__(self, port: int, factory: MasterClientProtocolFactory,
                 interface: str = '', domains: List[str] = None) -> None:
        service.MultiService.__init__(self)
        self._protocol_factory = factory
        # None sentinel replaces the shared mutable default; copy so a
        # caller-held list can't be mutated from outside.
        self._domains = list(domains) if domains is not None else []
        self._child_service = TCPClient(interface, port, factory)
        self._child_service.setServiceParent(self)

    def handles_domain(self, auth_domain):
        """True if this service is responsible for *auth_domain*."""
        return auth_domain in self._domains

    def try_authenticate(self, auth_domain, auth_name):
        return self._protocol_factory.try_auth(auth_domain, auth_name)

    def answer_challenge(self, auth_domain, auth_id, answer):
        return self._protocol_factory.answer_challenge(auth_id, answer)
@implementer(IDevice)
class Device(object):
    """Simple device record derived from a user-agent string.

    BUG FIX: the bare ``implementer(IDevice)`` statement inside the class
    body was a no-op; the interface must be applied as a decorator.
    """

    def __init__(self, user_agent, type_, platform=u"computer"):
        self.user_agent = user_agent
        self.type = type_
        # Normalize platform to a plain string.
        self.platform = str(platform)
def __get__(self, inst, klass):
    """Descriptor protocol: return a stale-aware callable wrapping the
    bound method, or this descriptor itself on class access."""
    if inst is None:
        return self
    bound = self._func.__get__(inst, klass)
    # Because we cannot add attributes to a bound method,
    # and because we want to limit the overhead of additional
    # function calls (these methods are commonly used),
    # we derive a new class whose __call__ is exactly the bound
    # method's __call__. For this reason, it's important to access
    # these methods only once and cache them (which is what
    # copy_storage_methods does).
    stale_aware_class = type(
        'StaleAware_' + bound.__name__,
        (_StaleAwareMethodTemplate,),
        {
            '__slots__': (),
            '__call__': bound.__call__,
        }
    )
    # Declare the generated class as providing IStaleAware.
    stale_aware_class = implementer(IStaleAware)(stale_aware_class)
    # update_wrapper() doesn't work on a type.
    # stale_aware_class = functools.wraps(bound)(stale_aware_class)
    return stale_aware_class(bound)
def AngularSessionFactoryConfig(**options):
    """Build a Beaker-backed Pyramid session factory that also mirrors the
    CSRF token into an ``XSRF-TOKEN`` cookie for AngularJS clients."""
    PyramidBeakerSessionObject = BeakerSessionFactoryConfig(**options)

    class PyramidAngularSessionObject(PyramidBeakerSessionObject):
        def __init__(self, request):
            PyramidBeakerSessionObject.__init__(self, request)

            def csrft_callback(request, response):
                # Only emit the cookie on non-error responses for sessions
                # that were actually touched during this request.
                exception = getattr(request, 'exception', None)
                if exception is None and self.accessed():
                    csrft = self.get('_csrft_', None)
                    cookie = self.cookie
                    # NOTE(review): ``headers`` is never used below —
                    # looks like dead code; confirm before removing.
                    headers = self.__dict__['_headers']
                    if csrft and self.dirty():
                        # Temporarily swap the cookie key and value
                        # for XSRF-TOKEN and the csrf_token. This mostly
                        # avoids the need to work directly with the cookie
                        # and the beaker configuration options.
                        old = (self.key, self.id)
                        try:
                            self.key = 'XSRF-TOKEN'
                            self._set_cookie_values()
                            cookie[self.key].coded_value = csrft
                            value = cookie[self.key].output(header='')
                            response.headerlist.append(('Set-Cookie', value))
                        finally:
                            # Restore the real session cookie identity.
                            self.key, self.id = old

            request.add_response_callback(csrft_callback)

        def invalidate(self):
            # Easiest thing to do is eagerly create a new csrf immediately
            self._session().invalidate()
            self.new_csrf_token()

    return implementer(ISession)(PyramidAngularSessionObject)
def __call__(self, cls):
    """Class decorator: declare on *cls* only the interfaces it does not
    already provide (directly or through inheritance).

    FIX: removed the dead ``ifaces_needed = []`` initialization that was
    immediately overwritten by the comprehension.
    """
    implemented = implementedBy(cls)
    ifaces_needed = [
        iface
        for iface in self._ifaces
        if not implemented.isOrExtends(iface)
    ]
    return implementer(*ifaces_needed)(cls)
def patch_fhir_base_model():
    """Force the base FHIR ``Resource`` class to provide ``IFhirResource``."""
    from zope.interface import implementer
    import fhir.resources.resource as fmr
    from guillotina_fhirfield.interfaces import IFhirResource

    # Rebind the patched class back onto the module.
    fmr.Resource = implementer(IFhirResource)(fmr.Resource)
def __call__(self, value, **kws):
    """Create and register an attribute-value factory for *value*.

    Keyword arguments map discriminator names to objects whose
    specifications form the adapter signature; the factory is declared
    both as an adapter over that signature and as an IValue implementer.
    """
    # Step 1: Check that the keyword argument names match the
    # discriminators
    if set(kws).difference(set(self.discriminators)):
        raise ValueError('One or more keyword arguments did not match the '
                         'discriminators.')
    # Step 2: Create an attribute value factory
    factory = ValueFactory(value, self.value_class, self.discriminators)
    # Step 3: Build the adaptation signature
    signature = []
    for disc in self.discriminators:
        # Discriminators that were not supplied pass None through to
        # get_specification.
        spec = get_specification(kws.get(disc))
        signature.append(spec)
    # Step 4: Assert the adaptation signature onto the factory
    adapter(*signature)(factory)
    implementer(IValue)(factory)
    return factory
def decorator(cls):
    """Rebuild *cls* as a slotted JSON-proxy class implementing IJsonable.

    Field descriptors become properties, default dunder implementations
    are filled in when missing, and the proxy metadata is attached as
    ``__jsonable_proxy__``.
    """
    field_list = __build_field_list(
        wrapped_type,
        cls,
    )
    metadata = ProxyClassMetadata(
        wrapped_type,
        field_list,
        as_container,
        keyFormat,
        itemProxy,
        itemFormat,
        methods,
    )
    # Container proxies may not also declare fields.
    if len(metadata.field_list) > 0 and as_container:
        raise TypeError()
    attrs = dict(cls.__dict__)
    for field in metadata.field_list:
        attrs[field.name] = property(*field.descriptors)
    # Ensure '__jsonable__' is part of the slots layout.
    # NOTE(review): ``slots + __slots__`` assumes any user-supplied
    # __slots__ is a tuple — confirm.
    slots = ('__jsonable__', )
    __slots__ = attrs.get('__slots__', slots)
    if '__jsonable__' not in __slots__:
        __slots__ = slots + __slots__
    attrs['__slots__'] = __slots__
    # Fill in default implementations only where the class did not
    # provide its own.
    if '__init__' not in attrs:
        attrs['__init__'] = __init__
    if '__eq__' not in attrs:
        attrs['__eq__'] = __eq__
    if '__ne__' not in attrs:
        attrs['__ne__'] = __ne__
    if '__repr__' not in attrs:
        attrs['__repr__'] = __repr__
    if as_container:
        # Container proxies additionally get the mapping protocol.
        if '__len__' not in attrs:
            attrs['__len__'] = __len__
        if '__iter__' not in attrs:
            attrs['__iter__'] = __iter__
        if '__getitem__' not in attrs:
            attrs['__getitem__'] = __getitem__
        if '__setitem__' not in attrs:
            attrs['__setitem__'] = __setitem__
        if '__delitem__' not in attrs:
            attrs['__delitem__'] = __delitem__
        if '__contains__' not in attrs:
            attrs['__contains__'] = __contains__
    attrs['__jsonable_proxy__'] = metadata
    # Recreate the class so the new __slots__ takes effect.
    new_class = type(cls.__name__, cls.__bases__, attrs)
    new_class = implementer(IJsonable)(new_class)
    return new_class
def __init__(self, pipes, context=None):
    """Instantiate each pipe class in *pipes* and store the instances.

    :param pipes: iterable of zero-argument pipe classes/callables.
    :param context: opaque context object stored for later use.
    :raises ValueError: if *pipes* is not iterable.

    NOTE(review): ``implementer(IPipe)`` is applied to each *instance*,
    not a class — confirm this declaration style is intended.
    """
    if not hasattr(pipes, '__iter__'):
        raise ValueError("pipes not an iterable")
    self.pipes = []
    self.context = context
    for pipe in pipes:
        self.pipes.append(implementer(IPipe)(pipe()))
@implementer(service.IServiceMaker, IPlugin)
class SpydServiceMaker(object):
    """Twisted plugin that builds the spyd server service.

    BUG FIX: the bare ``implementer(...)`` statement inside the class body
    was a no-op; without the decorator the plugin system cannot discover
    this class as an IServiceMaker/IPlugin.
    """
    tapname = "spyd"
    description = "A Sauerbraten server."
    options = Options

    def makeService(self, options):
        """Build the top-level service from parsed *options*."""
        return WrapperService(options)
def test_getInterfaces_implicit(self):
    """getInterfaces() picks up interfaces declared on the callable itself."""
    from zope.interface import Interface
    from zope.interface import implementer

    class IBaz(Interface):
        pass

    # fails_if_called ensures the wrapped callable is never invoked.
    _callable = implementer(IBaz)(fails_if_called(self))
    factory = self._makeOne(_callable)
    spec = factory.getInterfaces()
    self.assertEqual(list(spec), [IBaz])
@implementer(IClassifier)
class MITClassifier(object):
    """Classify user agents against the MIT user-agent pattern set.

    BUG FIX: the bare ``implementer(IClassifier)`` statement inside the
    class body was a no-op; the interface must be applied as a decorator.
    """

    def __init__(self):
        self.patterns = MITUAPatternMatcher()
        self.patterns.load_patterns()

    def __call__(self, user_agent):
        """Return a MITDevice for *user_agent*, or None when unknown."""
        device_infos = self.patterns.lookup(user_agent)
        if device_infos is None:
            return None
        return MITDevice(str(user_agent), device_infos)
def add_plugin(self, installname, plugin, iface=None, categoryname=None,
               strict=True):
    """Register *plugin* under *installname*, inferring its interface
    when *iface* is not given.

    :param strict: when True, force-declare the interface on the plugin
        and verify the class actually satisfies it.
    """
    if iface is None:
        try:
            iface = next(iter(implementedBy(plugin)))
            if iface is IPlugin:
                # NOTE(review): this builds a *fresh* iterator, so it
                # yields the same first interface (IPlugin) again rather
                # than the next declared one — looks like a bug; confirm
                # the intent before changing.
                iface = next(iter(implementedBy(plugin)))  # xxx
        except StopIteration:
            raise Exception("plugin {} is not implemented by any interface".format(plugin))
    if strict:
        implementer(iface)(plugin)
        verifyClass(iface, plugin)
    if categoryname is None:
        categoryname = installname
    plugin_factory = self.maybe_dotted(plugin)
    self.register_implementation(iface, installname, plugin_factory)
    logger.info("install: %s -- %s (category:%s)",
                installname, plugin_factory.__name__, categoryname)
    self.registry.installed_plugin[installname] = (iface, categoryname)
@implementer(IAuthSuccess)
class VanillaAuthSuccess(object):
    """Result object for a successful vanilla auth claim.

    BUG FIX: the bare ``implementer(IAuthSuccess)`` statement inside the
    class body was a no-op; the interface must be applied as a decorator.
    """

    def __init__(self, auth_domain, auth_name):
        self.group_provider = VanillaGroupProvider(auth_domain, auth_name)
        # Announced to the room; formatted elsewhere with these kwargs.
        self.room_message = "{name#client} claimed auth as {auth#auth_name}@{domain#auth_domain}"
        self.room_message_kwargs = {
            'auth_name': auth_name,
            'auth_domain': auth_domain
        }
        # No direct message is sent to the authenticating client.
        self.client_message = None
        self.client_message_kwargs = {}
@implementer(INegotiationStrategy)
class OnlyFreeStrategy(Strategy):
    """Negotiation strategy that only ever offers/accepts a zero rate.

    BUG FIX: the bare ``implementer(INegotiationStrategy)`` statement
    inside the class body was a no-op; the interface must be applied as a
    decorator.
    """

    def __init__(self, *args, **kwargs):
        price_model = ZeroPrice()
        super().__init__(price_model, 0.0, 0.0, True)

    def _get_mean_rate(self, rates):
        return 0.0

    def _get_response_rate(self, rates, offer_count):
        return 0.0

    def _make_rate_offer(self, rates, offer_count):
        return 0.0
def add_content_type(config, content_iface, factory, **meta):
    """ Configurator method which adds a content type.  Call via
    ``config.add_content_type`` during Pyramid configuration phase.
    ``content_iface`` is an interface representing the content type.
    ``factory`` is a class or function which produces a content instance.
    ``**meta`` is an arbitrary set of keywords associated with the content
    type in the content registry."""
    if not IInterface.providedBy(content_iface):
        # BUG FIX: the message previously interpolated the *builtin*
        # ``type`` instead of the offending argument.
        raise ConfigurationError(
            'The provided "content_iface" argument (%r) is not an '
            'interface object (it does not inherit from '
            'zope.interface.Interface)' % content_iface)
    if not content_iface.implementedBy(factory):
        # was not called by decorator
        implementer(content_iface)(factory)
    if not inspect.isclass(factory):
        # Non-class factories get wrapped so their products still provide
        # the interface.
        factory = provides_factory(factory, content_iface)

    def register_factory():
        config.registry.content.add(content_iface, factory, **meta)

    discrim = ('sd-content-type', content_iface)
    intr = config.introspectable(
        'substance d content types',
        discrim,
        dotted_name(content_iface),
        'substance d content type',
    )
    intr['meta'] = meta
    intr['content_iface'] = content_iface
    intr['factory'] = factory
    # Defer the registration so it participates in config conflict
    # detection.
    config.action(discrim, callable=register_factory, introspectables=(intr,))
def test_fhir_field_value_pickling():
    """FhirFieldValue instances must survive a pickle round-trip."""
    with open(str(FHIR_EXAMPLE_RESOURCES / "Organization.json"), "r") as fp:
        fhir_json = json.load(fp)
    resource_cls = implementer(IFhirResource)(
        lookup_fhir_class(fhir_json["resourceType"]))
    original = FhirFieldValue(obj=resource_cls(fhir_json))

    restored = pickle.loads(pickle.dumps(original))

    assert len(restored.stringify()) == len(original.stringify())
@implementer(INegotiationStrategy)
class BasicAvailabilityWeightedStrategy(Strategy):
    """Basic strategy to target blob prices based on supply relative to
    mean supply.

    Discount the price target with each incoming request, and raise it
    with each outgoing offer from the modeled price until the rate is
    accepted or a threshold is reached.

    BUG FIX: the bare ``implementer(INegotiationStrategy)`` statement
    inside the class body was a no-op; the interface must be applied as a
    decorator.
    """

    def __init__(self, blob_tracker, acceleration=1.25,
                 deceleration=0.9, max_rate=None,
                 min_rate=0.0, is_generous=None,
                 base_price=0.0001, alpha=1.0):
        price_model = MeanAvailabilityWeightedPrice(
            blob_tracker, base_price=base_price, alpha=alpha)
        super().__init__(price_model, max_rate, min_rate, is_generous)
        # rate of how quickly to ramp offer
        self._acceleration = Decimal(acceleration)
        self._deceleration = Decimal(deceleration)

    def _get_mean_rate(self, rates):
        # Guard against an empty rates list (divide by at least 1).
        mean_rate = Decimal(sum(rates)) / Decimal(max(len(rates), 1))
        return mean_rate

    def _premium(self, rate, turn):
        return rate * (self._acceleration ** Decimal(turn))

    def _discount(self, rate, turn):
        return rate * (self._deceleration ** Decimal(turn))

    def _get_response_rate(self, rates, offer_count):
        """Rate we are willing to accept after *offer_count* rounds."""
        rate = self._get_mean_rate(rates)
        discounted = self._discount(rate, offer_count)
        rounded_price = round(discounted, 5)
        return self._bounded_price(rounded_price)

    def _make_rate_offer(self, rates, offer_count):
        """Rate we offer after *offer_count* rounds."""
        rate = self._get_mean_rate(rates)
        with_premium = self._premium(rate, offer_count)
        rounded_price = round(with_premium, 5)
        return self._bounded_price(rounded_price)
def __call__(self, cls):
    """Check if the new class satisfies the requirements of the base class
    and return the implementation of the new class.

    Parameters
    ----------
    cls : class
        The new class to process.
    """
    self.check(cls)
    # Record every fused base on the class (create the list lazily).
    if not hasattr(cls, '_fused_base'):
        cls._fused_base = []
    cls._fused_base.append(self._base)
    # Declare cls an implementer of the base's interface, then wrap it
    # with ``base`` (``base`` and ``interface`` are module-level helpers
    # defined elsewhere).
    return base(implementer(interface(self._base))(cls))
def __new__(cls, name, bases, attrs, **kwargs):
    """Create the class with a minimal attribute dict, then declare that
    it implements ``IModel``."""
    # xxx: customize module path?
    module = attrs.pop("__module__", cls.__module__)
    fresh_attrs = {"__module__": module}
    # Preserve the classcell so zero-argument super() keeps working in
    # the created class.
    classcell = attrs.pop("__classcell__", None)
    if classcell is not None:
        fresh_attrs["__classcell__"] = classcell
    created = super().__new__(cls, name, bases, fresh_attrs, **kwargs)
    # Attach Interface
    return implementer(IModel)(created)
async def test_fhir_field_value_serializer(dummy_request):
    """A populated FhirFieldValue serializes to its JSON form; an empty
    one serializes to None."""
    with open(str(FHIR_EXAMPLE_RESOURCES / "Organization.json"), "r") as fp:
        fhir_json = json.load(fp)
    resource_cls = implementer(IFhirResource)(
        lookup_fhir_class(fhir_json["resourceType"]))
    populated = FhirFieldValue(obj=resource_cls(fhir_json))

    assert query_adapter(populated, IValueToJson) == populated.as_json()
    assert query_adapter(FhirFieldValue(), IValueToJson) is None
@implementer(IReferenceUtils)
class ReferenceUtils(BrowserView):
    """see IReferenceUtils for documentation

    BUG FIX: the bare ``implementer(IReferenceUtils)`` statement inside
    the class body was a no-op; the interface must be applied as a
    decorator.  NOTE: the ``cmp``-based sorts below are Python 2 only.
    """

    def _processRefs(self, refs, sort_on, reverse):
        """util method to run the refs through LazyFilter"""
        filtered_refs = []
        if refs and refs is not None:
            if not isinstance(refs, list):
                refs = [refs]
            # Drop anything the current user may not View.
            filtered_refs = list(LazyFilter(refs, skip='View'))
        if sort_on is not None:
            filtered_refs.sort(lambda x, y: cmp(
                x.getField(sort_on).get(x), y.getField(sort_on).get(y)))
        if reverse:
            filtered_refs.reverse()
        return filtered_refs

    def getFilteredRefs(self, obj, relationship, sort_on=None, reverse=False):
        """see IReferenceUtils for documentation"""
        refs = obj.getRefs(relationship)
        return self._processRefs(refs, sort_on, reverse)

    def getFilteredBRefs(self, obj, relationship, sort_on=None, reverse=False):
        """see IReferenceUtils for documentation"""
        refs = obj.getBRefs(relationship)
        return self._processRefs(refs, sort_on, reverse)

    def getFilteredOrderedRefs(self, obj, relationship, reverse=False):
        """Forward references ordered by their 'order' attribute."""
        refs = obj.getReferenceImpl(relationship)
        refs.sort(lambda a, b: cmp(getattr(a, 'order', None),
                                   getattr(b, 'order', None)))
        ref_objs = [ref.getTargetObject() for ref in refs]
        return self._processRefs(ref_objs, None, reverse)

    def getFilteredOrderedBRefs(self, obj, relationship, reverse=False):
        """Backward references ordered by their 'order' attribute."""
        refs = obj.getBackReferenceImpl(relationship)
        refs.sort(lambda a, b: cmp(getattr(a, 'order', None),
                                   getattr(b, 'order', None)))
        ref_objs = [ref.getTargetObject() for ref in refs]
        return self._processRefs(ref_objs, None, reverse)
@implementer(IAuthService)
class NoOpMasterClientService(service.Service):
    """Fallback auth service used when no master server can be resolved;
    accepts every domain but fails every request.

    BUG FIX: the bare ``implementer(IAuthService)`` statement inside the
    class body was a no-op; the interface must be applied as a decorator.
    """

    def handles_domain(self, auth_domain):
        # Catch-all: claims every domain so it can report the failure.
        return True

    def try_authenticate(self, auth_domain, auth_name):
        return defer.fail(
            AuthFailedException(
                "Could not determine which master server to send your request to."
            ))

    def answer_challenge(self, auth_domain, auth_id, answer):
        return defer.fail(
            AuthFailedException(
                "Could not determine which master server to send your request to."
            ))
@implementer(IGroupProvider)
class VanillaGroupProvider(object):
    """Provide the auth groups granted by a vanilla auth success.

    BUG FIX: the bare ``implementer(IGroupProvider)`` statement inside the
    class body was a no-op; the interface must be applied as a decorator.
    """

    def __init__(self, domain, auth_name):
        self.domain = domain
        self.auth_name = auth_name
        # One group for the domain itself plus a name@domain group.
        self._group_names = (domain_to_auth_group(domain),
                             "{self.auth_name}@{self.domain}".format(
                                 self=self))

    def get_group_names(self):
        return self._group_names

    def __repr__(self):
        return "<VanillaGroupProvider {authentication.auth_name}@{authentication.domain}>".format(
            authentication=self)
@implementer(IBlobPriceModel)
class MeanAvailabilityWeightedPrice(object):
    """Calculate mean-blob-availability and stream-position weighted price
    for a blob.

    Attributes:
        base_price (float): base price
        alpha (float): constant, > 0.0 and <= 1.0, used to more highly
            value blobs at the beginning of a stream.  alpha defaults to
            1.0, which has a null effect.
        blob_tracker (BlobAvailabilityTracker): blob availability tracker

    BUG FIX: the bare ``implementer(IBlobPriceModel)`` statement inside
    the class body was a no-op; the interface must be applied as a
    decorator.
    """

    def __init__(self, tracker, base_price=None, alpha=1.0):
        self.blob_tracker = tracker
        # Fall back to the configured data rate when no base price given.
        base_price = base_price if base_price is not None else conf.settings[
            'data_rate']
        self.base_price = Decimal(base_price)
        self.alpha = Decimal(alpha)

    def calculate_price(self, blob):
        """Price for *blob*, weighted by availability and stream position."""
        mean_availability = self.blob_tracker.last_mean_availability
        availability = self.blob_tracker.availability.get(blob, [])
        index = 0  # blob.index
        availability_mult = self._get_availability_multiplier(
            mean_availability, availability)
        price = self.base_price * availability_mult / self._frontload(index)
        return round(price, 5)

    def _get_availability_multiplier(self, mean_availability, availability):
        # Scarce blobs (few sources relative to the mean) cost more.
        return Decimal(
            max(1, mean_availability) / Decimal(max(1, len(availability))))

    def _frontload(self, index):
        """Get front-load multiplier, used to weight prices of blobs in a
        stream towards the front of the stream.

        At index 0, returns 1.0.  As index increases, the return value
        approaches 2.0.

        @param index: blob position in stream
        @return: front-load multiplier
        """
        return Decimal(2.0) - (self.alpha ** index)
@implementer(IStreamServerEndpoint)
class ENetServerEndpoint(object):
    """Twisted stream-server endpoint backed by an ENet host.

    BUG FIXES: the bare ``implementer(IStreamServerEndpoint)`` statement
    inside the class body was a no-op (decorator applied instead), and
    ``_getLogPrefix`` returned None, producing "None starting on ..." in
    the log.
    """

    def __init__(self, reactor, interface, port, maxclients, channels,
                 maxdown=0, maxup=0, max_duplicate_peers=None):
        self._reactor = reactor
        self._interface = interface
        self._port = port
        self._maxclients = maxclients
        self._channels = channels
        self._maxdown = maxdown
        self._maxup = maxup
        # 0xFFF effectively disables the duplicate-peer limit by default.
        self._max_duplicate_peers = max_duplicate_peers or 0xFFF
        self._factory = None
        self._address = None
        self._enet_host = None

    def listen(self, factory):
        """Bind the ENet host and start reading from the reactor."""
        self._factory = factory
        self._address = enet.Address(self._interface, self._port)
        enet_host = enet.Host(self._address, self._maxclients, self._channels,
                              self._maxdown, self._maxup)
        self._enet_host = ENetHost(enet_host, factory)
        self._enet_host.duplicate_peers = self._max_duplicate_peers
        log.msg("%s starting on %s" % (self._getLogPrefix(), self._port))
        self._reactor.addReader(self._enet_host)

    def flush(self):
        return self._enet_host.flush()

    def _getLogPrefix(self):
        # BUG FIX: previously a bare ``pass`` (returned None).
        return self.__class__.__name__
@implementer(IGroupProvider)
class MaestroGroupProvider(object):
    """Provide the auth groups granted by a maestro auth success.

    BUG FIXES: the bare ``implementer(IGroupProvider)`` statement inside
    the class body was a no-op (decorator applied instead), and
    ``__repr__`` reported the wrong class name ("MaestroAuthSuccess").
    """

    def __init__(self, domain, auth_name, groups):
        self.domain = domain
        self.auth_name = auth_name
        # NOTE: extends the caller-supplied list in place.
        self._group_names = groups
        self._group_names.extend([
            domain_to_auth_group(domain),
            "{self.auth_name}@{self.domain}".format(self=self)
        ])

    def get_group_names(self):
        return self._group_names

    def __repr__(self):
        return "<MaestroGroupProvider {authentication.auth_name}@{authentication.domain}>".format(
            authentication=self)
def trivialInput(symbol):
    """
    Create a new L{IRichInput} implementation for the given input symbol.

    This creates a new type object and is intended to be used at module
    scope to define rich input types.  Generally, only one use per symbol
    should be required.  For example::

        Apple = trivialInput(Fruit.apple)

    @param symbol: A symbol from some state machine's input alphabet.

    @return: A new type object usable as a rich input for the given
        symbol.
    @rtype: L{type}
    """
    className = symbol.name.title()
    richType = type(className, (FancyStrMixin, object),
                    {"symbol": _symbol(symbol)})
    return implementer(IRichInput)(richType)
def trivialInput(symbol):
    """
    Create a new L{IRichInput} implementation for the given input symbol.

    This creates a new type object and is intended to be used at module
    scope to define rich input types.  Generally, only one use per symbol
    should be required.  For example::

        Apple = trivialInput(Fruit.apple)

    @param symbol: A symbol from some state machine's input alphabet.

    @return: A new type object usable as a rich input for the given
        symbol.
    @rtype: L{type}
    """
    attributes = {"symbol": _symbol(symbol)}
    newType = type(symbol.name.title(), (FancyStrMixin, object), attributes)
    return implementer(IRichInput)(newType)
def register_object_factories(schema):
    """Register generic adapters for schema `Object` type"""
    manager = getGlobalSiteManager()
    for name, field in get_fields_from_schema(schema):
        if isinstance(field.field, ObjectField):
            f_schema = field.field.schema
            # e.g. field "homeAddress" -> class "GenericObjectHomeAddress".
            obj_name = "GenericObject{0}{1}".format(name[0].upper(), name[1:])
            # Build a GenericObject subclass with a FieldProperty per
            # schema field.
            f_object = type(
                obj_name,
                (GenericObject,),
                {n: FieldProperty(f_schema[n]) for n, f in Fields(f_schema).items()},
            )
            # Expose the generated class on this module so it is
            # importable by name.
            setattr(sys.modules[__name__], obj_name, f_object)
            factory = implementer(f_schema)(f_object)
            manager.registerAdapter(
                generic_object_adapter(factory),
                required=(Interface, Interface, Interface, Interface),
                provided=IObjectFactory,
                name=f_schema.__identifier__,
            )
def make_interface_on_the_fly(cls):
    """Create an empty interface named ``I<ClassName>``, declare *cls* as
    implementing it, and return the new interface."""
    iface = InterfaceClass("I{}".format(cls.__name__))
    implementer(iface)(cls)
    return iface
def close(self): self._fd.close() def fileno(self): return self._fd.fileno() def readConnectionLost(self, reason): self.close() def connectionLost(self, reason): self.close() def doRead(self): self._callback(self._fd) if have_twisted: Reader = implementer(IReadDescriptor)(Reader) class Writer(object): def __init__(self, fd, callback): self._fd = fd self._callback = callback def logPrefix(self): return "Writer" def close(self): self._fd.close() def fileno(self): return self._fd.fileno()
def tube(cls):
    """
    L{tube} is a class decorator which declares a given class to be an
    implementer of L{ITube} and fills out any methods or attributes which
    are not present on the decorated type with null-implementation methods
    (those which return None) and None attributes.

    @param cls: A class with some or all of the attributes or methods
        described by L{ITube}.
    @type cls: L{type}

    @return: C{cls}
    @rtype: L{type} which implements L{ITube}
    """
    # This is better than a superclass, because:
    # - you can't do a separate 'isinstance(Tube)' check instead of
    #   ITube.providedBy like you're supposed to
    # - you can't just instantiate Tube directly, that is pointless
    #   functionality so we're not providing it
    # - it avoids propagating a bad example that other codebases will copy
    #   to depth:infinity, rather than depth:1 where subclassing is
    #   actually sort of okay
    # - it provides a more straightforward and reliable mechanism for
    #   future-proofing code.  If you're inheriting from a superclass and
    #   you want it to do something to warn users, upgrade an interface,
    #   and so on, you have to try to cram a new meta-type into the user's
    #   hierarchy so a function gets invoked at the right time.  If you're
    #   invoking this class decorator, then it just gets invoked like a
    #   normal function, and we can put some code in here that examines
    #   the type and does whatever it wants to do, because the @ syntax
    #   simply called it as a function.
    # It still shares some issues with inheritance, such as:
    # - the direction of visibility within the hierarchy is still wrong.
    #   you can still do 'self.someMethodIDidntImplement()' and get a
    #   result.
    # - it destructively modifies the original class, so what you see
    #   isn't quite what you get.  a cleaner compositional approach would
    #   simply wrap an object around another object (but that would mean
    #   inventing a new incompletely-specified type that floats around at
    #   runtime, rather than a utility to help you completely implement
    #   ITube at import time)
    def started(self):
        """
        A null implementation of started.

        @param self: An instance of the C{tube} being defined.
        """

    def stopped(self, reason):
        """
        A null implementation of stopped.

        @param self: An instance of the C{tube} being defined.

        @param reason: see L{ITube}
        """

    def received(self, item):
        """
        A null implementation of received.

        @param self: An instance of the C{tube} being defined.

        @param item: see L{ITube}
        """

    # (name, default) pairs filled in only where cls lacks the attribute.
    fillers = [('started', started),
               ('stopped', stopped),
               ('received', received),
               ('inputType', None),
               ('outputType', None)]
    notHere = object()
    for name, value in fillers:
        if getattr(cls, name, notHere) is notHere:
            setattr(cls, name, value)
    cls = implementer(ITube)(cls)
    # Fail fast at import time if the filled-out class still doesn't
    # satisfy ITube.
    verifyClass(ITube, cls)
    return cls
if name != name_with_package: return self.environment.get_template(name_with_package) else: raise def __call__(self, value, system): try: system.update(value) except (TypeError, ValueError): ex = sys.exc_info()[1] # py2.5 - 3.2 compat raise ValueError('renderer was passed non-dictionary ' 'as value: %s' % str(ex)) return self.template.render(system) Jinja2TemplateRenderer = \ implementer(ITemplateRenderer)(Jinja2TemplateRenderer) # 2.5 compat, ugh def add_jinja2_search_path(config, searchpath): """ This function is added as a method of a :term:`Configurator`, and should not be called directly. Instead it should be called like so after ``pyramid_jinja2`` has been passed to ``config.include``: .. code-block:: python config.add_jinja2_search_path('anotherpackage:templates/') It will add the directory or :term:`asset spec` passed as ``searchpath`` to the current search path of the ``jinja2.environment.Environment`` used by :mod:`pyramid_jinja2`. """
def count_1min(self, now): return self.counter.distinct(now - 60) def count_1hour(self, now): return self.counter.distinct(now - 60 * 60) def count_1day(self, now): return self.counter.distinct(now - 60 * 60 * 24) def process(self, fields): self.update(fields[0]) def update(self, item): self.counter.add(self.wall_time_func(), item) def flush(self, interval, timestamp): now = self.wall_time_func() metrics = [] items = {".count": self.count(), ".count_1min": self.count_1min(now), ".count_1hour": self.count_1hour(now), ".count_1day": self.count_1day(now)} for item, value in sorted(items.iteritems()): metrics.append((self.prefix + self.name + item, value, timestamp)) return metrics # if we are running anything >= 2.7 if sys.version_info[0:2] >= (2,7): DistinctMetricReporter = implementer(IMetric)(DistinctMetricReporter)
from twisted.web.client import HTTPConnectionPool from zope.interface import implementer from buildbot import config from buildbot.interfaces import IHttpResponse from buildbot.util import service from buildbot.util.logger import Logger try: import txrequests except ImportError: txrequests = None try: import treq implementer(IHttpResponse)(treq.response._Response) except ImportError: treq = None log = Logger() @implementer(IHttpResponse) class TxRequestsResponseWrapper(object): def __init__(self, res): self._res = res def content(self): return defer.succeed(self._res.content)
from kazoo.handlers.utils import create_tcp_socket
from kazoo.interfaces import IAsyncResult
from kazoo.interfaces import IHandler

# gevent 0.x is libevent-based and needs a different peekable queue.
_using_libevent = gevent.__version__.startswith('0.')

log = logging.getLogger(__name__)

# Sentinel pushed onto worker queues to tell them to shut down.
_STOP = object()

if _using_libevent:  # pragma: nocover
    from .gevent_pqueue import PeekableQueue as _PeekableQueue
else:
    _PeekableQueue = gevent.queue.Queue

# Declare (via plain implementer() call) that gevent's AsyncResult satisfies
# kazoo's IAsyncResult, and re-export it under that name.
AsyncResult = implementer(IAsyncResult)(gevent.event.AsyncResult)


# NOTE(review): class body and docstring are cut off at the end of this chunk.
@implementer(IHandler)
class SequentialGeventHandler(object):
    """Gevent handler for sequentially executing callbacks.

    This handler executes callbacks in a sequential manner. A queue is
    created for each of the callback events, so that each type of event
    has its callback type run sequentially.

    Each queue type has a greenlet worker that pulls the callback event
    off the queue and runs it in the order the client sees it.

    This split helps ensure that watch callbacks won't block session
    re-establishment should the connection be lost during a Zookeeper
def implementer(self, cls):
    """Class-decorator helper: resolve an interface for ``cls`` via
    ``self.__call__`` and mark ``cls`` as implementing it.

    Returns ``cls`` itself (zope's implementer decorator returns the
    decorated class).
    """
    iface = self.__call__(cls)
    # Inside the function body the name `implementer` resolves to the
    # module-level zope.interface.implementer, not to this method.
    decorate = implementer(iface)
    return decorate(cls)
# Test fixtures: a meta-interface, a base class with a repr useful in test
# output, and interface-implementing variants of it. I1/I2 are interfaces
# defined elsewhere in this file.


class ITestType(interface.interfaces.IInterface):
    # Marker meta-interface for interface objects themselves.
    pass


class U:
    """Base fixture: carries an explicit __name__ and a diagnostic repr."""

    def __init__(self, name):
        self.__name__ = name

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.__name__)


class U1(U):
    pass

# Plain-call form of @implementer (old-style compatibility idiom).
U1 = interface.implementer(I1)(U1)


class U12(U):
    pass

# Implements both I1 and I2.
U12 = interface.implementer(I1, I2)(U12)


# Three empty interfaces used as adaptation targets in the tests below.
class IA1(interface.Interface):
    pass


class IA2(interface.Interface):
    pass


class IA3(interface.Interface):
    pass
# NOTE(review): interior of MailDataManager — the class header and the
# beforeCompletion/abort methods referenced below are outside this chunk.

    # Same no-op treatment after completion as before it.
    afterCompletion = beforeCompletion

    def tpc_begin(self, transaction, subtransaction=False):
        # Subtransactions are not supported by this data manager.
        assert not subtransaction

    def tpc_vote(self, transaction):
        pass

    def tpc_finish(self, transaction):
        # Commit phase: perform the deferred send.
        self.callable(*self.args)

    tpc_abort = abort

# BBB Python 2.5 compat
MailDataManager = implementer(IDataManager)(MailDataManager)


class AbstractMailDelivery(object):
    """Base delivery: normalizes a message and joins the send to the
    current transaction so it only happens on commit."""

    def send(self, fromaddr, toaddrs, message):
        assert isinstance(message, Message), \
            'Message must be instance of email.message.Message'
        encoding.cleanup_message(message)
        # Ensure Message-Id and Date headers exist before queueing.
        messageid = message['Message-Id']
        if messageid is None:
            messageid = message['Message-Id'] = make_msgid('repoze.sendmail')
        if message['Date'] is None:
            message['Date'] = formatdate()
        # Defer actual delivery to transaction commit via a data manager.
        transaction.get().join(
            self.createDataManager(fromaddr, toaddrs, message))
# NOTE(review): interior of TornadoDelayedCall — class header and the
# _called/cancel machinery are outside this chunk.

    def delay(self, seconds):
        """Push the scheduled time later by ``seconds`` (relative shift)."""
        self._reactor._io_loop.remove_timeout(self._timeout)
        self._time += seconds
        self._timeout = self._reactor._io_loop.add_timeout(self._time,
                                                           self._called)

    def reset(self, seconds):
        """Reschedule to fire ``seconds`` from now (absolute reset)."""
        self._reactor._io_loop.remove_timeout(self._timeout)
        self._time = self._reactor.seconds() + seconds
        self._timeout = self._reactor._io_loop.add_timeout(self._time,
                                                           self._called)

    def active(self):
        return self._active

# Fake class decorator for python 2.5 compatibility
TornadoDelayedCall = implementer(IDelayedCall)(TornadoDelayedCall)


# NOTE(review): __init__ is cut off at the end of this chunk.
class TornadoReactor(PosixReactorBase):
    """Twisted reactor built on the Tornado IOLoop.

    Since it is intended to be used in applications where the top-level
    event loop is ``io_loop.start()`` rather than ``reactor.run()``, it is
    implemented a little differently than other Twisted reactors.

    We override `mainLoop` instead of `doIteration` and must implement
    timed call functionality on top of `IOLoop.add_timeout` rather than
    using the implementation in `PosixReactorBase`.
    """
    def __init__(self, io_loop=None):
        # Default to the process-wide singleton IOLoop.
        if not io_loop:
            io_loop = tornado.ioloop.IOLoop.instance()
# -*- coding:utf-8 -*- import logging logger = logging.getLogger(__name__) from zope.interface import implementer from .interfaces import IFailure, IHasMessage from block.form.validation import ValidationError ## todo: refine @implementer(IFailure) class CreationFailure(Exception): pass @implementer(IFailure) class UpdatingFailure(Exception): pass @implementer(IFailure) class DelitingFailure(Exception): pass ValidationFailure = implementer(IFailure)(ValidationError) @implementer(IFailure) class NotFoundFailure(Exception): pass @implementer(IFailure) class BadData(Exception): pass
def BeakerSessionFactoryConfig(**options):
    """ Return a Pyramid session factory using Beaker session settings
    supplied directly as ``**options``"""

    class PyramidBeakerSessionObject(SessionObject):
        # ``pop`` runs once, at class-definition time: these two keys are
        # consumed here and NOT forwarded to Beaker's SessionObject.
        _options = options
        _cookie_on_exception = _options.pop('cookie_on_exception', True)
        _constant_csrf_token = _options.pop('constant_csrf_token', False)

        def __init__(self, request):
            SessionObject.__init__(self, request.environ, **self._options)

            def session_callback(request, response):
                # Persist (and emit Set-Cookie) only if the session was
                # actually touched, and skip on exception unless
                # cookie_on_exception is enabled.
                exception = getattr(request, 'exception', None)
                if (
                    (exception is None or self._cookie_on_exception)
                    and self.accessed()
                ):
                    self.persist()
                    headers = self.__dict__['_headers']
                    if headers['set_cookie'] and headers['cookie_out']:
                        response.headerlist.append(
                            ('Set-Cookie', headers['cookie_out']))
            request.add_response_callback(session_callback)

        # ISession API
        @property
        def new(self):
            # A session never accessed before this request counts as new.
            return self.last_accessed is None

        changed = SessionObject.save

        # modifying dictionary methods
        # (each mutator is wrapped so the session is marked dirty/saved)
        @call_save
        def clear(self):
            return self._session().clear()

        @call_save
        def update(self, d, **kw):
            return self._session().update(d, **kw)

        @call_save
        def setdefault(self, k, d=None):
            return self._session().setdefault(k, d)

        @call_save
        def pop(self, k, d=None):
            return self._session().pop(k, d)

        @call_save
        def popitem(self):
            return self._session().popitem()

        __setitem__ = call_save(SessionObject.__setitem__)
        __delitem__ = call_save(SessionObject.__delitem__)

        # Flash API methods
        def flash(self, msg, queue='', allow_duplicate=True):
            storage = self.setdefault('_f_' + queue, [])
            if allow_duplicate or (msg not in storage):
                storage.append(msg)

        def pop_flash(self, queue=''):
            # Remove and return the queue's messages.
            storage = self.pop('_f_' + queue, [])
            return storage

        def peek_flash(self, queue=''):
            # Return the queue's messages without consuming them.
            storage = self.get('_f_' + queue, [])
            return storage

        # CSRF API methods
        def new_csrf_token(self):
            # A truthy constant_csrf_token (testing aid) overrides the
            # random token.
            token = (self._constant_csrf_token or
                     hexlify(os.urandom(20)).decode('ascii'))
            self['_csrft_'] = token
            return token

        def get_csrf_token(self):
            # Lazily create the token on first request.
            token = self.get('_csrft_', None)
            if token is None:
                token = self.new_csrf_token()
            return token

    return implementer(ISession)(PyramidBeakerSessionObject)