def removeObserver(self, event, observerfn):
    """
    Remove callable as observer for an event.

    The observer callable is removed for all priority levels for the
    specified event.

    @param event: Event for which the observer callable was registered.
    @type event: C{str} or L{xpath.XPathQuery}
    @param observerfn: Observer callable to be unregistered.
    """
    # If this is happening in the middle of the dispatch, queue
    # it up for processing after the dispatch completes
    if self._dispatchDepth > 0:
        self._updateQueue.append(
            lambda: self.removeObserver(event, observerfn))
        return

    event, observers = self._getEventAndObservers(event)

    emptyLists = []
    for priority, priorityObservers in iteritems(observers):
        for query, callbacklist in iteritems(priorityObservers):
            if event == query:
                callbacklist.removeCallback(observerfn)
                if callbacklist.isEmpty():
                    emptyLists.append((priority, query))

    for priority, query in emptyLists:
        del observers[priority][query]
def sendHeader(self):
    """
    Send stream header.
    """
    # set up optional extra namespaces
    localPrefixes = {}
    for uri, prefix in iteritems(self.prefixes):
        if uri != NS_STREAMS:
            localPrefixes[prefix] = uri

    rootElement = domish.Element((NS_STREAMS, "stream"), self.namespace,
                                 localPrefixes=localPrefixes)

    if self.otherEntity:
        rootElement["to"] = self.otherEntity.userhost()

    if self.thisEntity:
        rootElement["from"] = self.thisEntity.userhost()

    if not self.initiating and self.sid:
        rootElement["id"] = self.sid

    if self.version >= (1, 0):
        rootElement["version"] = "%d.%d" % self.version

    self.send(rootElement.toXml(prefixes=self.prefixes, closeElement=0))
    self._headerSent = True
def _openapi_to_v1_5_data_model(openapi):
    base = loads(
        FilePath(__file__).sibling("extra-1.5.json").getContent()
    )
    for k, v in iteritems(openapi):
        if isinstance(v, dict):
            base.setdefault(k, {}).update(v)
        else:
            base[k] = v
    return _KubernetesDataModel.from_swagger(
        Swagger.from_document(base),
        u"version.Info",
        dict(
            major=u"1",
            minor=u"5",
            gitVersion=u"",
            gitCommit=u"",
            gitTreeState=u"",
            buildDate=u"",
            goVersion=u"",
            compiler=u"",
            platform=u"",
        ),
        {u"v1"},
        {u"v1beta1"},
    )
def get_data(self):
    """Return a C{dict} mapping APT preferences files to their contents.

    If no APT preferences configuration is set at all on the system, then
    simply return C{None}.
    """
    data = {}
    preferences_filename = os.path.join(self._etc_apt_directory,
                                        u"preferences")
    if os.path.exists(preferences_filename):
        data[preferences_filename] = read_text_file(preferences_filename)

    preferences_directory = os.path.join(self._etc_apt_directory,
                                         u"preferences.d")
    if os.path.isdir(preferences_directory):
        for entry in os.listdir(preferences_directory):
            filename = os.path.join(preferences_directory, entry)
            if os.path.isfile(filename):
                data[filename] = read_text_file(filename)

    if data == {}:
        return None

    item_size_limit = self.size_limit // len(data.keys())
    for filename, contents in iteritems(data):
        if len(filename) + len(contents) > item_size_limit:
            truncated_contents_size = item_size_limit - len(filename)
            data[filename] = data[filename][0:truncated_contents_size]

    return data
def get_session_id(self, scope=None):
    """Generate a unique session identifier, persist it and return it.

    See also L{landscape.broker.server.BrokerServer.get_session_id} for
    more information on what this is used for.

    @param scope: A string identifying the scope of interest of the
        requesting object.  Currently this is unused but it has been
        implemented in preparation for a fix for bug #300278 so that we
        don't have to change the persisted structure later.  When that fix
        is in place this will allow us to re-synchronise only certain types
        of information, limited by scope.
    """
    session_ids = self._persist.get("session-ids", {})
    for session_id, stored_scope in iteritems(session_ids):
        # This loop should be relatively short as its intent is to limit
        # session-ids to one per scope.  The or condition here is not
        # strictly necessary, but we *should* do "is" comparisons when we
        # can (so says PEP 8).
        if scope == stored_scope:
            return session_id
    session_id = str(uuid.uuid4())
    session_ids[session_id] = scope
    self._persist.set("session-ids", session_ids)
    return session_id
def sendHeader(self):
    """
    Send stream header.
    """
    # set up optional extra namespaces
    localPrefixes = {}
    for uri, prefix in iteritems(self.prefixes):
        if uri != NS_STREAMS:
            localPrefixes[prefix] = uri

    rootElement = domish.Element((NS_STREAMS, 'stream'), self.namespace,
                                 localPrefixes=localPrefixes)

    if self.otherEntity:
        rootElement['to'] = self.otherEntity.userhost()

    if self.thisEntity:
        rootElement['from'] = self.thisEntity.userhost()

    if not self.initiating and self.sid:
        rootElement['id'] = self.sid

    if self.version >= (1, 0):
        rootElement['version'] = "%d.%d" % self.version

    self.send(rootElement.toXml(prefixes=self.prefixes, closeElement=0))
    self._headerSent = True
def __init__(self, to=None, statuses=None):
    Presence.__init__(self, to, type='unavailable')

    if statuses is not None:
        for lang, status in iteritems(statuses):
            s = self.addElement('status', content=status)
            if lang:
                s[(NS_XML, "lang")] = lang
def set_hash_ids(self, cursor, hash_ids):
    """Set the ids of a set of hashes.

    @param hash_ids: a C{dict} of hash=>id mappings.
    """
    for hash, id in iteritems(hash_ids):
        cursor.execute("REPLACE INTO hash VALUES (?, ?)",
                       (id, sqlite3.Binary(hash)))
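A REPLACE INTO statement behaves as an upsert keyed on the table's primary key or unique constraint, which is why set_hash_ids above can be re-run safely for hashes that already have an id. The following is a minimal, self-contained sketch of that behaviour; the table layout (an id primary key plus a unique hash blob column) is an assumption made for illustration and is not taken from the original schema.

# Hedged sketch: the "hash" table definition below is assumed for illustration.
import sqlite3

conn = sqlite3.connect(":memory:")
cursor = conn.cursor()
cursor.execute("CREATE TABLE hash (id INTEGER PRIMARY KEY, hash BLOB UNIQUE)")

hash_ids = {b"\x01\x02": 1, b"\x03\x04": 2}
for hash_, id_ in hash_ids.items():
    # REPLACE INTO deletes any conflicting row first, so re-running this
    # loop with new ids simply overwrites the previous mappings.
    cursor.execute("REPLACE INTO hash VALUES (?, ?)",
                   (id_, sqlite3.Binary(hash_)))
conn.commit()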
def _generateCacheEntry(provider):
    dropin = CachedDropin(provider.__name__,
                          provider.__doc__)
    for k, v in iteritems(provider.__dict__):
        plugin = IPlugin(v, None)
        if plugin is not None:
            # Instantiated for its side-effects.
            CachedPlugin(dropin, k, v.__doc__, list(providedBy(plugin)))
    return dropin
def _detect_group_changes(self):
    """
    Compare the current group snapshot to the old one and create a
    C{dict} with C{create-groups}, C{delete-groups},
    C{create-group-members} and C{delete-group-members} fields.  Fields
    without data aren't included in the result.
    """
    changes = {}
    creates, updates, deletes = diff(self._old_groups, self._new_groups)

    if creates:
        groups = []
        create_members = {}
        for value in itervalues(creates):
            # Use a copy to avoid removing the 'members' element
            # from stored data.
            value = value.copy()
            members = value.pop("members")
            if members:
                create_members[value["name"]] = members
            groups.append(value)
        changes["create-groups"] = groups
        if create_members:
            changes["create-group-members"] = create_members

    if updates:
        remove_members = {}
        create_members = {}
        update_groups = []
        for groupname, new_data in iteritems(updates):
            old_data = self._old_groups[groupname]
            old_members = set(old_data["members"])
            new_members = set(new_data["members"])

            created = new_members - old_members
            if created:
                create_members[groupname] = sorted(created)

            removed = old_members - new_members
            if removed:
                remove_members[groupname] = sorted(removed)

            if old_data["gid"] != new_data["gid"]:
                update_groups.append({"name": groupname,
                                      "gid": new_data["gid"]})

        if create_members:
            members = changes.setdefault("create-group-members", {})
            members.update(create_members)
        if remove_members:
            members = changes.setdefault("delete-group-members", {})
            members.update(remove_members)
        if update_groups:
            members = changes.setdefault("update-groups", [])
            members.extend(update_groups)

    if deletes:
        changes["delete-groups"] = sorted(deletes.keys())

    return changes
def drop_session_ids(self, scopes=None):
    """Drop all session ids, or only those whose scope is in C{scopes}
    if scopes are given."""
    new_session_ids = {}
    if scopes:
        session_ids = self._persist.get("session-ids", {})
        for session_id, session_scope in iteritems(session_ids):
            if session_scope not in scopes:
                new_session_ids[session_id] = session_scope
    self._persist.set("session-ids", new_session_ids)
def typeCheck(self, fieldDefs=None, filterUnknown=False):
    """
    Check values of fields according to the field definition.

    This method walks all named fields to check their values against
    their type, and is typically used for forms received from other
    entities.  The field definition in C{fieldDefs} is used to check the
    field type.

    If C{filterUnknown} is L{True}, fields that are not present in
    C{fieldDefs} are removed from the form.

    If the field type is L{None} (when not set by the sending entity),
    the type from the field definition is used, or C{'text-single'} if
    that is not set.

    If C{fieldDefs} is None, an empty dictionary is assumed.  This is
    useful for coercing boolean and JID values on forms with type
    C{'form'}.

    @param fieldDefs: Field definitions as a dictionary.  See
        L{wokkel.iwokkel.IPubSubService.getConfigurationOptions}
    @type fieldDefs: L{dict}

    @param filterUnknown: If L{True}, remove fields that are not in
        C{fieldDefs}.
    @type filterUnknown: L{bool}
    """
    if fieldDefs is None:
        fieldDefs = {}

    filtered = []

    for name, field in iteritems(self.fields):
        if name in fieldDefs:
            fieldDef = fieldDefs[name]
            if "type" not in fieldDef:
                fieldDef["type"] = "text-single"

            if field.fieldType is None:
                field.fieldType = fieldDef["type"]
            elif field.fieldType != fieldDef["type"]:
                raise TypeError("Field type for %r is %r, expected %r" %
                                (name, field.fieldType, fieldDef["type"]))
            else:
                # Field type is correct
                pass
            field.typeCheck()
        elif filterUnknown:
            filtered.append(field)
        elif field.fieldType is not None:
            field.typeCheck()
        else:
            # Unknown field without type, no checking, no filtering
            pass

    for field in filtered:
        self.removeField(field)
def dispatch(self, obj, event=None):
    """
    Dispatch an event.

    When C{event} is L{None}, an XPath type event is triggered, and
    C{obj} is assumed to be an instance of
    L{Element<twisted.words.xish.domish.Element>}.  Otherwise, C{event}
    holds the name of the named event being triggered.  In the latter
    case, C{obj} can be anything.

    @param obj: The object to be dispatched.
    @param event: Optional event name.
    @type event: C{str}
    """
    foundTarget = False

    self._dispatchDepth += 1

    if event != None:
        # Named event
        observers = self._eventObservers
        match = lambda query, obj: query == event
    else:
        # XPath event
        observers = self._xpathObservers
        match = lambda query, obj: query.matches(obj)

    priorities = list(observers.keys())
    priorities.sort()
    priorities.reverse()

    emptyLists = []
    for priority in priorities:
        for query, callbacklist in iteritems(observers[priority]):
            if match(query, obj):
                callbacklist.callback(obj)
                foundTarget = True
                if callbacklist.isEmpty():
                    emptyLists.append((priority, query))

    for priority, query in emptyLists:
        del observers[priority][query]

    self._dispatchDepth -= 1

    # If this is a dispatch within a dispatch, don't
    # do anything with the updateQueue -- it needs to
    # wait until we're back all the way out of the stack
    if self._dispatchDepth == 0:
        # Deal with pending update operations
        for f in self._updateQueue:
            f()
        self._updateQueue = []

    return foundTarget
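As a usage illustration of the observer/dispatch machinery above, the hedged sketch below drives a dispatcher with a named event. It assumes the surrounding class is twisted.words.xish.utility.EventDispatcher (or an equivalent that exposes addObserver and dispatch as shown); the "//event/custom" event name is an arbitrary example.

# Hedged usage sketch; assumes twisted.words.xish.utility.EventDispatcher
# provides the addObserver()/dispatch() methods shown above.
from twisted.words.xish.utility import EventDispatcher

seen = []
dispatcher = EventDispatcher(eventprefix="//event/")
dispatcher.addObserver("//event/custom", seen.append)

# Named events pass the dispatched object straight to each observer.
found = dispatcher.dispatch("some payload", "//event/custom")
assert found is True
assert seen == ["some payload"]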
def unauthorized(self, message, host, port):
    m = self.responseFromRequest(401, message)
    for (scheme, auth) in iteritems(self.authorizers):
        chal = auth.getChallenge((host, port))
        if chal is None:
            value = '%s realm="%s"' % (scheme.title(), self.host)
        else:
            value = '%s %s,realm="%s"' % (scheme.title(), chal, self.host)
        m.headers.setdefault('www-authenticate', []).append(value)
    self.deliverResponse(m)
def build_objects():
    objs = {}
    names = set()
    for name, strategy in iteritems(to_create):
        while True:
            obj = strategy.example()
            if obj.metadata.name not in names:
                names.add(obj.metadata.name)
                break
        objs[name] = obj
    return objs
class _DiscoRequest(generic.Request):
    """
    A Service Discovery request.

    @ivar verb: Type of request: C{'info'} or C{'items'}.
    @type verb: C{str}

    @ivar nodeIdentifier: Optional node to request info for.
    @type nodeIdentifier: C{unicode}
    """

    verb = None
    nodeIdentifier = ''

    _requestVerbMap = {
        NS_DISCO_INFO: 'info',
        NS_DISCO_ITEMS: 'items',
    }

    _verbRequestMap = dict(((v, k) for k, v in iteritems(_requestVerbMap)))

    def __init__(self, verb=None, nodeIdentifier='',
                 recipient=None, sender=None):
        generic.Request.__init__(self, recipient=recipient, sender=sender,
                                 stanzaType='get')
        self.verb = verb
        self.nodeIdentifier = nodeIdentifier

    def parseElement(self, element):
        generic.Request.parseElement(self, element)

        verbElement = None
        for child in element.elements():
            if child.name == 'query' and child.uri in self._requestVerbMap:
                self.verb = self._requestVerbMap[child.uri]
                verbElement = child

        if verbElement:
            self.nodeIdentifier = verbElement.getAttribute('node', '')

    def toElement(self):
        element = generic.Request.toElement(self)

        childURI = self._verbRequestMap[self.verb]
        query = element.addElement((childURI, 'query'))

        if self.nodeIdentifier:
            query['node'] = self.nodeIdentifier

        return element
def test_alwaysBytes(self):
    """
    The output of L{bytesEnviron} should always be a L{dict} with
    L{bytes} values and L{bytes} keys.
    """
    result = bytesEnviron()
    types = set()

    for key, val in iteritems(result):
        types.add(type(key))
        types.add(type(val))

    self.assertEqual(list(types), [bytes])
def __init__(self, to=None, show=None, statuses=None, priority=0):
    Presence.__init__(self, to, type=None)

    if show in ['away', 'xa', 'chat', 'dnd']:
        self.addElement('show', content=show)

    if statuses is not None:
        for lang, status in iteritems(statuses):
            s = self.addElement('status', content=status)
            if lang:
                s[(NS_XML, "lang")] = lang

    if priority != 0:
        self.addElement('priority', content=unicode(int(priority)))
def handleRequest(self, iq):
    """
    Find a handler and wrap the call for sending a response stanza.
    """
    def toResult(result, iq):
        response = toResponse(iq, 'result')

        if result:
            if IElement.providedBy(result):
                response.addChild(result)
            else:
                for element in result:
                    response.addChild(element)

        return response

    def checkNotImplemented(failure):
        failure.trap(NotImplementedError)
        raise error.StanzaError('feature-not-implemented')

    def fromStanzaError(failure, iq):
        failure.trap(error.StanzaError)
        return failure.value.toResponse(iq)

    def fromOtherError(failure, iq):
        log.msg("Unhandled error in iq handler:", isError=True)
        log.err(failure)
        return error.StanzaError('internal-server-error').toResponse(iq)

    handler = None
    for queryString, method in iteritems(self.iqHandlers):
        if xpath.internQuery(queryString).matches(iq):
            handler = getattr(self, method)

    if handler:
        d = defer.maybeDeferred(handler, iq)
    else:
        d = defer.fail(NotImplementedError())

    d.addCallback(toResult, iq)
    d.addErrback(checkNotImplemented)
    d.addErrback(fromStanzaError, iq)
    d.addErrback(fromOtherError, iq)
    d.addCallback(self.send)

    iq.handled = True
def makeFields(self, values, fieldDefs=None, filterUnknown=True):
    """
    Create fields from values and add them to this form.

    This creates fields from a mapping of name to value(s) and adds them
    to this form.  It is typically used for generating outgoing forms.

    If C{fieldDefs} is not L{None}, this is used to fill in additional
    properties of fields, like the field types, labels and possible
    options.

    If C{filterUnknown} is L{True} and C{fieldDefs} is not L{None}, fields
    will only be created from C{values} with a corresponding entry in
    C{fieldDefs}.

    If the field type is unknown, the field type is L{None}.  When the
    form is rendered using L{toElement}, these fields will have no
    C{'type'} attribute, and it is up to the receiving party to interpret
    the values properly (e.g. by knowing about the FORM_TYPE in
    C{formNamespace} and the field name).

    @param values: Values to create fields from.
    @type values: L{dict}

    @param fieldDefs: Field definitions as a dictionary.  See
        L{wokkel.iwokkel.IPubSubService.getConfigurationOptions}
    @type fieldDefs: L{dict}

    @param filterUnknown: If L{True}, ignore fields that are not in
        C{fieldDefs}.
    @type filterUnknown: L{bool}
    """
    for name, value in iteritems(values):
        fieldDict = {"var": name, "type": None}

        if fieldDefs is not None:
            if name in fieldDefs:
                fieldDict.update(fieldDefs[name])
            elif filterUnknown:
                continue

        if isinstance(value, list):
            fieldDict["values"] = value
        else:
            fieldDict["value"] = value

        self.addField(Field.fromDict(fieldDict))
def fromElement(element):
    field = Field(None)

    for eAttr, fAttr in iteritems({"type": "fieldType",
                                   "var": "var",
                                   "label": "label"}):
        value = element.getAttribute(eAttr)
        if value:
            setattr(field, fAttr, value)

    for child in element.elements():
        if child.uri != NS_X_DATA:
            continue

        func = getattr(Field, "_parse_" + child.name, None)
        if func:
            func(field, child)

    return field
def test_callbacksCalled(self):
    """
    The correct callbacks fire, and *only* those fire.
    """
    sentencesByType = {
        'GPGGA': [b'$GPGGA*56'],
        'GPGLL': [b'$GPGLL*50'],
        'GPGSA': [b'$GPGSA*42'],
        'GPGSV': [b'$GPGSV*55'],
        'GPHDT': [b'$GPHDT*4f'],
        'GPRMC': [b'$GPRMC*4b']
    }

    for sentenceType, sentences in iteritems(sentencesByType):
        for sentence in sentences:
            self.protocol.lineReceived(sentence)
            self.assertEqual(self.sentenceTypes, set([sentenceType]))
            self.sentenceTypes.clear()
def test_callbacksCalled(self):
    """
    The correct callbacks fire, and *only* those fire.
    """
    sentencesByType = {
        "GPGGA": [b"$GPGGA*56"],
        "GPGLL": [b"$GPGLL*50"],
        "GPGSA": [b"$GPGSA*42"],
        "GPGSV": [b"$GPGSV*55"],
        "GPHDT": [b"$GPHDT*4f"],
        "GPRMC": [b"$GPRMC*4b"],
    }

    for sentenceType, sentences in iteritems(sentencesByType):
        for sentence in sentences:
            self.protocol.lineReceived(sentence)
            self.assertEqual(self.sentenceTypes, set([sentenceType]))
            self.sentenceTypes.clear()
def toElement(self):
    if not self.available:
        self.stanzaType = 'unavailable'

    presence = BasePresence.toElement(self)

    if self.available:
        if self.show in ('chat', 'away', 'xa', 'dnd'):
            presence.addElement('show', content=self.show)
        if self.priority != 0:
            presence.addElement('priority', content=unicode(self.priority))

    for lang, text in iteritems(self.statuses):
        status = presence.addElement('status', content=text)
        if lang:
            status[(NS_XML, 'lang')] = lang

    return presence
def streamStarted(self, rootElement):
    """
    Called by the XmlStream when the stream has started.

    This extends L{Authenticator.streamStarted} to extract further
    information from the stream headers from C{rootElement}.
    """
    Authenticator.streamStarted(self, rootElement)

    self.xmlstream.namespace = rootElement.defaultUri
    if rootElement.hasAttribute("to"):
        self.xmlstream.thisEntity = jid.internJID(rootElement["to"])
    self.xmlstream.prefixes = {}
    for prefix, uri in iteritems(rootElement.localPrefixes):
        self.xmlstream.prefixes[uri] = prefix
    self.xmlstream.sid = hexlify(randbytes.secureRandom(8)).decode('ascii')
def streamStarted(self, rootElement):
    """
    Called by the XmlStream when the stream has started.

    This extends L{Authenticator.streamStarted} to extract further
    information from the stream headers from C{rootElement}.
    """
    Authenticator.streamStarted(self, rootElement)

    self.xmlstream.namespace = rootElement.defaultUri
    if rootElement.hasAttribute("to"):
        self.xmlstream.thisEntity = jid.internJID(rootElement["to"])
    self.xmlstream.prefixes = {}
    for prefix, uri in iteritems(rootElement.localPrefixes):
        self.xmlstream.prefixes[uri] = prefix
    self.xmlstream.sid = hexlify(randbytes.secureRandom(8)).decode("ascii")
def coerce(self, value):
    new_dict = {}
    if not isinstance(value, dict):
        raise InvalidError("%r is not a dict." % (value,))
    for k, v in iteritems(value):
        if k not in self.schema:
            raise InvalidError("%r is not a valid key as per %r"
                               % (k, self.schema))
        try:
            new_dict[k] = self.schema[k].coerce(v)
        except InvalidError as e:
            raise InvalidError(
                "Value of %r key of dict %r could not coerce with %s: %s"
                % (k, value, self.schema[k], e))
    new_keys = set(new_dict.keys())
    required_keys = set(self.schema.keys()) - self.optional
    missing = required_keys - new_keys
    if missing:
        raise InvalidError("Missing keys %s" % (missing,))
    return new_dict
def cmd_END(self):
    """
    This is the end token to a get or a stat operation.
    """
    cmd = self._current.popleft()
    if cmd.command == b"get":
        if cmd.multiple:
            values = {key: val[::2] for key, val in iteritems(cmd.values)}
            cmd.success(values)
        else:
            cmd.success((cmd.flags, cmd.value))
    elif cmd.command == b"gets":
        if cmd.multiple:
            cmd.success(cmd.values)
        else:
            cmd.success((cmd.flags, cmd.cas, cmd.value))
    elif cmd.command == b"stats":
        cmd.success(cmd.values)
    else:
        raise RuntimeError("Unexpected END response to %s command" %
                           (nativeString(cmd.command),))
def streamStarted(self, rootElement):
    xmlstream.ListenAuthenticator.streamStarted(self, rootElement)

    if self.xmlstream.thisEntity:
        targetDomain = self.xmlstream.thisEntity.host
    else:
        targetDomain = self.service.defaultDomain

    def prepareStream(domain):
        self.xmlstream.namespace = self.namespace
        self.xmlstream.prefixes = {xmlstream.NS_STREAMS: 'stream',
                                   NS_DIALBACK: 'db'}
        if domain:
            self.xmlstream.thisEntity = jid.internJID(domain)

    try:
        if xmlstream.NS_STREAMS != rootElement.uri or \
           self.namespace != self.xmlstream.namespace or \
           ('db', NS_DIALBACK) not in iteritems(rootElement.localPrefixes):
            raise error.StreamError('invalid-namespace')

        if targetDomain and targetDomain not in self.service.domains:
            raise error.StreamError('host-unknown')
    except error.StreamError as exc:
        prepareStream(self.service.defaultDomain)
        self.xmlstream.sendStreamError(exc)
        return

    self.xmlstream.addObserver("//verify[@xmlns='%s']" % NS_DIALBACK,
                               trapStreamError(self.xmlstream, self.onVerify))
    self.xmlstream.addObserver("//result[@xmlns='%s']" % NS_DIALBACK,
                               self.onResult)

    prepareStream(targetDomain)

    self.xmlstream.sendHeader()

    if self.xmlstream.version >= (1, 0):
        features = domish.Element((xmlstream.NS_STREAMS, 'features'))
        self.xmlstream.send(features)
def fromDict(fieldDict):
    """
    Create a field from a dictionary.

    This is a shorthand for passing arguments directly on Field object
    creation.  The field type is represented by the C{'type'} key.  For
    C{'options'} the value is not a list of L{Option}s, but a dictionary
    keyed by value, with an optional label as value.
    """
    kwargs = fieldDict.copy()

    if "type" in fieldDict:
        kwargs["fieldType"] = fieldDict["type"]
        del kwargs["type"]

    if "options" in fieldDict:
        options = []
        for value, label in iteritems(fieldDict["options"]):
            options.append(Option(value, label))
        kwargs["options"] = options

    return Field(**kwargs)
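As a usage illustration of fromDict, the hedged sketch below builds a list-single field from a plain dictionary. The import path and the example 'pubsub#access_model' variable name are assumptions chosen for illustration, not taken from the surrounding code.

# Hedged usage sketch; assumes wokkel.data_form is importable.
from wokkel.data_form import Field

field = Field.fromDict({
    "var": "pubsub#access_model",
    "type": "list-single",
    # 'options' maps values to optional labels and becomes a list of Option.
    "options": {"open": "Open access", "whitelist": "Whitelist only"},
})
assert field.fieldType == "list-single"
assert sorted(option.value for option in field.options) == ["open", "whitelist"]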
def handleRequest(self, xml):
    """
    Find a handler and call it directly.

    @param xml: XML stanza that may yield a handler being called.
    @type xml: C{str}.

    @return: Deferred that fires with the result of a handler for this
        stanza.  If no handler was found, the deferred has its errback
        called with a C{NotImplementedError} exception.
    """
    handler = None
    iq = parseXml(xml)
    for queryString, method in iteritems(self.service.iqHandlers):
        if xpath.internQuery(queryString).matches(iq):
            handler = getattr(self.service, method)

    if handler:
        d = defer.maybeDeferred(handler, iq)
    else:
        d = defer.fail(NotImplementedError())

    return d
def cmd_END(self):
    """
    This is the end token to a get or a stat operation.
    """
    cmd = self._current.popleft()
    if cmd.command == b"get":
        if cmd.multiple:
            values = {key: val[::2] for key, val in iteritems(cmd.values)}
            cmd.success(values)
        else:
            cmd.success((cmd.flags, cmd.value))
    elif cmd.command == b"gets":
        if cmd.multiple:
            cmd.success(cmd.values)
        else:
            cmd.success((cmd.flags, cmd.cas, cmd.value))
    elif cmd.command == b"stats":
        cmd.success(cmd.values)
    else:
        raise RuntimeError(
            "Unexpected END response to %s command"
            % (nativeString(cmd.command),))
def send_message(self, urgent):
    if not self.do_send:
        return
    self.do_send = False

    graphs = list(self.registry.store.get_graphs())
    for graph_id, filename, user in graphs:
        if graph_id not in self._data:
            if os.path.isfile(filename):
                script_hash = self._get_script_hash(filename)
                self._data[graph_id] = {
                    "values": [], "error": u"", "script-hash": script_hash}

    message = {"type": self.message_type, "data": self._data}

    new_data = {}
    for graph_id, item in iteritems(self._data):
        script_hash = item["script-hash"]
        new_data[graph_id] = {
            "values": [], "error": u"", "script-hash": script_hash}
    self._data = new_data

    self.registry.broker.send_message(message, self._session_id,
                                      urgent=urgent)
def _unparse(self, directives):
    """
    Create message string from directives.

    @param directives: dictionary of directives (names to their values).
                       For certain directives, extra quotes are added, as
                       needed.
    @type directives: C{dict} of C{str} to C{str}
    @return: message string.
    @rtype: C{str}.
    """
    directive_list = []
    for name, value in iteritems(directives):
        if name in (b'username', b'realm', b'cnonce',
                    b'nonce', b'digest-uri', b'authzid', b'cipher'):
            # These directives get quoted values, as the docstring notes.
            directive = name + b'="' + value + b'"'
        else:
            directive = name + b'=' + value

        directive_list.append(directive)

    return b','.join(directive_list)
def unparseEndpoint(args, kwargs):
    """
    Un-parse the already-parsed args and kwargs back into endpoint syntax.

    @param args: C{:}-separated arguments
    @type args: L{tuple} of native L{str}

    @param kwargs: C{:} and then C{=}-separated keyword arguments
    @type kwargs: L{dict} of native L{str} to native L{str}

    @return: a string equivalent to the original format which this was
        parsed as.
    @rtype: native L{str}
    """
    description = ':'.join(
        [quoteStringArgument(str(arg)) for arg in args] +
        sorted(['%s=%s' % (quoteStringArgument(str(key)),
                           quoteStringArgument(str(value)))
                for key, value in iteritems(kwargs)]))
    return description
def _unparse(self, directives):
    """
    Create message string from directives.

    @param directives: dictionary of directives (names to their values).
                       For certain directives, extra quotes are added, as
                       needed.
    @type directives: C{dict} of C{str} to C{str}
    @return: message string.
    @rtype: C{str}.
    """
    directive_list = []
    for name, value in iteritems(directives):
        if name in (b'username', b'realm', b'cnonce',
                    b'nonce', b'digest-uri', b'authzid', b'cipher'):
            directive = b'%s="%s"' % (name, value)
        else:
            directive = b'%s=%s' % (name, value)

        directive_list.append(directive)

    return b','.join(directive_list)
def unparseEndpoint(args, kwargs):
    """
    Un-parse the already-parsed args and kwargs back into endpoint syntax.

    @param args: C{:}-separated arguments
    @type args: L{tuple} of native L{str}

    @param kwargs: C{:} and then C{=}-separated keyword arguments
    @type kwargs: L{dict} of native L{str} to native L{str}

    @return: a string equivalent to the original format which this was
        parsed as.
    @rtype: native L{str}
    """
    description = ':'.join(
        [quoteStringArgument(str(arg)) for arg in args] +
        sorted([
            '%s=%s' % (quoteStringArgument(str(key)),
                       quoteStringArgument(str(value)))
            for key, value in iteritems(kwargs)
        ]))
    return description
def __str__(self):
    if self._str is not None:
        return self._str
    if hasattr(self, 'func'):
        if hasattr(self.func, 'func_name'):
            func = self.func.func_name
            if hasattr(self.func, 'im_class'):
                func = self.func.im_class.__name__ + '.' + func
        else:
            func = reflect.safe_repr(self.func)
    else:
        func = None

    now = self.seconds()
    L = ["<DelayedCall %s [%ss] called=%s cancelled=%s" % (
            id(self), self.time - now, self.called, self.cancelled)]
    if func is not None:
        L.extend((" ", func, "("))
        if self.args:
            L.append(", ".join([reflect.safe_repr(e) for e in self.args]))
            if self.kw:
                L.append(", ")
        if self.kw:
            L.append(", ".join(['%s=%s' % (k, reflect.safe_repr(v))
                                for (k, v) in iteritems(self.kw)]))
        L.append(")")

    if self.debug:
        L.append(("\n\ntraceback at creation: \n\n%s") %
                 (' '.join(self.creator)))
    L.append('>')

    return "".join(L)
def __call__(self, *children, **kw):
    """
    Add children and change attributes on this tag.

    This is implemented using __call__ because it then allows the natural
    syntax::

      table(tr1, tr2, width="100%", height="50%", border="1")

    Children may be other tag instances, strings, functions, or any other
    object which has a registered flattener.

    Attributes may be 'transparent' tag instances (so that
    C{a(href=transparent(data="foo", render=myhrefrenderer))} works),
    strings, functions, or any other object which has a registered
    flattener.

    If the attribute is a python keyword, such as 'class', you can add an
    underscore to the name, like 'class_'.

    There is one special keyword argument, 'render', which will be used as
    the name of the renderer and saved as the 'render' attribute of this
    instance, rather than the DOM 'render' attribute in the attributes
    dictionary.
    """
    self.children.extend(children)

    for k, v in iteritems(kw):
        if k[-1] == '_':
            k = k[:-1]

        if k == 'render':
            self.render = v
        else:
            self.attributes[k] = v
    return self
def getPlugins(interface, package=None):
    """
    Retrieve all plugins implementing the given interface beneath the given
    module.

    @param interface: An interface class.  Only plugins which implement this
        interface will be returned.

    @param package: A package beneath which plugins are installed.  For most
        uses, the default value is correct.

    @return: An iterator of plugins.
    """
    if package is None:
        import twisted.plugins as package
    allDropins = getCache(package)
    for key, dropin in iteritems(allDropins):
        for plugin in dropin.plugins:
            try:
                adapted = interface(plugin, None)
            except:
                log.err()
            else:
                if adapted is not None:
                    yield adapted
def __init__(
    self,
    fieldType="text-single",
    var=None,
    value=None,
    values=None,
    options=None,
    label=None,
    desc=None,
    required=False,
):
    """
    Initialize this field.

    See the identically named instance variables for descriptions.

    If C{value} is not L{None}, it overrides C{values}, setting the
    given value as the only value for this field.
    """
    self.fieldType = fieldType
    self.var = var
    if value is not None:
        self.value = value
    else:
        self.values = values or []

    self.label = label
    try:
        self.options = [Option(optionValue, optionLabel)
                        for optionValue, optionLabel
                        in iteritems(options)]
    except AttributeError:
        self.options = options or []
    self.desc = desc
    self.required = required
def _checkProcessArgs(self, args, env):
    """
    Check for valid arguments and environment to spawnProcess.

    @return: A two element tuple giving values to use when creating the
        process.  The first element of the tuple is a C{list} of C{bytes}
        giving the values for argv of the child process.  The second element
        of the tuple is either L{None} if C{env} was L{None} or a C{dict}
        mapping C{bytes} environment keys to C{bytes} environment values.
    """
    # Any unicode string which Python would successfully implicitly
    # encode to a byte string would have worked before these explicit
    # checks were added.  Anything which would have failed with a
    # UnicodeEncodeError during that implicit encoding step would have
    # raised an exception in the child process and that would have been
    # a pain in the butt to debug.
    #
    # So, we will explicitly attempt the same encoding which Python
    # would implicitly do later.  If it fails, we will report an error
    # without ever spawning a child process.  If it succeeds, we'll save
    # the result so that Python doesn't need to do it implicitly later.
    #
    # -exarkun
    defaultEncoding = sys.getfilesystemencoding()

    # Common check function
    def argChecker(arg):
        """
        Return either L{bytes} or L{None}.  If the given value is not
        allowable for some reason, L{None} is returned.  Otherwise, a
        possibly different object which should be used in place of arg is
        returned.  This forces unicode encoding to happen now, rather than
        implicitly later.
        """
        if isinstance(arg, unicode):
            try:
                arg = arg.encode(defaultEncoding)
            except UnicodeEncodeError:
                return None
        if isinstance(arg, bytes) and b'\0' not in arg:
            return arg

        return None

    # Make a few tests to check input validity
    if not isinstance(args, (tuple, list)):
        raise TypeError("Arguments must be a tuple or list")

    outputArgs = []
    for arg in args:
        arg = argChecker(arg)
        if arg is None:
            raise TypeError("Arguments contain a non-string value")
        else:
            outputArgs.append(arg)

    outputEnv = None
    if env is not None:
        outputEnv = {}
        for key, val in iteritems(env):
            key = argChecker(key)
            if key is None:
                raise TypeError("Environment contains a non-string key")
            val = argChecker(val)
            if val is None:
                raise TypeError("Environment contains a non-string value")
            outputEnv[key] = val
    return outputArgs, outputEnv
def getCache(module):
    """
    Compute all the possible loadable plugins, while loading as few as
    possible and hitting the filesystem as little as possible.

    @param module: a Python module object.  This represents a package to
        search for plugins.

    @return: a dictionary mapping module names to L{CachedDropin} instances.
    """
    allCachesCombined = {}
    mod = getModule(module.__name__)
    # don't want to walk deep, only immediate children.
    buckets = {}
    # Fill buckets with modules by related entry on the given package's
    # __path__.  There's an abstraction inversion going on here, because this
    # information is already represented internally in twisted.python.modules,
    # but it's simple enough that I'm willing to live with it.  If anyone else
    # wants to fix up this iteration so that it's one path segment at a time,
    # be my guest.  --glyph
    for plugmod in mod.iterModules():
        fpp = plugmod.filePath.parent()
        if fpp not in buckets:
            buckets[fpp] = []
        bucket = buckets[fpp]
        bucket.append(plugmod)
    for pseudoPackagePath, bucket in iteritems(buckets):
        dropinPath = pseudoPackagePath.child('dropin.cache')
        try:
            lastCached = dropinPath.getModificationTime()
            with dropinPath.open('r') as f:
                dropinDotCache = pickle.load(f)
        except:
            dropinDotCache = {}
            lastCached = 0

        needsWrite = False
        existingKeys = {}
        for pluginModule in bucket:
            pluginKey = pluginModule.name.split('.')[-1]
            existingKeys[pluginKey] = True
            if ((pluginKey not in dropinDotCache) or
                (pluginModule.filePath.getModificationTime() >= lastCached)):
                needsWrite = True
                try:
                    provider = pluginModule.load()
                except:
                    # dropinDotCache.pop(pluginKey, None)
                    log.err()
                else:
                    entry = _generateCacheEntry(provider)
                    dropinDotCache[pluginKey] = entry
        # Make sure that the cache doesn't contain any stale plugins.
        for pluginKey in list(dropinDotCache.keys()):
            if pluginKey not in existingKeys:
                del dropinDotCache[pluginKey]
                needsWrite = True
        if needsWrite:
            try:
                dropinPath.setContent(pickle.dumps(dropinDotCache))
            except OSError as e:
                log.msg(
                    format=(
                        "Unable to write to plugin cache %(path)s: error "
                        "number %(errno)d"),
                    path=dropinPath.path, errno=e.errno)
            except:
                log.err(None, "Unexpected error while writing cache file")
        allCachesCombined.update(dropinDotCache)
    return allCachesCombined
def serialize(self, elem, closeElement=1, defaultUri=''):
    # Optimization shortcuts
    write = self.writelist.append

    # Shortcut, check to see if elem is actually a chunk o' serialized XML
    if isinstance(elem, SerializedXML):
        write(elem)
        return

    # Shortcut, check to see if elem is actually a string (aka Cdata)
    if isinstance(elem, StringType):
        write(escapeToXml(elem))
        return

    # Further optimizations
    name = elem.name
    uri = elem.uri
    defaultUri, currentDefaultUri = elem.defaultUri, defaultUri

    for p, u in iteritems(elem.localPrefixes):
        self.prefixes[u] = p
    self.prefixStack.append(list(elem.localPrefixes.keys()))

    # Inherit the default namespace
    if defaultUri is None:
        defaultUri = currentDefaultUri

    if uri is None:
        uri = defaultUri

    prefix = None
    if uri != defaultUri or uri in self.prefixes:
        prefix = self.getPrefix(uri)
        inScope = self.prefixInScope(prefix)

    # Create the starttag
    if not prefix:
        write("<%s" % (name))
    else:
        write("<%s:%s" % (prefix, name))

        if not inScope:
            write(" xmlns:%s='%s'" % (prefix, uri))
            self.prefixStack[-1].append(prefix)
            inScope = True

    if defaultUri != currentDefaultUri and \
       (uri != defaultUri or not prefix or not inScope):
        write(" xmlns='%s'" % (defaultUri))

    for p, u in iteritems(elem.localPrefixes):
        write(" xmlns:%s='%s'" % (p, u))

    # Serialize attributes
    for k, v in elem.attributes.items():
        # If the attribute name is a tuple, it's a qualified attribute
        if isinstance(k, tuple):
            attr_uri, attr_name = k
            attr_prefix = self.getPrefix(attr_uri)

            if not self.prefixInScope(attr_prefix):
                write(" xmlns:%s='%s'" % (attr_prefix, attr_uri))
                self.prefixStack[-1].append(attr_prefix)

            write(" %s:%s='%s'" % (attr_prefix, attr_name,
                                   escapeToXml(v, 1)))
        else:
            write((" %s='%s'" % (k, escapeToXml(v, 1))))

    # Shortcut out if this is only going to return
    # the element (i.e. no children)
    if closeElement == 0:
        write(">")
        return

    # Serialize children
    if len(elem.children) > 0:
        write(">")
        for c in elem.children:
            self.serialize(c, defaultUri=defaultUri)
        # Add closing tag
        if not prefix:
            write("</%s>" % (name))
        else:
            write("</%s:%s>" % (prefix, name))
    else:
        write("/>")

    self.prefixStack.pop()
def _flattenElement(request, root, slotData, renderFactory, dataEscaper):
    """
    Make C{root} slightly more flat by yielding all its immediate contents as
    strings, deferreds or generators that are recursive calls to itself.

    @param request: A request object which will be passed to
        L{IRenderable.render}.

    @param root: An object to be made flatter.  This may be of type
        C{unicode}, C{str}, L{slot}, L{Tag <twisted.web.template.Tag>},
        L{tuple}, L{list}, L{GeneratorType}, L{Deferred}, or an object that
        implements L{IRenderable}.

    @param slotData: A C{list} of C{dict} mapping C{str} slot names to data
        with which those slots will be replaced.

    @param renderFactory: If not C{None}, an object that provides
        L{IRenderable}.

    @param dataEscaper: A 1-argument callable which takes L{bytes} or
        L{unicode} and returns L{bytes}, quoted as appropriate for the
        rendering context.  This is really only one of two values:
        L{attributeEscapingDoneOutside} or L{escapeForContent}, depending on
        whether the rendering context is within an attribute or not.  See the
        explanation in L{flattenWithAttributeEscaping}.

    @return: An iterator that eventually yields L{bytes} that should be
        written to the output.  However it may also yield other iterators or
        L{Deferred}s; if it yields another iterator, the caller will iterate
        it; if it yields a L{Deferred}, the result of that L{Deferred} will
        either be L{bytes}, in which case it's written, or another generator,
        in which case it is iterated.  See L{_flattenTree} for the trampoline
        that consumes said values.
    @rtype: An iterator which yields L{bytes}, L{Deferred}, and more iterators
        of the same type.
    """
    def keepGoing(newRoot, dataEscaper=dataEscaper,
                  renderFactory=renderFactory):
        return _flattenElement(request, newRoot, slotData, renderFactory,
                               dataEscaper)
    if isinstance(root, (bytes, unicode)):
        yield dataEscaper(root)
    elif isinstance(root, slot):
        slotValue = _getSlotValue(root.name, slotData, root.default)
        yield keepGoing(slotValue)
    elif isinstance(root, CDATA):
        yield b'<![CDATA['
        yield escapedCDATA(root.data)
        yield b']]>'
    elif isinstance(root, Comment):
        yield b'<!--'
        yield escapedComment(root.data)
        yield b'-->'
    elif isinstance(root, Tag):
        slotData.append(root.slotData)
        if root.render is not None:
            rendererName = root.render
            rootClone = root.clone(False)
            rootClone.render = None
            renderMethod = renderFactory.lookupRenderMethod(rendererName)
            result = renderMethod(request, rootClone)
            yield keepGoing(result)
            slotData.pop()
            return

        if not root.tagName:
            yield keepGoing(root.children)
            return

        yield b'<'
        if isinstance(root.tagName, unicode):
            tagName = root.tagName.encode('ascii')
        else:
            tagName = root.tagName
        yield tagName
        for k, v in iteritems(root.attributes):
            if isinstance(k, unicode):
                k = k.encode('ascii')
            yield b' ' + k + b'="'
            # Serialize the contents of the attribute, wrapping the results
            # of that serialization so that _everything_ is quoted.
            attribute = keepGoing(v, attributeEscapingDoneOutside)
            yield flattenWithAttributeEscaping(attribute)
            yield b'"'
        if root.children or nativeString(tagName) not in voidElements:
            yield b'>'
            # Regardless of whether we're in an attribute or not, switch back
            # to the escapeForContent dataEscaper.  The contents of a tag must
            # be quoted no matter what; in the top-level document, just so
            # they're valid, and if they're within an attribute, they have to
            # be quoted so that after applying the *un*-quoting required to
            # re-parse the tag within the attribute, all the quoting is still
            # correct.
            yield keepGoing(root.children, escapeForContent)
            yield b'</' + tagName + b'>'
        else:
            yield b' />'

    elif isinstance(root, (tuple, list, GeneratorType)):
        for element in root:
            yield keepGoing(element)
    elif isinstance(root, CharRef):
        escaped = '&#%d;' % (root.ordinal,)
        yield escaped.encode('ascii')
    elif isinstance(root, Deferred):
        yield root.addCallback(lambda result: (result, keepGoing(result)))
    elif IRenderable.providedBy(root):
        result = root.render(request)
        yield keepGoing(result, renderFactory=root)
    else:
        raise UnsupportedType(root)
def test_avoidLeakingFileDescriptors(self):
    """
    If associated with a protocol which does not provide
    L{IFileDescriptorReceiver}, file descriptors received by the
    L{IUNIXTransport} implementation are closed and a warning is emitted.
    """
    # To verify this, establish a connection.  Send one end of the
    # connection over the IUNIXTransport implementation.  After the copy
    # should no longer exist, close the original.  If the opposite end of
    # the connection decides the connection is closed, the copy does not
    # exist.
    from socket import socketpair
    probeClient, probeServer = socketpair()

    events = []
    addObserver(events.append)
    self.addCleanup(removeObserver, events.append)

    class RecordEndpointAddresses(SendFileDescriptor):
        def connectionMade(self):
            self.hostAddress = self.transport.getHost()
            self.peerAddress = self.transport.getPeer()
            SendFileDescriptor.connectionMade(self)

    server = RecordEndpointAddresses(probeClient.fileno(), b"junk")
    client = ConnectableProtocol()

    runProtocolsWithReactor(self, server, client, self.endpoints)

    # Get rid of the original reference to the socket.
    probeClient.close()

    # A non-blocking recv will return "" if the connection is closed, as
    # desired.  If the connection has not been closed, because the
    # duplicate file descriptor is still open, it will fail with EAGAIN
    # instead.
    probeServer.setblocking(False)
    self.assertEqual(b"", probeServer.recv(1024))

    # This is a surprising circumstance, so it should be logged.
    format = (
        "%(protocolName)s (on %(hostAddress)r) does not "
        "provide IFileDescriptorReceiver; closing file "
        "descriptor received (from %(peerAddress)r).")
    clsName = "ConnectableProtocol"

    # Reverse host and peer, since the log event is from the client
    # perspective.
    expectedEvent = dict(hostAddress=server.peerAddress,
                         peerAddress=server.hostAddress,
                         protocolName=clsName,
                         format=format)

    for logEvent in events:
        for k, v in iteritems(expectedEvent):
            if v != logEvent.get(k):
                break
        else:
            # No mismatches were found, stop looking at events
            break
    else:
        # No fully matching events were found, fail the test.
        self.fail(
            "Expected event (%s) not found in logged events (%s)" % (
                expectedEvent, pformat(events,)))