def onREAD(self, address, data):
    """
    Handle an incoming READ event.

    The raw payload is unpickled into an event object; events that we
    originated ourselves are dropped (to avoid echo loops), anything
    else is forwarded via send().
    """
    event = unpickle(data)
    channel, target, source = event.channel, event.target, event.source

    # Don't re-deliver our own events.
    if source == self.ourself:
        return

    self.send(event, channel, target)
def hadoop_line_features(line):
    """ Convert a correctly-formatted line of text to a list of GeoJSON features.

        Allows Hadoop to stream features from the mapper to the reducer.
        See also skeletron-hadoop-mapper.py and skeletron-hadoop-reducer.py.
    """
    feature_id, prop, geom = line.split()

    feature_id = json_decode(feature_id)
    properties = dict(unpickle(b64decode(prop)))
    geometry = wkb_decode(b64decode(geom))

    # A multi-geometry contributes one feature per part; a simple
    # geometry is treated as a single-element collection.
    if hasattr(geometry, 'geoms'):
        parts = geometry.geoms
    else:
        parts = [geometry]

    features = []
    for part in parts:
        if not hasattr(part, '__geo_interface__'):
            continue
        features.append(dict(type='Feature', id=feature_id,
                             properties=properties,
                             geometry=part.__geo_interface__))

    return features
def updateFreeBusySet(value, directory):
    """
    Rewrite the hrefs stored in a CalendarFreeBusySet property value.

    Returns the re-compressed property XML when any href changed, or
    None when no update is required (or the stored value is invalid).
    """
    try:
        value = zlib.decompress(value)
    except zlib.error:
        # Legacy data - not zlib compressed
        pass

    try:
        freeBusySet = davxml.WebDAVDocument.fromString(value).root_element
    except ValueError:
        try:
            freeBusySet = unpickle(value)
        except UnpicklingError:
            log.err("Invalid free/busy property value")
            # MOR: continue on?
            return None

    fbset = set()
    didUpdate = False
    for child in freeBusySet.children:
        oldHref = str(child)
        newHref = updateFreeBusyHref(oldHref, directory)
        if newHref is None:
            fbset.add(oldHref)
            continue
        didUpdate = True
        if newHref != "":
            fbset.add(newHref)

    if not didUpdate:
        return None  # no update required

    property = caldavxml.CalendarFreeBusySet(
        *[davxml.HRef(href) for href in fbset])
    return compress(property.toxml())
def updateFreeBusySet(value, directory):
    """
    Rewrite the hrefs stored in a CalendarFreeBusySet property value.

    Deferred-yielding variant: href rewriting is awaited via ``yield`` and
    results are delivered through ``returnValue`` — the new property value
    when something changed, None otherwise.
    """
    try:
        value = zlib.decompress(value)
    except zlib.error:
        # Legacy data - not zlib compressed
        pass

    try:
        freeBusySet = element.WebDAVDocument.fromString(value).root_element
    except ValueError:
        try:
            freeBusySet = unpickle(value)
        except UnpicklingError:
            log.error("Invalid free/busy property value")
            returnValue(None)

    fbset = set()
    didUpdate = False
    for child in freeBusySet.children:
        oldHref = str(child)
        newHref = yield updateFreeBusyHref(oldHref, directory)
        if newHref is None:
            fbset.add(oldHref)
            continue
        didUpdate = True
        if newHref != "":
            fbset.add(newHref)

    if not didUpdate:
        returnValue(None)  # no update required

    property = caldavxml.CalendarFreeBusySet(
        *[element.HRef(fbhref) for fbhref in fbset]
    )
    returnValue(compress(property.toxml()))
class xattrPropertyStore(object): """ This implementation uses Bob Ippolito's xattr package, available from:: http://undefined.org/python/#xattr Note that the Bob's xattr package is specific to Linux and Darwin, at least presently. """ # # Dead properties are stored as extended attributes on disk. In order to # avoid conflicts with other attributes, prefix dead property names. # deadPropertyXattrPrefix = "WebDAV:" # Linux seems to require that attribute names use a "user." prefix. # FIXME: Is is a system-wide thing, or a per-filesystem thing? # If the latter, how to we detect the file system? if sys.platform == "linux2": deadPropertyXattrPrefix = "user." def _encode(clazz, name, uid=None): result = urllib.quote(encodeXMLName(*name), safe='{}:') if uid: result = uid + result r = clazz.deadPropertyXattrPrefix + result return r def _decode(clazz, name): name = urllib.unquote(name[len(clazz.deadPropertyXattrPrefix):]) index1 = name.find("{") index2 = name.find("}") if (index1 is -1 or index2 is -1 or not len(name) > index2): raise ValueError("Invalid encoded name: %r" % (name, )) if index1 == 0: uid = None else: uid = name[:index1] propnamespace = name[index1 + 1:index2] propname = name[index2 + 1:] return (propnamespace, propname, uid) _encode = classmethod(_encode) _decode = classmethod(_decode) def __init__(self, resource): self.resource = resource self.attrs = xattr.xattr(self.resource.fp.path) def get(self, qname, uid=None): """ Retrieve the value of a property stored as an extended attribute on the wrapped path. @param qname: The property to retrieve as a two-tuple of namespace URI and local name. @param uid: The per-user identifier for per user properties. @raise HTTPError: If there is no value associated with the given property. @return: A L{WebDAVDocument} representing the value associated with the given property. 
""" try: data = self.attrs.get(self._encode(qname, uid)) except KeyError: raise HTTPError( StatusResponse( responsecode.NOT_FOUND, "No such property: %s" % (encodeXMLName(*qname), ))) except IOError, e: if e.errno in _ATTR_MISSING or e.errno == errno.ENOENT: raise HTTPError( StatusResponse( responsecode.NOT_FOUND, "No such property: %s" % (encodeXMLName(*qname), ))) else: raise HTTPError( StatusResponse( statusForFailure(Failure()), "Unable to read property: %s" % (encodeXMLName(*qname), ))) # # Unserialize XML data from an xattr. The storage format has changed # over time: # # 1- Started with XML # 2- Started compressing the XML due to limits on xattr size # 3- Switched to pickle which is faster, still compressing # 4- Back to compressed XML for interoperability, size # # We only write the current format, but we also read the old # ones for compatibility. # legacy = False try: data = decompress(data) except zlib.error: legacy = True try: doc = WebDAVDocument.fromString(data) except ValueError: try: doc = unpickle(data) except UnpicklingError: format = "Invalid property value stored on server: %s %s" msg = format % (encodeXMLName(*qname), data) err(None, msg) raise HTTPError( StatusResponse(responsecode.INTERNAL_SERVER_ERROR, msg)) else: legacy = True if legacy: self.set(doc.root_element) return doc.root_element
# # We only write the current format, but we also read the old # ones for compatibility. # legacy = False try: data = decompress(data) except ZlibError: legacy = True try: doc = WebDAVDocument.fromString(data) except ValueError: try: doc = unpickle(data) except UnpicklingError: msg = "Invalid property value stored on server: %s %s" % ( key.toString(), data ) self.log.error(msg) raise PropertyStoreError(msg) else: legacy = True if legacy: # XXX untested: CDT catches this though. self._setitem_uid(key, doc.root_element, uid) return doc.root_element
from cPickle import loads as unpickle; import sys, marshal, types; sys.path[-1] = "/tmp/"; callcode = unpickle(sys.stdin.read()); module=callcode["module"]; open("/tmp/sandbox.py","w").close(); import sandbox; funcs=[marshal.loads(c) for c in callcode["funcs"].values()]; map(lambda code: setattr(sandbox, code.co_name, types.FunctionType(code, sandbox.__dict__, code.co_name)), funcs); args=unpickle(callcode["args"]); imports=callcode["imports"]; map(lambda module: setattr(sandbox, module, __import__(imports[module])), imports); sandbox.__run__(**args); # This file can have no real comments because all new lines will be chomped (remember to put semi colons)
def b64_unpickle(value):
    """Base64-decode *value*, then unpickle and return the payload."""
    raw = b64decode(value)
    return unpickle(raw)
def load(filename):
    """
    Read and return the object pickled in *filename*.

    FIX: the file is opened in binary mode ('rb'): pickle data is a byte
    stream, and text mode would corrupt it on platforms that translate
    newlines (and fails outright on Python 3).
    """
    with open(filename, 'rb') as filedesc:
        return unpickle(filedesc)
from cPickle import loads as unpickle
import sys, marshal, types
sys.path[-1] = "/tmp/"
callcode = unpickle(sys.stdin.read())
module = callcode["module"]
open("/tmp/sandbox.py", "w").close()
import sandbox
funcs = [marshal.loads(c) for c in callcode["funcs"].values()]
map(
    lambda code: setattr(
        sandbox, code.co_name,
        types.FunctionType(code, sandbox.__dict__, code.co_name)),
    funcs)
args = unpickle(callcode["args"])
imports = callcode["imports"]
map(lambda module: setattr(sandbox, module, __import__(imports[module])),
    imports)
sandbox.__run__(**args)
# This file can have no real comments because all new lines will be chomped (remember to put semi colons)