def prepare(self, method, url, body=None, headers={}):
    self['URL'] = '{0}{1}'.format(self.netloc, url)
    if method == 'DELETE':
        self['CUSTOMREQUEST'] = method
    elif method == 'HEAD':
        self['NOBODY'] = 1
    elif method == 'PUT':
        self['UPLOAD'] = 1
    elif method == 'POST':
        self['POST'] = 1
    if body is not None:
        size = len(body) if hasattr(body, '__len__') else None
        read = body.read if hasattr(body, 'read') else StringIO(body).read
        if method == 'PUT':
            if size:
                self['INFILESIZE'] = size
        elif method == 'POST':
            self['POSTFIELDSIZE'] = size if size is not None else -1
        self['READFUNCTION'] = read
    self['HTTPHEADER'] = ['{0[0]}:{0[1]}'.format(item) for item in headers.items()] +\
                         ['Expect:'] # work-around for lighttpd
    return self

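To see what prepare() ends up setting, here is a minimal sketch. It is not from the original code: it assumes the StringIO name used above is io.StringIO, treats prepare() as a standalone function, and uses a hypothetical FakeHandle test double that behaves like a dict of pycurl-style option names with the one attribute (netloc) that prepare() reads.

from io import StringIO  # assumption: the StringIO referenced by prepare() above

class FakeHandle(dict):
    # Hypothetical test double: a dict of option names plus netloc.
    netloc = 'http://localhost:8989'

FakeHandle.prepare = prepare  # attach the function shown above as a method

opts = FakeHandle().prepare('POST', '/ddfs/new', body='payload')
print(opts['URL'])            # http://localhost:8989/ddfs/new
print(opts['POSTFIELDSIZE'])  # 7, i.e. len('payload')
print(opts['HTTPHEADER'])     # ['Expect:'] (the lighttpd work-around)
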
def input_stream(fd, size, url, params):
    """Opens a StringIO whose data is everything after the url scheme.

    For example, `raw://hello_world` would return `hello_world` when read by the task.
    """
    from disco.compat import StringIO, bytes_to_str
    from disco.util import schemesplit
    scheme, string = schemesplit(url)
    ascii = bytes_to_str(string)
    return (StringIO(ascii), len(ascii), url)

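To illustrate the raw:// behaviour described in the docstring, here is a hedged sketch using only the standard library; split_scheme is a hypothetical stand-in for disco.util.schemesplit, not the real helper.

from io import StringIO

def split_scheme(url):
    # Hypothetical stand-in for disco.util.schemesplit: split 'raw://hello_world'
    # into ('raw', 'hello_world'); return (None, url) when no scheme is present.
    if '://' in url:
        scheme, _, rest = url.partition('://')
        return scheme, rest
    return None, url

scheme, payload = split_scheme('raw://hello_world')
stream = StringIO(payload)
print(stream.read())  # -> hello_world
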
def put(self, tag, urls, token=None):
    """Put the list of ``urls`` to the tag ``tag``.

    .. warning::

        Generally speaking, concurrent applications should use
        :meth:`DDFS.tag` instead.
    """
    return self._upload(canonizetag(tag),
                        StringIO(json.dumps(urls)),
                        token=token)

def tarblobs(self, tarball, compress=True, include=None, exclude=None):
    import tarfile, sys, gzip, os
    tar = tarfile.open(tarball)
    for member in tar:
        if member.isfile():
            if include and include not in member.name:
                continue
            if exclude and exclude in member.name:
                continue
            if compress:
                buf = StringIO()
                gz = gzip.GzipFile(mode='w', compresslevel=2, fileobj=buf)
                size = self._copy(tar.extractfile(member), gz)
                gz.close()
                buf.seek(0)
                suffix = '_gz'
            else:
                buf = tar.extractfile(member)
                # the extracted file object has no len(); use the tar member's size
                size = member.size
                suffix = ''
            name = DDFS.safe_name(member.name) + suffix
            yield name, buf, size

class CurlResponse(object):
    def __init__(self):
        self.headers = {}
        self.buffer = StringIO()

    def getheader(self, header, default=None):
        return self.headers.get(header.lower(), default)

    def getheaders(self):
        return self.headers.items()

    def header_function(self, header):
        if ':' in header:
            k, v = header.split(':', 1)
            self.headers[k.lower().strip()] = v.strip()

    def read(self):
        return self.buffer.getvalue()

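A small usage sketch, assuming the StringIO above is io.StringIO and that a pycurl HEADERFUNCTION-style callback would invoke header_function once per response header line:

resp = CurlResponse()
# a curl header callback typically delivers one header line per call
resp.header_function('Content-Type: application/json')
resp.header_function('Content-Length: 42')
print(resp.getheader('Content-Type'))      # -> application/json (lookup is case-insensitive)
print(resp.getheader('X-Missing', 'n/a'))  # -> n/a
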
def setattr(self, tag, attr, val, token=None):
    """Set the value of the attribute ``attr`` of the tag ``tag``."""
    return self._upload(self._tagattr(tag, attr),
                        StringIO(json.dumps(val)),
                        token=token)

def map_input_stream2(stream, size, url, params):
    return StringIO('b' + bytes_to_str(stream.read()))

def input_stream(fd, size, url, params):
    from disco.compat import StringIO
    return StringIO(url), len(url), url

def __init__(self):
    self.headers = {}
    self.buffer = StringIO()

def encode_netstring_str(d):
    msg = StringIO()
    for k, v in d:
        msg.write("{0} {1} {2} {3}\n"
                  .format(len(k), str(k), len(v), str(v)))
    return msg.getvalue()

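For example, a hedged sketch assuming the StringIO above is io.StringIO and that d is an iterable of (key, value) string pairs:

pairs = [('key', 'value'), ('n', '42')]
print(encode_netstring_str(pairs))
# 3 key 5 value
# 1 n 2 42
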
def string_input_stream(string, size, url, params):
    from disco.compat import StringIO
    return StringIO(string), len(string), url

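Usage sketch, assuming disco is installed and that disco.compat.StringIO holds text (the size and params arguments are unused here):

stream, length, url = string_input_stream('hello world', None, 'raw://ignored', None)
print(length)         # 11
print(stream.read())  # hello world
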
def reduce_calls(self):
    out = StringIO()
    self.job.profile_stats(stream=out).print_stats('worker', 'reduce')
    for line in out.getvalue().splitlines():
        if 'worker.py' in line:
            return int(line.strip().split()[0])
