def archive_to_repo(archive_path, repo, archive_type="tar"):
    """Download an archive, extract it into the repo's working tree, and commit.

    Args:
        archive_path: URL of the archive to download.
        repo: GitPython-style ``Repo`` whose working dir receives the contents.
        archive_type: "tar" or "zip".

    Raises:
        ValueError: if ``archive_type`` is not recognized.
    """
    # Download the archive and stick it in a tempfile
    r = requests.get(archive_path)
    tmp = SpooledTemporaryFile()
    tmp.write(r.content)
    tmp.seek(0)
    # Open the tempfile contents as an actual archive
    if archive_type == "tar":
        archive = tarfile.open(fileobj=tmp)
    elif archive_type == "zip":
        # BUG FIX: the zipfile module has no top-level open(); use ZipFile.
        archive = zipfile.ZipFile(tmp)
    else:
        raise ValueError("Unrecognized Archive Type")
    # Clear working files
    clear_working_dir(repo.working_dir)
    # BUG FIX: extract() expects a single member; extractall() extracts the
    # whole archive into the given directory.
    archive.extractall(repo.working_dir)
    # Add and commit everything!
    try:
        repo.git.add(".", A=True)
        repo.git.commit(m="New archive version")
    except Exception:
        # Narrowed from a bare except: commit fails when there is nothing
        # new to commit, which is expected and safe to ignore.
        pass
    # Cleanup, outta here!
    archive.close()
    tmp.close()
def generate_thumbnail(self, content):
    """Validate *content* as an image, store an RGBA thumbnail of it, and
    record the thumbnail's id/path/public URL on self.

    Aborts the request with HTTP 400 when the upload is not a readable image.
    """
    content = file_from_content(content)
    try:
        uploaded_image = Image.open(content)
    except Exception:
        # Not a decodable image: reject the upload.
        flask.abort(400, ValidationError(
            {
                'message': 'File Format',
                'object': {"error": "Format Incorrect"},
            }
        ))
    if max(uploaded_image.size) >= self.max_size:
        # Downscale oversized originals in place, then re-serialize them
        # into an in-memory spooled file so later reads see the small copy.
        uploaded_image.thumbnail((self.max_size, self.max_size), Image.BILINEAR)
        content = SpooledTemporaryFile(INMEMORY_FILESIZE)
        uploaded_image.save(content, uploaded_image.format)
        content.seek(0)
    thumbnail = uploaded_image.copy()
    thumbnail.thumbnail(self.thumbnail_size, Image.ANTIALIAS)
    thumbnail = thumbnail.convert('RGBA')
    # Stamp the target format so save() below uses it.
    thumbnail.format = self.thumbnail_format
    output = SpooledTemporaryFile(INMEMORY_FILESIZE)
    thumbnail.save(output, self.thumbnail_format)
    output.seek(0)
    thumb_path, thumb_id = self.store_content(
        output,
        'thumb.%s' % self.thumbnail_format.lower())
    self['thumb_id'] = thumb_id
    self['thumb_path'] = thumb_path
    thumbnail_file = self.thumb_file
    self['_thumb_public_url'] = thumbnail_file.public_url
    content.close()
def generate_thumbnail(self, content):
    """Store an RGBA thumbnail of *content* and record its id/path/public
    URL on self (which behaves like a mapping)."""
    content = file_from_content(content)
    uploaded_image = Image.open(content)
    if max(uploaded_image.size) >= self.max_size:
        # Downscale oversized originals in place, then re-serialize them
        # into an in-memory spooled file so later reads see the small copy.
        uploaded_image.thumbnail((self.max_size, self.max_size), Image.BILINEAR)
        content = SpooledTemporaryFile(INMEMORY_FILESIZE)
        uploaded_image.save(content, uploaded_image.format)
        content.seek(0)
    thumbnail = uploaded_image.copy()
    thumbnail.thumbnail(self.thumbnail_size, Image.ANTIALIAS)
    thumbnail = thumbnail.convert('RGBA')
    # Stamp the target format so save() below uses it.
    thumbnail.format = self.thumbnail_format
    output = SpooledTemporaryFile(INMEMORY_FILESIZE)
    thumbnail.save(output, self.thumbnail_format)
    output.seek(0)
    thumb_path, thumb_id = self.store_content(output, 'thumb.%s' % self.thumbnail_format.lower())
    self['thumb_id'] = thumb_id
    self['thumb_path'] = thumb_path
    thumbnail_file = self.thumb_file
    self['_thumb_public_url'] = thumbnail_file.public_url
    content.close()
def _do_execute_direct(self, code):
    """Run *code* through the xonsh shell, capturing stdout and stderr.

    Returns:
        (output, error, interrupted): the captured stdout text, the captured
        stderr text, and whether a KeyboardInterrupt occurred while running.
    """
    shell = builtins.__xonsh_shell__
    env = builtins.__xonsh_env__
    enc = env.get('XONSH_ENCODING')
    # BUG FIX: removed the dead ``out = io.StringIO()`` / ``err = io.StringIO()``
    # assignments that were immediately overwritten by the spooled files below.
    out = SpooledTemporaryFile(max_size=MAX_SIZE, mode='w+t',
                               encoding=enc, newline='\n')
    err = SpooledTemporaryFile(max_size=MAX_SIZE, mode='w+t',
                               encoding=enc, newline='\n')
    try:
        # Redirect both the Python-level streams and xonsh's uncaptured ones.
        with redirect_stdout(out), redirect_stderr(err), \
             swap(builtins, '__xonsh_stdout_uncaptured__', out), \
             swap(builtins, '__xonsh_stderr_uncaptured__', err), \
             env.swap({'XONSH_STORE_STDOUT': False}):
            shell.default(code)
        interrupted = False
    except KeyboardInterrupt:
        interrupted = True
    output, error = '', ''
    # tell() > 0 means something was written; rewind before reading.
    if out.tell() > 0:
        out.seek(0)
        output = out.read()
    if err.tell() > 0:
        err.seek(0)
        error = err.read()
    out.close()
    err.close()
    return output, error, interrupted
def run(self, opts):
    """Build the xz-compressed MathJax bundle shipped in the resources dir.

    Uses a local MathJax checkout when ``opts.path_to_mathjax`` is set,
    otherwise downloads a release, then zips the needed subset and writes
    mathjax.zip.xz plus a version stamp under content-server resources.
    """
    from lzma.xz import compress
    self.h = sha1()
    tdir = mkdtemp("calibre-mathjax-build")
    try:
        src = opts.path_to_mathjax or self.download_mathjax_release(tdir, opts.mathjax_url)
        self.info("Compressing MathJax...")
        t = SpooledTemporaryFile()
        # ZIP_STORED: xz performs the real compression afterwards.
        with ZipFile(t, "w", ZIP_STORED) as zf:
            self.add_file(zf, self.j(src, "unpacked", "MathJax.js"), "MathJax.js")
            self.add_tree(
                zf,
                self.j(src, "fonts", "HTML-CSS", self.FONT_FAMILY, "woff"),
                "fonts/HTML-CSS/%s/woff" % self.FONT_FAMILY,
            )
            for d in "extensions jax/element jax/input jax/output/CommonHTML".split():
                self.add_tree(zf, self.j(src, "unpacked", *d.split("/")), d)
            # The archive comment doubles as the bundle's version stamp;
            # presumably self.h is fed by add_file/add_tree — confirm.
            zf.comment = self.h.hexdigest()
        t.seek(0)
        with open(self.j(self.RESOURCES, "content-server", "mathjax.zip.xz"), "wb") as f:
            compress(t, f, level=9)
        with open(self.j(self.RESOURCES, "content-server", "mathjax.version"), "wb") as f:
            f.write(zf.comment)
    finally:
        shutil.rmtree(tdir)
def create_dump(self):
    """Write a database dump into a spooled temp file and return it rewound."""
    # Reconnect first if the connection has gone stale.
    conn = self.connection
    if not conn.is_usable():
        conn.connect()
    # Keep up to 10 MiB in memory before spilling to disk.
    spool = SpooledTemporaryFile(max_size=10 * 1024 * 1024)
    self._write_dump(spool)
    spool.seek(0)
    return spool
def process_content(self, content, filename=None, content_type=None):
    """Process an uploaded image: downscale it if oversized, store it via
    the parent class, then generate and store an RGBA thumbnail, recording
    the thumbnail's id/path/public URL on self."""
    orig_content = content
    content = utils.file_from_content(content)
    # Derive filename/content_type from the original upload object.
    __, filename, content_type = FileStorage.fileinfo(orig_content)
    uploaded_image = Image.open(content)
    if max(uploaded_image.size) >= self.max_size:
        # Downscale oversized originals and re-serialize into an
        # in-memory spooled file before storing.
        uploaded_image.thumbnail((self.max_size, self.max_size), Image.BILINEAR)
        content = SpooledTemporaryFile(INMEMORY_FILESIZE)
        uploaded_image.save(content, uploaded_image.format)
        content.seek(0)
    super(UploadedImageWithThumb, self).process_content(content, filename, content_type)
    thumbnail = uploaded_image.copy()
    thumbnail.thumbnail(self.thumbnail_size, Image.ANTIALIAS)
    thumbnail = thumbnail.convert('RGBA')
    # Stamp the target format so save() below uses it.
    thumbnail.format = self.thumbnail_format
    output = SpooledTemporaryFile(INMEMORY_FILESIZE)
    thumbnail.save(output, self.thumbnail_format)
    output.seek(0)
    thumb_path, thumb_id = self.store_content(
        output, 'thumb.%s' % self.thumbnail_format.lower())
    self['thumb_id'] = thumb_id
    self['thumb_path'] = thumb_path
    thumbnail_file = self.thumb_file
    self['_thumb_public_url'] = thumbnail_file.public_url
def graph(self):
    """Render self.code through PlantUML (via java -jar) into DOT and return
    a pydot graph, or None when there is no code or the subprocess fails."""
    # BUG FIX: the original used ``self.code is ''`` — an identity test
    # against a literal, whose result is implementation-defined. Use ==.
    if self.code == '':
        return None
    # Text-mode spooled files so the str payload can be written on Python 3
    # (the default mode is binary).
    stdin = SpooledTemporaryFile(mode='w+')
    stdout = SpooledTemporaryFile(mode='w+')
    stdin.write('@startuml\n')
    stdin.write(self.code)
    stdin.write('@enduml\n')
    stdin.seek(0)
    args = [
        self.java,
        '-jar',
        self.jar,
        '-p',       # read from stdin, write to stdout
        '-tdot',    # emit Graphviz DOT
    ]
    p = Popen(args, stdin=stdin, stdout=stdout)
    if p.wait() != 0:
        return None
    stdout.seek(0)
    graph = stdout.read()
    return graph_from_dot_data(graph)
def run_cmd(self, extra):
    """Run the configured command against the target, retrying up to
    self.trials times, and assert that it eventually exits cleanly.

    :param extra: extra whitespace-separated arguments, or None
    :type extra: str
    """
    test_cmd = self.cmd[:]
    test_cmd.extend(["-T", self.target])
    test_cmd.extend(["-P", self.targetpath])
    if extra is not None:
        test_cmd.extend(extra.split(" "))
    print(" ".join(test_cmd))
    # Retry loop: stop at the first clean exit.
    for trial in range(0, self.trials):
        print("Trial %s" % trial)
        tfile = SpooledTemporaryFile()
        # Merge stderr into the captured stdout.
        error_code = subprocess.call(test_cmd, stdout=tfile,
                                     stderr=subprocess.STDOUT)
        if error_code == 0:
            break
    if error_code != 0:
        # Show the failing run's output before the assertion fires.
        tfile.seek(0)
        print(tfile.read())
    self.assertEqual(error_code, 0)
def sponge(
    *,
    filename: str,
    append: bool = False,
    tee: bool = False,
    max_memory_size: int,
    debug: bool,
) -> None:
    """Soak up all input from stdin and write it to *filename*.

    Buffers everything first (so *filename* may appear in the input
    pipeline), then writes in one pass; with ``tee`` each line is also
    echoed to stdout, with ``append`` the file is opened in append mode.
    """
    from tempfile import SpooledTemporaryFile

    with contextlib.ExitStack() as s:
        # Buffer stdin in memory up to max_memory_size, then roll to disk.
        tmpio = SpooledTemporaryFile(max_size=max_memory_size, mode="w+", prefix="sponge.")
        for line in sys.stdin:
            tmpio.write(line)

        # NOTE(review): assumes .name is None while the spool is still in
        # memory and a real path after rollover — confirm for the targeted
        # Python version.
        has_file = bool(tmpio.name is not None)
        if has_file:
            logger.info("sponge: using temporary file (%s)", tmpio.name)

        # Register the spool for closing when the stack unwinds.
        tmpio = s.enter_context(tmpio)
        mode = "a" if append else "w"
        with open(filename, mode) as wf:
            tmpio.seek(0)
            if tee:
                # Mirror every line to stdout while writing the file.
                for line in tmpio:
                    wf.write(line)
                    sys.stdout.write(line)
            else:
                for line in tmpio:
                    wf.write(line)
class GoogleCloudFile(File):
    """Django File wrapper around a GCS blob; content is downloaded lazily
    into a spooled temporary file and uploaded back on close when dirty."""

    def __init__(self, name, mode, storage):
        self.name = name
        self.mime_type = mimetypes.guess_type(name)[0]
        self._mode = mode
        self._storage = storage
        # NOTE(mattrobenolt): This is the same change in behavior as in
        # the s3 backend. We're opting not to load the file
        # or metadata at this step. This means we won't actually
        # know a file doesn't exist until we try to read it.
        self.blob = FancyBlob(storage.download_url, self.name, storage.bucket)
        self._file = None
        self._is_dirty = False

    @property
    def size(self):
        # Blob metadata as reported by the GCS client.
        return self.blob.size

    def _get_file(self):
        # Download on first access; cache the temp file thereafter.
        if self._file is None:
            with metrics.timer('filestore.read', instance='gcs'):
                self._file = SpooledTemporaryFile(
                    max_size=self._storage.max_memory_size,
                    suffix=".GSStorageFile",
                    dir=None,
                )
                if 'r' in self._mode:
                    self._is_dirty = False
                    self.blob.download_to_file(self._file)
                    self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    file = property(_get_file, _set_file)

    def read(self, num_bytes=None):
        if 'r' not in self._mode:
            raise AttributeError("File was not opened in read mode.")
        if num_bytes is None:
            num_bytes = -1
        return super(GoogleCloudFile, self).read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was not opened in write mode.")
        self._is_dirty = True
        return super(GoogleCloudFile, self).write(force_bytes(content))

    def close(self):
        # Flush dirty writes back to GCS before closing.
        if self._file is not None:
            if self._is_dirty:
                self.file.seek(0)
                self.blob.upload_from_file(self.file, content_type=self.mime_type)
            self._file.close()
            self._file = None
class RedisFile(File):
    """Django File backed by a base64-encoded value stored in Redis;
    content is fetched lazily into a spooled temporary file."""

    def __init__(self, name, storage):
        self.name = name
        self._storage = storage
        self._file = None

    def _get_file(self):
        # Fetch and decode on first access; cache the temp file thereafter.
        if self._file is None:
            self._file = SpooledTemporaryFile()
            # get from redis
            content = self._storage.client.get(self.name)
            # stored as base64 .. decode
            content = base64.b64decode(content)
            with io.BytesIO(content) as file_content:
                copyfileobj(file_content, self._file)
            self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    file = property(_get_file, _set_file)
def _send_boto(self, boto, resp, request):
    """Fill *resp* from an s3:// *request* using the boto2 client.

    On success resp.raw is a rewound temp file with the key's contents;
    any access failure or missing key yields a 404 status.
    """
    conn = boto.connect_s3()
    bucket_name, key_string = url_to_s3_info(request.url)
    # Get the bucket without validation that it exists and that we have
    # permissions to list its contents.
    bucket = conn.get_bucket(bucket_name, validate=False)
    try:
        key = bucket.get_key(key_string)
    except boto.exception.S3ResponseError as exc:
        # This exception will occur if the bucket does not exist or if the
        # user does not have permission to list its contents.
        resp.status_code = 404
        resp.raw = exc
        return resp
    if key and key.exists:
        modified = key.last_modified
        content_type = key.content_type or "text/plain"
        resp.headers = CaseInsensitiveDict({
            "Content-Type": content_type,
            "Content-Length": key.size,
            "Last-Modified": modified,
        })
        # Spool the object body and expose it as the raw response stream.
        fh = SpooledTemporaryFile()
        key.get_contents_to_file(fh)
        fh.seek(0)
        resp.raw = fh
        resp.close = resp.raw.close
    else:
        resp.status_code = 404
    # NOTE(review): unlike the except-branch this path returns None —
    # the caller presumably keeps its own reference to resp; confirm.
def _open(self, name, mode='rb'):
    """Fetch *name* from the OSS bucket into a spooled temp file.

    Args:
        name: storage name of the blob.
        mode: only 'rb' is supported.

    Returns:
        OssFile wrapping a rewound temp file.

    Raises:
        ValueError: for any mode other than 'rb'.
        OssError: when the key is missing or the download fails.
    """
    logger().debug("name: %s, mode: %s", name, mode)
    if mode != "rb":
        raise ValueError("OSS files can only be opened in read-only mode")
    target_name = self._get_key_name(name)
    logger().debug("target name: %s", target_name)
    try:
        # Load the key into a temporary file, spooling up to 10MB in memory.
        tmpf = SpooledTemporaryFile(max_size=10 * 1024 * 1024)  # 10MB
        obj = self.bucket.get_object(target_name)
        logger().info("content length: %d, requestid: %s",
                      obj.content_length, obj.request_id)
        if obj.content_length is None:
            shutil.copyfileobj(obj, tmpf)
        else:
            # Verified copy checks the received byte count.
            oss2.utils.copyfileobj_and_verify(obj, tmpf, obj.content_length,
                                              request_id=obj.request_id)
        tmpf.seek(0)
        return OssFile(tmpf, target_name, self)
    except oss2.exceptions.NoSuchKey:
        raise OssError("%s does not exist" % name)
    except Exception:
        # BUG FIX: narrowed from a bare ``except:``, which also swallowed
        # SystemExit and KeyboardInterrupt.
        raise OssError("Failed to open %s" % name)
def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None):
    """Adapter hook: serve a local-file URL as an HTTP-like Response."""
    pathname = url_to_path(request.url)
    resp = Response()
    resp.status_code = 200
    resp.url = request.url
    try:
        stats = lstat(pathname)
    except (IOError, OSError) as exc:
        # Missing file: synthesize a JSON 404 body on a spooled stream.
        resp.status_code = 404
        payload = json.dumps({
            "error": "file does not exist",
            "path": pathname,
            "exception": repr(exc),
        })
        body = SpooledTemporaryFile()
        body.write(ensure_binary(payload))
        body.seek(0)
        resp.raw = body
        resp.close = body.close
    else:
        resp.headers = CaseInsensitiveDict({
            "Content-Type": guess_type(pathname)[0] or "text/plain",
            "Content-Length": stats.st_size,
            "Last-Modified": formatdate(stats.st_mtime, usegmt=True),
        })
        resp.raw = open(pathname, "rb")
        resp.close = resp.raw.close
    return resp
def runTest(path, connectivity='Native', location='firefox'):
    """Submit *path* to WebPageTest, poll until done, and return the parsed
    JSON result.

    Note that the CLI timeout is a global maximum that includes queueing
    time; it does not bound how long the test itself runs.
    """
    # TODO Error Handling
    cli_args = [
        'webpagetest', 'test', path,
        '--server', server,
        '--key', key,
        '--location', location,
        '--runs', '1',
        '--connectivity', connectivity,
        '--label', path,
        '--keepua',     # Don't change the useragent to indicate this is a bot
        '--first',      # Don't try for a repeat view
        '--poll', '5'   # How frequently to poll the web server for the result
        # ,'--timeout', str(timeout)
    ]
    # A spooled file (memory-backed, sizable if needed) avoids hitting the
    # pipe buffer limit that a PIPE-based capture would.
    capture = SpooledTemporaryFile(mode='w+')
    run(cli_args, stdout=capture, bufsize=4096, check=True)
    capture.seek(0)  # Rewind before reading the collected stdout.
    raw = capture.read()
    capture.close()
    return loads(raw)  # String to JSON
def generate_thumbnail(self, content):
    """Store an RGBA thumbnail of *content* and record its id/path/public
    URL on self (which behaves like a mapping)."""
    content = file_from_content(content)
    uploaded_image = Image.open(content)
    if max(uploaded_image.size) >= self.max_size:
        # Downscale oversized originals in place, then re-serialize them
        # into an in-memory spooled file so later reads see the small copy.
        uploaded_image.thumbnail((self.max_size, self.max_size), Image.BILINEAR)
        content = SpooledTemporaryFile(INMEMORY_FILESIZE)
        uploaded_image.save(content, uploaded_image.format)
        content.seek(0)
    thumbnail = uploaded_image.copy()
    thumbnail.thumbnail(self.thumbnail_size, Image.ANTIALIAS)
    thumbnail = thumbnail.convert('RGBA')
    # Stamp the target format so save() below uses it.
    thumbnail.format = self.thumbnail_format
    output = SpooledTemporaryFile(INMEMORY_FILESIZE)
    thumbnail.save(output, self.thumbnail_format)
    output.seek(0)
    thumb_path, thumb_id = self.store_content(
        output, 'thumb.%s' % self.thumbnail_format.lower())
    self['thumb_id'] = thumb_id
    self['thumb_path'] = thumb_path
    thumbnail_file = self.thumb_file
    self['_thumb_public_url'] = thumbnail_file.public_url
    content.close()
def submitTest(job, server, key):
    """Submit a WebPageTest run for *job* and return the parsed JSON result."""
    location = rowToLocation(job)
    print(location + ' ' + job['script'])
    cli_args = [
        'webpagetest', 'test', job['script'],
        '--server', server,
        '--key', key,
        '--location', location,
        '--runs', '1',
        '--connectivity', 'Native',
        '--label', job['script'],
        '--keepua',  # Don't change the useragent to indicate this is a bot
        '--first',   # Don't try for a repeat view
    ]
    # A spooled file (memory-backed, sizable if needed) avoids hitting the
    # pipe buffer limit that a PIPE-based capture would.
    capture = SpooledTemporaryFile(mode='w+')
    run(cli_args, stdout=capture, bufsize=4096, check=True)
    capture.seek(0)  # Rewind before reading the collected stdout.
    raw = capture.read()
    capture.close()
    return loads(raw)  # String to JSON
def download_slack_file(file_id: str, slack_client) -> Tuple[str, SpooledTemporaryFile]:
    """Download a private Slack file by id.

    Returns:
        (file_name, tmp): the file's basename and a rewound spooled temp
        file holding its bytes.

    Raises:
        RuntimeError: on a non-200 response or a content-type mismatch.
    """
    info = slack_client.api_call('files.info', file=file_id)
    url = info.get('file').get('url_private')
    file_name = os.path.basename(urlparse(url).path)
    expected_type = guess_type(url)[0]
    max_chunk = int(1e6)  # 1MiB
    # Spool to memory up to one chunk, then roll over to disk.
    tmp = SpooledTemporaryFile(suffix=file_name, mode='w+b', max_size=max_chunk)
    headers = {
        'user-agent': 'github.com/austinpray/kaori',
        'Authorization': f'Bearer {slack_client.token}',
    }
    with requests.get(url, headers=headers, stream=True) as resp:
        if resp.status_code != 200:
            raise RuntimeError('non-200 on image')
        content_type = resp.headers['content-type']
        if expected_type not in content_type:
            raise RuntimeError(
                f'wrong filetype {content_type}, expected {expected_type}')
        for chunk in resp.iter_content(chunk_size=max_chunk):
            tmp.write(chunk)
    tmp.seek(0)
    return file_name, tmp
def _wrapper(self, target, filename, extra=None):
    """ Common execution wrapper """
    # Assemble the command line from the configured base command.
    cmd = self.cmd[:]
    cmd.extend(["-T", target])
    cmd.extend(["-P", self.target])
    cmd.extend(["-i", filename])
    cmd.extend(["-O", self.filenames[0]])
    if extra is not None:
        cmd.extend(extra.split(' '))
    print(" ".join(cmd))
    # Retry up to self.trials times; stop at the first clean exit.
    for trial in range(0, self.trials):
        print("Trial %s" % trial)
        capture = SpooledTemporaryFile()
        error_code = subprocess.call(cmd, stdout=capture,
                                     stderr=subprocess.STDOUT)
        if error_code == 0:
            break
    if error_code != 0:
        # Dump the failing run's combined output before asserting.
        capture.seek(0)
        print(capture.read())
    self.assertEqual(error_code, 0)
class GoogleCloudFile(File):
    """Django File backed by a GCS blob: downloads lazily on first read,
    uploads dirty content back to the bucket on close()."""

    def __init__(self, name, mode, storage):
        self.name = name
        self.mime_type = mimetypes.guess_type(name)[0]
        self._mode = mode
        self._storage = storage
        self.blob = storage.bucket.get_blob(name)
        # Writing a new object: create the blob handle up front.
        if not self.blob and 'w' in mode:
            self.blob = Blob(self.name, storage.bucket, chunk_size=storage.blob_chunk_size)
        self._file = None
        self._is_dirty = False

    @property
    def size(self):
        # Blob metadata as reported by the GCS client.
        return self.blob.size

    def _get_file(self):
        # Lazily create (and for reads, download into) the spooled file.
        if self._file is None:
            self._file = SpooledTemporaryFile(
                max_size=self._storage.max_memory_size,
                suffix=".GSStorageFile",
                dir=setting("FILE_UPLOAD_TEMP_DIR"))
            if 'r' in self._mode:
                self._is_dirty = False
                self.blob.download_to_file(self._file)
                self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    file = property(_get_file, _set_file)

    def read(self, num_bytes=None):
        if 'r' not in self._mode:
            raise AttributeError("File was not opened in read mode.")
        if num_bytes is None:
            num_bytes = -1
        return super().read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was not opened in write mode.")
        self._is_dirty = True
        return super().write(force_bytes(content))

    def close(self):
        if self._file is not None:
            if self._is_dirty:
                # rewind=True makes upload_from_file seek back to byte 0.
                self.blob.upload_from_file(
                    self.file, rewind=True, content_type=self.mime_type,
                    predefined_acl=self._storage.default_acl)
            self._file.close()
            self._file = None
def process_content(self, content, filename=None, content_type=None):
    """Process an uploaded image: downscale it if oversized, store it via
    the parent class, then generate and store an RGBA thumbnail, recording
    the thumbnail's id/path/public URL on self."""
    orig_content = content
    content = utils.file_from_content(content)
    # Derive filename/content_type from the original upload object.
    __, filename, content_type = FileStorage.fileinfo(orig_content)
    uploaded_image = Image.open(content)
    if max(uploaded_image.size) >= self.max_size:
        # Downscale oversized originals and re-serialize into an
        # in-memory spooled file before storing.
        uploaded_image.thumbnail((self.max_size, self.max_size), Image.BILINEAR)
        content = SpooledTemporaryFile(INMEMORY_FILESIZE)
        uploaded_image.save(content, uploaded_image.format)
        content.seek(0)
    super(UploadedImageWithThumb, self).process_content(content, filename, content_type)
    thumbnail = uploaded_image.copy()
    thumbnail.thumbnail(self.thumbnail_size, Image.ANTIALIAS)
    thumbnail = thumbnail.convert('RGBA')
    # Stamp the target format so save() below uses it.
    thumbnail.format = self.thumbnail_format
    output = SpooledTemporaryFile(INMEMORY_FILESIZE)
    thumbnail.save(output, self.thumbnail_format)
    output.seek(0)
    thumb_path, thumb_id = self.store_content(output, 'thumb.%s' % self.thumbnail_format.lower())
    self['thumb_id'] = thumb_id
    self['thumb_path'] = thumb_path
    thumbnail_file = self.thumb_file
    self['_thumb_public_url'] = thumbnail_file.public_url
def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None):
    """Serve a local-file URL as an HTTP-like Response (404 when missing)."""
    pathname = url_to_path(request.url)
    resp = Response()
    resp.status_code = 200
    resp.url = request.url
    try:
        stats = stat(pathname)
    except (IOError, OSError) as exc:
        # No such file: report 404 with a JSON error document as the body.
        resp.status_code = 404
        message = {
            "error": "file does not exist",
            "path": pathname,
            "exception": repr(exc),
        }
        fh = SpooledTemporaryFile()
        fh.write(ensure_binary(json.dumps(message)))
        fh.seek(0)
        resp.raw = fh
        resp.close = resp.raw.close
        return resp
    # Happy path: stream the file directly with stat-derived headers.
    modified = formatdate(stats.st_mtime, usegmt=True)
    content_type = guess_type(pathname)[0] or "text/plain"
    resp.headers = CaseInsensitiveDict({
        "Content-Type": content_type,
        "Content-Length": stats.st_size,
        "Last-Modified": modified,
    })
    resp.raw = open(pathname, "rb")
    resp.close = resp.raw.close
    return resp
def string2spool(input_string):
    """Takes a string as an argument and returns an open, rewound file
    handle with the contents of the string.

    BUG FIX: the spool is now opened in a mode matching the input type —
    the original always used the default binary mode, which raises
    TypeError for str input on Python 3. Bytes input keeps working.
    """
    mode = "w+b" if isinstance(input_string, bytes) else "w+"
    file_object = SpooledTemporaryFile(mode=mode)
    file_object.write(input_string)
    file_object.seek(0)
    return file_object
def run(self, opts):
    """Build the xz-compressed MathJax bundle shipped in the resources dir.

    Uses a local MathJax checkout when ``opts.path_to_mathjax`` is set,
    otherwise downloads a release, then zips the needed subset and writes
    mathjax.zip.xz plus a version stamp under content-server resources.
    """
    from lzma.xz import compress
    self.h = sha1()
    tdir = mkdtemp('calibre-mathjax-build')
    try:
        src = opts.path_to_mathjax or self.download_mathjax_release(
            tdir, opts.mathjax_url)
        self.info('Compressing MathJax...')
        t = SpooledTemporaryFile()
        # ZIP_STORED: xz performs the real compression afterwards.
        with ZipFile(t, 'w', ZIP_STORED) as zf:
            self.add_file(zf, self.j(src, 'unpacked', 'MathJax.js'),
                          'MathJax.js')
            self.add_tree(
                zf,
                self.j(src, 'fonts', 'HTML-CSS', self.FONT_FAMILY, 'woff'),
                'fonts/HTML-CSS/%s/woff' % self.FONT_FAMILY)
            for d in 'extensions jax/element jax/input jax/output/CommonHTML'.split(
            ):
                self.add_tree(zf, self.j(src, 'unpacked', *d.split('/')), d)
            # The archive comment doubles as the bundle's version stamp;
            # presumably self.h is fed by add_file/add_tree — confirm.
            zf.comment = self.h.hexdigest()
        t.seek(0)
        # Lighter compression on CI to keep builds fast.
        with open(
                self.j(self.RESOURCES, 'content-server', 'mathjax.zip.xz'),
                'wb') as f:
            compress(t, f, level=1 if is_travis else 9)
        with open(
                self.j(self.RESOURCES, 'content-server', 'mathjax.version'),
                'wb') as f:
            f.write(zf.comment)
    finally:
        shutil.rmtree(tdir)
def __init__(self, data=None, fp=None, length=-1):
    """Hold either in-memory *data* or a file object *fp* (exactly one).

    ``length`` is derived from the data/fp when not given; unseekable
    streams are copied into a spooled temp file so the writer can be
    reused (reuse requires seeking back to the start).
    """
    # Exactly one of data / fp must be supplied.
    assert bool(data is not None) ^ bool(fp)
    if length == -1:
        if data is not None:
            length = len(data)
        else:
            length = get_size(fp)  # can be -1
    # We allow writer reuse, but if we're working with a stream, we cannot
    # seek. Copy the data to a tempfile.
    if fp and not can_seek(fp):
        newfp = SpooledTemporaryFile(MAX_INMEMORY_SIZE)
        sendfile(newfp, fp)
        length = newfp.tell()
        newfp.seek(0)
        fp = newfp
    self.data = data
    self.fp = fp
    self.fpreads = 0  # keep track of fp usage
    self.length = length
    assert length >= 0
    # Large payloads get staged through a temp file by the writer.
    self.use_tempfile = length > MAX_INMEMORY_SIZE
class DropBoxFile(File):
    """Django File that lazily downloads its content from Dropbox into a
    spooled temporary file on first access."""

    def __init__(self, name, storage):
        self.name = name
        self._storage = storage
        self._file = None

    def _get_file(self):
        # Download on first access; cache the temp file thereafter.
        if self._file is None:
            self._file = SpooledTemporaryFile()
            # As dropbox==9.3.0, the client returns a tuple
            # (dropbox.files.FileMetadata, requests.models.Response)
            file_metadata, response = \
                self._storage.client.files_download(self.name)
            if response.status_code == 200:
                with BytesIO(response.content) as file_content:
                    copyfileobj(file_content, self._file)
            else:
                # JIC the exception isn't caught by the dropbox client
                raise DropBoxStorageException(
                    "Dropbox server returned a {} response when accessing {}"
                    .format(response.status_code, self.name)
                )
            self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    file = property(_get_file, _set_file)
class InputStream(object):
    """
    FCGI_STDIN or FCGI_DATA stream.

    Uses temporary file to store received data once
    max_mem bytes have been received.
    """

    def __init__(self, max_mem=1024):
        # Spool to memory up to max_mem bytes, then roll over to disk.
        self._file = SpooledTemporaryFile(max_mem)
        self._eof_received = Event()

    def feed(self, data):
        """Append *data*; an empty chunk marks EOF and rewinds the stream."""
        if self._eof_received.is_set():
            raise IOError('Feeding file beyond EOF mark')
        if data:
            self._file.write(data)
        else:
            # EOF mark: rewind so readers start from the beginning.
            self._file.seek(0)
            self._eof_received.set()

    def __iter__(self):
        self._eof_received.wait()
        return iter(self._file)

    def read(self, size=-1):
        # Block until the producer has signalled EOF.
        self._eof_received.wait()
        return self._file.read(size)

    def readlines(self, sizehint=0):
        self._eof_received.wait()
        return self._file.readlines(sizehint)

    @property
    def eof_received(self):
        return self._eof_received.is_set()
def wrapper(self, target, oformat, instr, extra=None):
    """ Common execution wrapper: run the tool for *target*/*oformat*,
    retrying up to self.trials times, and assert a clean exit.

    For "bin" output the result is additionally sanity-checked with
    mp_bin2objdump.
    """
    self.filename = "%s.%s" % (self.filename, oformat)
    test_cmd = self.cmd[:]
    test_cmd.extend(["-T", target])
    test_cmd.extend(["-P", self.target])
    test_cmd.extend(["-O", self.filename])
    test_cmd.extend(["-ins", instr])
    if extra is not None:
        test_cmd.extend(extra.split(' '))
    # Drop empty tokens introduced by splitting.
    test_cmd = [elem for elem in test_cmd if elem != ""]
    print(" ".join(test_cmd))
    # Retry loop: stop at the first clean exit.
    for trial in range(0, self.trials):
        print("Trial %s" % trial)
        tfile = SpooledTemporaryFile()
        error_code = subprocess.call(test_cmd, stdout=tfile,
                                     stderr=subprocess.STDOUT)
        if error_code == 0:
            break
    if error_code != 0:
        # Show the failing run's combined output before asserting.
        tfile.seek(0)
        print(tfile.read())
    self.assertEqual(error_code, 0)
    if oformat == "bin":
        # Second phase: disassemble the binary to verify it is well-formed.
        print("Checking BIN...")
        test_cmd = [
            os.path.join(BASEPATH, "targets", "generic", "tools",
                         "mp_bin2objdump.py")
        ]
        test_cmd.extend(['-T', target])
        test_cmd.extend(['-i', self.filename])
        test_cmd.append("-S")
        tfile = SpooledTemporaryFile()
        error_code = subprocess.call(test_cmd, stdout=tfile,
                                     stderr=subprocess.STDOUT)
        if error_code != 0:
            tfile.seek(0)
            print(tfile.read())
        self.assertEqual(error_code, 0)
def generate_qr_code(hyperlink):
    """Render *hyperlink* as a PNG QR code and return it as a rewound BytesIO.

    Simplified: segno writes directly to any binary file object, so the
    original SpooledTemporaryFile -> read -> BytesIO round trip was dropped.
    """
    qr = segno.make_qr(hyperlink)
    res_io = io.BytesIO()
    qr.save(res_io, kind="png", scale=10)
    res_io.seek(0)
    return res_io
def ex(cmd):
    """Shell out a subprocess and return what it writes to stdout as a string"""
    # Text-mode spool, memory-backed up to 2 KiB, then disk.
    buf = SpooledTemporaryFile(max_size=2048, mode="r+")
    subprocess.check_call(cmd, shell=True, stdout=buf)
    buf.seek(0)
    try:
        return buf.read()
    finally:
        buf.close()
def _shell_command(self, cmd): """Shell out a subprocess and return what it writes to stdout as a string""" in_mem_file = SpooledTemporaryFile(max_size=2048, mode="r+") check_call(cmd, shell=True, stdout=in_mem_file) in_mem_file.seek(0) stdout = in_mem_file.read() in_mem_file.close() return stdout
def _open(self, name, mode='rb') -> File:
    """Download blob *name* from the container and return it wrapped in a
    File, rewound to byte 0."""
    name = self._transform_name(name)
    blob_bytes = self.service.get_blob_content(self.container, name)
    spool = SpooledTemporaryFile()
    spool.write(blob_bytes)
    # explicitly reset to allow reading from the beginning afterwards as-is
    spool.seek(0)
    return File(spool)
def decode_bytes(bytes):
    """Disassemble *bytes* (an iterable of int byte values) with distorm.

    IMPROVED: the byte values are joined in one pass instead of the
    original quadratic ``string = string + chr(byte)`` loop.
    """
    temp = SpooledTemporaryFile()
    string = "".join(chr(byte) for byte in bytes)
    temp.write(string)
    temp.seek(0)  # work around a stupid python bug
    return Decode(0, temp.read(), Decode64Bits)
def generate(self):
    """Render this tile's points into a coloured RGBA PNG heatmap and
    return it as a rewound temp file. (Python 2 code: py2 raise syntax.)"""
    points = self.points()
    # Working canvases are padded on every side so dot stamps near the
    # edge can spill over without index errors.
    self.buffer = 2*self.pad
    count = np.zeros([x + 2*self.buffer for x in self.expanded_size])
    density = np.zeros([x + 2*self.buffer for x in self.expanded_size])
    # Render the B&W density version of the heatmap
    dot_size = self.dot.shape[0]
    for x, y, weight in points:
        # Top-left corner of the dot stamp, centred on (x, y).
        x1 = x + self.buffer - (dot_size - 1)/2
        y1 = y + self.buffer - (dot_size - 1)/2
        count[y1:(y1 + dot_size), x1:(x1 + dot_size)] += self.dot
        density[y1:(y1 + dot_size), x1:(x1+ dot_size)] += self.dot*float(weight)
    # Pick the field to map
    if gheat_settings.GHEAT_MAP_MODE == gheat_settings.GHEAT_MAP_MODE_COUNT:
        img = count
        #opacity = np.zeros(img.shape()) + 255
    elif gheat_settings.GHEAT_MAP_MODE == gheat_settings.GHEAT_MAP_MODE_SUM_DENSITY:
        img = density
        #opacity = np.clip(count, 0, gheat_settings.GHEAT_OPACITY_LIMIT)
    elif gheat_settings.GHEAT_MAP_MODE == gheat_settings.GHEAT_MAP_MODE_MEAN_DENSITY:
        # Mean density: divide summed weight by hit count where non-zero.
        img = density
        img[count > 0] /= count[count > 0]
        #opacity = np.clip(count, 0, gheat_settings.GHEAT_OPACITY_LIMIT)
    else:
        raise ValueError, 'Unknown map mode'
    # Crop resulting density image (which could have grown) into the
    # actual canvas size we want
    img = img[(self.pad + self.buffer):(SIZE + self.pad + self.buffer),
              (self.pad + self.buffer):(SIZE + self.pad + self.buffer)]
    #opacity = opacity[self.pad:(SIZE + self.pad), self.pad:(SIZE + self.pad)]
    # Maybe use a logarithm
    img = np.where(img>0, np.log(img)+1, img)
    # Convert to a 0 to 255 image
    img = np.clip(256.0*np.power(img/gheat_settings.GHEAT_MAX_VALUE,
                                 gheat_settings.GHEAT_SCALING_COEFFICIENT),
                  0, 255.999).astype('uint8')
    # Given the B&W density image, generate a color heatmap based on
    # this Tile's color scheme.
    colour_image = np.zeros((SIZE, SIZE, 4), 'uint8') + 255
    for i in range(3):
        # Index the scheme palette by inverted intensity, per channel.
        colour_image[:,:,i] = self.schemeobj.colors[:,i][255 - img]
    # Fully transparent below the minimum density threshold.
    colour_image[:,:,3] = np.where(img > gheat_settings.GHEAT_MIN_DENSITY, 255, 0)
    tmpfile = SpooledTemporaryFile()
    writer = png.Writer(SIZE, SIZE, alpha=True, bitdepth=8)
    # pypng wants rows of length SIZE*4 (RGBA interleaved).
    writer.write(tmpfile, np.reshape(colour_image, (SIZE, SIZE*4)))
    tmpfile.seek(0)
    return tmpfile
def image(type, spec=' ', ext='png'):
    """Render a Scruffy UML diagram of *type* ('class' or 'sequence') from
    *spec*, returning png/svg/pdf bytes with the response content type set."""
    # Parameters for `suml`.
    import suml.common
    import optparse
    options = optparse.Values(({
        'scruffy': True,
        'png': ext == 'png',
        # PDF is produced by converting the SVG output below.
        'svg': ext == 'svg' or ext == 'pdf',
        'font': os.getenv('SCRUFFY_FONT', suml.common.defaultScruffyFont()),
        'shadow': False,
    }))
    from tempfile import SpooledTemporaryFile
    fout = SpooledTemporaryFile()
    # Execute Scruffy `suml`.
    if type == 'class':
        suml.yuml2dot.transform(spec, fout, options)
    elif type == 'sequence':
        suml.suml2pic.transform(spec, fout, options)
    else:
        return HTTPError(404, 'Unhandled diagram type.')
    # Retrieve the data generated.
    fout.seek(0)
    data = fout.read()
    fout.close()
    # Convert SVG to PDF?
    if ext == 'pdf':
        # Load SVG file.
        doc = xml.dom.expatbuilder.parseString(data)
        # Convert to a RLG drawing
        svg_renderer = svglib.svglib.SvgRenderer()
        svg_renderer.render(doc.documentElement)
        drawing = svg_renderer.finish()
        # Generate PDF.
        data = reportlab.graphics.renderPDF.drawToString(drawing)
    # Serve the generated image.
    if ext == 'png':
        response.content_type = 'image/png'
    elif ext == 'svg':
        response.content_type = 'image/svg+xml'
    elif ext == 'pdf':
        response.content_type = 'application/pdf'
    else:
        return HTTPError(500, 'Unhandled extension type.')
    return data
def file_from_content(content):
    """Normalize *content* (file-like object, cgi.FieldStorage, or raw
    bytes) to an open, readable file object."""
    if isinstance(content, cgi.FieldStorage):
        # FieldStorage already wraps an open file.
        return content.file
    if isinstance(content, byte_string):
        # Raw bytes: spool them into a rewound temp file.
        spooled = SpooledTemporaryFile(INMEMORY_FILESIZE)
        spooled.write(content)
        spooled.seek(0)
        return spooled
    # Anything else is assumed to already be file-like.
    return content
def test_run_command_stdin(self):
    """run_command must feed the given file object to the child's stdin."""
    connector = BaseCommandDBConnector()
    # Build a seekable stdin containing b'foo'.
    stdin_file = SpooledTemporaryFile()
    stdin_file.write(b'foo')
    stdin_file.seek(0)
    # Run
    stdout, stderr = connector.run_command('cat', stdin=stdin_file)
    self.assertEqual(stdout.read(), b'foo')
    self.assertFalse(stderr.read())
class GoogleCloudFile(File):
    """Django File backed by a GCS blob: downloads lazily on first read,
    uploads dirty content back to the bucket on close()."""

    def __init__(self, name, mode, storage):
        self.name = name
        self.mime_type = mimetypes.guess_type(name)[0]
        self._mode = mode
        self._storage = storage
        self.blob = storage.bucket.get_blob(name)
        # Writing a new object: create the blob handle up front.
        if not self.blob and 'w' in mode:
            self.blob = Blob(self.name, storage.bucket)
        self._file = None
        self._is_dirty = False

    @property
    def size(self):
        # Blob metadata as reported by the GCS client.
        return self.blob.size

    def _get_file(self):
        # Lazily create (and for reads, download into) the spooled file.
        if self._file is None:
            self._file = SpooledTemporaryFile(
                max_size=self._storage.max_memory_size,
                suffix=".GSStorageFile",
                dir=setting("FILE_UPLOAD_TEMP_DIR", None)
            )
            if 'r' in self._mode:
                self._is_dirty = False
                self.blob.download_to_file(self._file)
                self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    file = property(_get_file, _set_file)

    def read(self, num_bytes=None):
        if 'r' not in self._mode:
            raise AttributeError("File was not opened in read mode.")
        if num_bytes is None:
            num_bytes = -1
        return super(GoogleCloudFile, self).read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was not opened in write mode.")
        self._is_dirty = True
        return super(GoogleCloudFile, self).write(force_bytes(content))

    def close(self):
        # Flush dirty writes back to GCS before closing.
        if self._file is not None:
            if self._is_dirty:
                self.file.seek(0)
                self.blob.upload_from_file(self.file, content_type=self.mime_type)
            self._file.close()
            self._file = None
def read_file_handle(self, filename):
    """Get the file data, put it in a SpooledTemporaryFile object for
    return and reading
    """
    logger.debug("read_file_handle('%s')" % filename)
    buf = SpooledTemporaryFile()
    # read_file fills buf; log whatever status object it returns.
    outcome = self.rc.fs.read_file(buf, filename)
    logger.debug(outcome)
    buf.seek(0)
    return buf
def load_object(obj, temp_dir):
    """
    Load S3 object `obj` into a SpooledTemporaryFile stored in `temp_dir`.

    Return (obj, response, body) with the body rewound to byte 0.
    """
    response = obj.get()
    spool = SpooledTemporaryFile(SPOOLED_FILE_SIZE, dir=temp_dir)
    # Stream the S3 body straight into the spool.
    copyfileobj(response['Body'], spool)
    spool.seek(0)
    return obj, response, spool
def local_export(self, exporter_id, transport, options=()):
    """Export the current session via *exporter_id* and push the result
    through *transport*; returns whatever transport.send() returns."""
    _logger.info("master: Exporting locally'")
    exporter = self._sa.manager.create_exporter(exporter_id, options)
    # Spool up to 100 KiB in memory before rolling over to disk.
    exported_stream = SpooledTemporaryFile(max_size=102400, mode='w+b')
    # The dump runs asynchronously over rpyc; block until it finishes.
    async_dump = rpyc.async_(exporter.dump_from_session_manager)
    res = async_dump(self._sa.manager, exported_stream)
    res.wait()
    exported_stream.seek(0)
    result = transport.send(exported_stream)
    return result
def filter_file(filter, filename, membuffer=10485760):
    """Rewrite *filename* in place, keeping only lines for which
    ``filter(line)`` is truthy.

    Args:
        filter: predicate called with each line (newline included).
        filename: path of the text file to rewrite.
        membuffer: bytes kept in memory before the spool rolls to disk.
    """
    # BUG FIX: the spool must be in text mode — the file is read as str
    # lines, and writing str to the default binary-mode spool raises
    # TypeError on Python 3. The spool is now also always closed.
    tmp = SpooledTemporaryFile(max_size=membuffer, mode="w+")
    try:
        with open(filename) as input:
            for line in input:
                if filter(line):
                    tmp.write(line)
        tmp.seek(0)
        with open(filename, "w") as output:
            for line in tmp:
                output.write(line)
    finally:
        tmp.close()
def get_file(self, name):
    """Download object *name* from the bucket into a rewound temp file.

    IMPROVED: dir=None uses the platform temp directory instead of the
    original hard-coded '/tmp/' (which does not exist on Windows), and the
    local no longer shadows the ``file`` builtin.
    """
    name = self._normalize_name(self._clean_name(name))
    spool = SpooledTemporaryFile(
        suffix=".S3Boto3StorageFile",
        dir=None
    )
    obj = self.bucket.Object(name)
    obj.download_fileobj(spool)
    spool.seek(0)
    return spool
def _http_get(self, path: str) -> SpooledTemporaryFile:
    """GET *path* relative to ``self.repo_url`` and return the response body
    as a rewound spooled temp file (in memory up to 100 MiB, on disk beyond).

    Raises:
        requests.HTTPError: on a 4xx/5xx response, instead of silently
        buffering the error page as if it were repository data.
    """
    url = urllib.parse.urljoin(self.repo_url.rstrip("/") + "/", path)
    logger.debug("Fetching %s", url)
    response = self._session.get(url, headers=HEADERS)
    # Fail fast on HTTP errors; previously an error body was returned as data.
    response.raise_for_status()
    buffer = SpooledTemporaryFile(max_size=100 * 1024 * 1024)
    # Stream in 10 MiB chunks to avoid holding the whole payload in one bytes object.
    for chunk in response.iter_content(chunk_size=10 * 1024 * 1024):
        buffer.write(chunk)
    buffer.flush()
    buffer.seek(0)
    return buffer
def get_empty(self, opacity=OPAQUE):
    """Return a fully transparent SIZE x SIZE tile as a rewound PNG temp file.

    *opacity* is accepted for interface symmetry but currently unused: the
    alpha channel is forced to 0 (the scaled-opacity variant is disabled).
    """
    # Copy the palette row: self.colors[255,:] is a numpy view, and the
    # original in-place alpha write permanently mutated self.colors.
    color = self.colors[255, :].copy()
    #color[3] = int(color[3]*float(opacity)/255)
    color[3] = 0
    empty = np.tile(color, SIZE*SIZE).reshape(SIZE, SIZE*4)
    tmpfile = SpooledTemporaryFile()
    writer = png.Writer(SIZE, SIZE, alpha=True, bitdepth=8)
    writer.write(tmpfile, empty)
    tmpfile.seek(0)
    return tmpfile
def pipestring_process(cmd_string, stdin_string=''):
    """Pipe a python string to standard input for cmd_string

    >>> pipestring_process('grep 2', '1\\n2\\n3\\n')
    (0, '2\\n', '')
    """
    f = SpooledTemporaryFile()
    # The spooled file is opened in binary mode; encode str input so this
    # works under Python 3 (bytes pass through untouched).
    data = stdin_string.encode() if isinstance(stdin_string, str) else stdin_string
    f.write(data)
    f.seek(0)
    try:
        results = process(cmd_string, stdin=f)
    finally:
        # Release the temp file even if the subprocess call raises.
        f.close()
    return results
def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False):
    """Execute user code.

    Runs *code* through the xonsh shell while capturing stdout/stderr into
    spooled temp files, streams any captured output back to the client in
    chunks, and returns a Jupyter execute-reply message dict
    ('ok' / 'error' / 'abort').
    """
    # Empty input: reply 'ok' immediately without touching the shell.
    if len(code.strip()) == 0:
        return {'status': 'ok', 'execution_count': self.execution_count,
                'payload': [], 'user_expressions': {}}
    env = builtins.__xonsh_env__
    shell = builtins.__xonsh_shell__
    hist = builtins.__xonsh_history__
    enc = env.get('XONSH_ENCODING')
    # Text-mode spooled buffers so redirect_stdout/redirect_stderr can
    # capture str output; newline='\n' keeps line endings uniform.
    out = SpooledTemporaryFile(max_size=MAX_SIZE, mode='w+t',
                               encoding=enc, newline='\n')
    err = SpooledTemporaryFile(max_size=MAX_SIZE, mode='w+t',
                               encoding=enc, newline='\n')
    try:
        # Swap in the capture buffers for both the redirected streams and
        # xonsh's own uncaptured-stream globals while the code runs.
        # XONSH_STORE_STDOUT is disabled so history doesn't double-capture.
        with redirect_stdout(out), redirect_stderr(err), \
             swap(builtins, '__xonsh_stdout_uncaptured__', out), \
             swap(builtins, '__xonsh_stderr_uncaptured__', err), \
             env.swap({'XONSH_STORE_STDOUT': False}):
            shell.default(code)
        interrupted = False
    except KeyboardInterrupt:
        interrupted = True
    if not silent:  # stdout response
        if out.tell() > 0:
            out.seek(0)
            self._respond_in_chunks('stdout', out.read())
        if err.tell() > 0:
            err.seek(0)
            self._respond_in_chunks('stderr', err.read())
        if hasattr(builtins, '_') and builtins._ is not None:
            # rely on sys.displayhook functionality
            self._respond_in_chunks('stdout', pformat(builtins._))
            builtins._ = None
        # Nothing was captured directly: fall back to the last history entry.
        if len(hist) > 0 and out.tell() == 0 and err.tell() == 0:
            self._respond_in_chunks('stdout', hist.outs[-1])
    out.close()
    err.close()
    if interrupted:
        return {'status': 'abort', 'execution_count': self.execution_count}
    # Last return code from history decides ok vs error.
    rtn = 0 if len(hist) == 0 else hist.rtns[-1]
    if 0 < rtn:
        message = {'status': 'error', 'execution_count': self.execution_count,
                   'ename': '', 'evalue': str(rtn), 'traceback': []}
    else:
        message = {'status': 'ok', 'execution_count': self.execution_count,
                   'payload': [], 'user_expressions': {}}
    return message
class DropBoxFile(File):
    """A read-only file whose contents are fetched lazily from Dropbox."""

    def __init__(self, name, storage):
        self.name = name
        self._storage = storage

    @property
    def file(self):
        # Download once on first access and cache the local spooled copy.
        if not hasattr(self, '_file'):
            remote = self._storage.client.get_file(self.name)
            self._file = SpooledTemporaryFile()
            copyfileobj(remote, self._file)
            self._file.seek(0)
        return self._file
def generate(self): points = self.points() # Grab a new PIL image canvas img = Image.new('RGBA', self.expanded_size, 'white') # Render the B&W density version of the heatmap for x,y in points: dot_placed = Image.new('RGBA', self.expanded_size, 'white') dot_placed.paste(self.dot, (x, y)) img = ImageChops.multiply(img, dot_placed) # Crop resulting density image (which could have grown) into the # actual canvas size we want img = img.crop((self.pad, self.pad, SIZE+self.pad, SIZE+self.pad)) img = ImageChops.duplicate(img) # converts ImageCrop => Image # Given the B&W density image, generate a color heatmap based on # this Tile's color scheme. _computed_opacities = dict() pix = img.load() # Image => PixelAccess for x in range(SIZE): for y in range(SIZE): # Get color for this intensity # ============================ # is a value val = self.schemeobj.colors[0, pix[x,y][0]] try: pix_alpha = val[3] # the color image has transparency except IndexError: pix_alpha = OPAQUE # it doesn't # Blend the opacities # =================== conf, pixel = self.opacity, pix_alpha if (conf, pixel) not in _computed_opacities: opacity = int(( (conf/255.0) # from configuration * (pixel/255.0) # from per-pixel alpha ) * 255) _computed_opacities[(conf, pixel)] = opacity pix[x,y] = val[:3] + (_computed_opacities[(conf, pixel)],) tmpfile = SpooledTemporaryFile() img.save(tmpfile, 'PNG') tmpfile.seek(0) return tmpfile
def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False):
    """Execute user code.

    Runs *code* through the xonsh shell while capturing stdout/stderr into
    spooled temp files, streams any captured output back to the client in
    chunks, and returns a Jupyter execute-reply message dict
    ('ok' / 'error' / 'abort').
    """
    # Empty input: reply 'ok' immediately without touching the shell.
    if len(code.strip()) == 0:
        return {"status": "ok", "execution_count": self.execution_count,
                "payload": [], "user_expressions": {}}
    env = builtins.__xonsh_env__
    shell = builtins.__xonsh_shell__
    hist = builtins.__xonsh_history__
    enc = env.get("XONSH_ENCODING")
    # Text-mode spooled buffers so redirect_stdout/redirect_stderr can
    # capture str output; newline="\n" keeps line endings uniform.
    out = SpooledTemporaryFile(max_size=MAX_SIZE, mode="w+t",
                               encoding=enc, newline="\n")
    err = SpooledTemporaryFile(max_size=MAX_SIZE, mode="w+t",
                               encoding=enc, newline="\n")
    try:
        # Swap in the capture buffers for both the redirected streams and
        # xonsh's own uncaptured-stream globals while the code runs.
        # XONSH_STORE_STDOUT is disabled so history doesn't double-capture.
        with redirect_stdout(out), redirect_stderr(err), swap(builtins, "__xonsh_stdout_uncaptured__", out), swap(
            builtins, "__xonsh_stderr_uncaptured__", err
        ), env.swap({"XONSH_STORE_STDOUT": False}):
            shell.default(code)
        interrupted = False
    except KeyboardInterrupt:
        interrupted = True
    if not silent:  # stdout response
        if out.tell() > 0:
            out.seek(0)
            self._respond_in_chunks("stdout", out.read())
        if err.tell() > 0:
            err.seek(0)
            self._respond_in_chunks("stderr", err.read())
        if hasattr(builtins, "_") and builtins._ is not None:
            # rely on sys.displayhook functionality
            self._respond_in_chunks("stdout", pformat(builtins._))
            builtins._ = None
        # Nothing was captured directly: fall back to the last history entry.
        if len(hist) > 0 and out.tell() == 0 and err.tell() == 0:
            self._respond_in_chunks("stdout", hist.outs[-1])
    out.close()
    err.close()
    if interrupted:
        return {"status": "abort", "execution_count": self.execution_count}
    # Last return code from history decides ok vs error.
    rtn = 0 if len(hist) == 0 else hist.rtns[-1]
    if 0 < rtn:
        message = {
            "status": "error",
            "execution_count": self.execution_count,
            "ename": "",
            "evalue": str(rtn),
            "traceback": [],
        }
    else:
        message = {"status": "ok", "execution_count": self.execution_count,
                   "payload": [], "user_expressions": {}}
    return message
class TempInput(object):
    """Context manager exposing a string or bytes as a rewound file object.

    Usage::

        with TempInput(b"data") as f:
            f.read()  # -> b"data"
    """

    def __init__(self, inputstr):
        self.inputstr = inputstr

    def __enter__(self):
        """Write the payload into a fresh temp file and return it at offset 0."""
        self.tempfile = SpooledTemporaryFile()
        data = self.inputstr
        # The spooled file is binary; encode str so Python 3 callers can
        # pass either text or bytes (the original raised TypeError on str).
        if isinstance(data, str):
            data = data.encode()
        self.tempfile.write(data)
        self.tempfile.seek(0)
        return self.tempfile

    def __exit__(self, type_, value, traceback):
        self.tempfile.close()
        return False  # never suppress exceptions
def _buffer_response(status_headers, iterator):
    """Drain *iterator* into a spooled buffer, set the true Content-Length,
    and return a streaming generator over the buffered bytes."""
    out = SpooledTemporaryFile(ProxyRouter.BUFF_RESPONSE_MEM_SIZE)
    total = 0
    for chunk in iterator:
        total += len(chunk)
        out.write(chunk)
    # remove existing content length
    status_headers.replace_header('Content-Length', str(total))
    out.seek(0)
    return RewriteContent.stream_to_gen(out)