def test_smbserver_get_file(self):
    """Test reading files from a shared folder.

    Exercises four cases against a freshly started SMB server:
    unauthenticated read (denied), authenticated read, path traversal
    rejection (regression for #1066), and a nonexistent file.
    """
    server = self.get_smbserver()
    self.start_smbserver(server)

    client = self.get_smbclient()

    # Check unauthenticated get: must fail before login
    local_file = BytesIO()
    with assertRaisesRegex(self, SessionError, "STATUS_ACCESS_DENIED"):
        client.getFile(self.share_name, self.share_file, local_file.write)

    # Check authenticated get
    local_file = BytesIO()
    client.login(self.username, self.password)
    client.getFile(self.share_name, self.share_file, local_file.write)
    local_file.seek(0)
    self.assertEqual(local_file.read(), b(self.share_new_content))

    # Check path traversal in get as in #1066
    local_file = BytesIO()
    with assertRaisesRegex(self, SessionError, "STATUS_OBJECT_PATH_SYNTAX_BAD"):
        client.getFile(self.share_name, join("..", self.share_unjailed_file), local_file.write)
    # Nothing may have been written to the local buffer by the rejected get.
    local_file.seek(0)
    self.assertEqual(local_file.read(), b(""))

    # Check unexistent get file
    with assertRaisesRegex(self, SessionError, "STATUS_NO_SUCH_FILE"):
        client.getFile(self.share_name, "unexistent", local_file.write)

    client.close()
def output_properties(path=None, content=None, basename=None, pseduo_location=False):
    """Describe a file (given by path or raw content) as a 'File' property dict.

    Streams the data in 1 MiB blocks to compute a sha1 checksum and total
    size, then attaches basename-derived and pseudo-location properties.
    Exactly one of *path* / *content* is expected.
    (Parameter name ``pseduo_location`` is kept as-is for caller compatibility.)
    """
    digest = hashlib.sha1()
    props = {
        "class": "File",
    }
    if path is None:
        stream = BytesIO(content)
    else:
        props["path"] = path
        stream = open(path, "rb")
    total = 0
    try:
        # 1 MiB blocks keep memory bounded for large files.
        block = stream.read(1024 * 1024)
        while block:
            digest.update(block)
            total += len(block)
            block = stream.read(1024 * 1024)
    finally:
        stream.close()
    props["checksum"] = "sha1$%s" % digest.hexdigest()
    props["size"] = total
    set_basename_and_derived_properties(props, basename)
    _handle_pseudo_location(props, pseduo_location)
    return props
class TFramedTransport(TTransportBase, CReadableTransport):
    """Class that wraps another transport and frames its I/O when writing."""

    def __init__(self, trans,):
        self.__trans = trans
        self.__rbuf = BytesIO()  # bytes of the current (already read) frame
        self.__wbuf = BytesIO()  # accumulates writes until flush()

    def isOpen(self):
        return self.__trans.isOpen()

    def open(self):
        return self.__trans.open()

    def close(self):
        return self.__trans.close()

    def read(self, sz):
        # Serve from the current frame if it still has data...
        ret = self.__rbuf.read(sz)
        if len(ret) != 0:
            return ret
        # ...otherwise fetch the next frame and retry once.
        self.readFrame()
        return self.__rbuf.read(sz)

    def readFrame(self):
        # A frame is a 4-byte big-endian signed length prefix plus payload.
        buff = self.__trans.readAll(4)
        sz, = unpack('!i', buff)
        self.__rbuf = BytesIO(self.__trans.readAll(sz))

    def write(self, buf):
        self.__wbuf.write(buf)

    def flush(self):
        wout = self.__wbuf.getvalue()
        wsz = len(wout)
        # reset wbuf before write/flush to preserve state on underlying failure
        self.__wbuf = BytesIO()
        # N.B.: Doing this string concatenation is WAY cheaper than making
        # two separate calls to the underlying socket object. Socket writes in
        # Python turn out to be REALLY expensive, but it seems to do a pretty
        # good job of managing string buffer operations without excessive copies
        buf = pack("!i", wsz) + wout
        self.__trans.write(buf)
        self.__trans.flush()

    # Implement the CReadableTransport interface.
    @property
    def cstringio_buf(self):
        return self.__rbuf

    def cstringio_refill(self, prefix, reqlen):
        # self.__rbuf will already be empty here because fastbinary doesn't
        # ask for a refill until the previous buffer is empty. Therefore,
        # we can start reading new frames immediately.
        while len(prefix) < reqlen:
            self.readFrame()
            prefix += self.__rbuf.getvalue()
        self.__rbuf = BytesIO(prefix)
        return self.__rbuf
def _onReceive(self, data):
    """Event handler when data from the display is received

    Formats a packet of four bytes in a packet type and three arguments.
    If the packet is known to have a payload, this is also fetched and
    the checksum is verified.
    The constructed packet is handed off to L{_handlePacket}.
    """
    if self.isUsb:
        # USB delivers the whole packet at once; wrap it for read() access.
        data = BytesIO(data)
        packetType = data.read(1)
    else:
        # Serial delivers byte-wise: `data` is the packet type byte and the
        # remaining bytes are read directly from the device.
        packetType = data
        data = self._dev
    arg1 = data.read(1)
    arg2 = data.read(1)
    arg3 = data.read(1)
    log.debug("Got packet of type %r with args: %r %r %r", packetType, arg1, arg2, arg3)
    # Info and extended key responses are the only packets with payload and checksum
    if packetType in (FS_PKT_INFO, FS_PKT_EXT_KEY):
        length = ord(arg1)
        payload = data.read(length)
        checksum = ord(data.read(1))
        calculatedChecksum = BrailleDisplayDriver._calculateChecksum(packetType + arg1 + arg2 + arg3 + payload)
        # Fixed: the failure message previously printed payload[-1] instead of
        # the checksum that was actually calculated.
        assert calculatedChecksum == checksum, "Checksum mismatch, expected %s but got %s" % (checksum, calculatedChecksum)
    else:
        payload = FS_DATA_EMPTY
    self._handlePacket(packetType, arg1, arg2, arg3, payload)
def test_create_file_chunked(self): source = BytesIO(b'0123456789' * 1024 * 10) # 100k bytes source.seek(0) self.client.folder(self.root_folder.path).create() f = self.client.file(self.filepath) f.upload_chunk_size = 40000 f.upload(source) dest = BytesIO() self.client.file(self.filepath).download().write_to(dest) dest.seek(0) source.seek(0) self.assertEqual(source.read(), dest.read(), "Uploaded and downloaded file's contents do not match") partial_start = 5009 partial_size = 104 partial = f.download((partial_start, partial_start + partial_size - 1)) source.seek(partial_start) source_content = source.read(partial_size) partial_content = partial.read() self.assertEqual(source_content, partial_content, "Partial download content does not match")
class TBufferedTransport(TTransportBase, CReadableTransport):
    """Class that wraps another transport and buffers its I/O.

    The implementation uses a (configurable) fixed-size read buffer
    but buffers all writes until a flush is performed.
    """
    DEFAULT_BUFFER = 4096

    def __init__(self, trans, rbuf_size=DEFAULT_BUFFER):
        self.__trans = trans
        self.__wbuf = BytesIO()
        # Fixed: BytesIO("") raises TypeError under Python 3 — an empty
        # bytes literal behaves identically on both Python 2 and 3.
        self.__rbuf = BytesIO(b"")
        self.__rbuf_size = rbuf_size

    def isOpen(self):
        return self.__trans.isOpen()

    def open(self):
        return self.__trans.open()

    def close(self):
        return self.__trans.close()

    def read(self, sz):
        # Serve from the read buffer when possible; otherwise refill it
        # with at least rbuf_size bytes from the underlying transport.
        ret = self.__rbuf.read(sz)
        if len(ret) != 0:
            return ret
        self.__rbuf = BytesIO(self.__trans.read(max(sz, self.__rbuf_size)))
        return self.__rbuf.read(sz)

    def write(self, buf):
        self.__wbuf.write(buf)

    def flush(self):
        out = self.__wbuf.getvalue()
        # reset wbuf before write/flush to preserve state on underlying failure
        self.__wbuf = BytesIO()
        self.__trans.write(out)
        self.__trans.flush()

    # Implement the CReadableTransport interface.
    @property
    def cstringio_buf(self):
        return self.__rbuf

    def cstringio_refill(self, partialread, reqlen):
        retstring = partialread
        if reqlen < self.__rbuf_size:
            # try to make a read of as much as we can.
            retstring += self.__trans.read(self.__rbuf_size)

        # but make sure we do read reqlen bytes.
        if len(retstring) < reqlen:
            retstring += self.__trans.readAll(reqlen - len(retstring))

        self.__rbuf = BytesIO(retstring)
        return self.__rbuf
class Collector(object):
    """
    Collector for map and reduce output values
    """

    def __init__(self, scheme=None, outputClient=None):
        """
        Parameters
        ---------------------------------------------
        scheme - The scheme for the datums to output - can be a json string
               - or an instance of Schema
        outputClient - The output client used to send messages to the parent
        """
        if not (isinstance(scheme, schema.Schema)):
            scheme = schema.parse(scheme)
        if (outputClient is None):
            raise ValueError("output client can't be none.")
        self.scheme = scheme
        self.buff = StringIO()
        self.encoder = avio.BinaryEncoder(self.buff)
        self.datum_writer = avio.DatumWriter(writers_schema=self.scheme)
        self.outputClient = outputClient

    def collect(self, record, partition=None):
        """Collect a map or reduce output value

        Parameters
        ------------------------------------------------------
        record - The record to write
        partition - Indicates the partition for a pre-partitioned map output
                  - currently not supported
        """
        # Fixed: truncate(0) does not rewind the stream position on Python 3,
        # so writing after a previous read would pad the buffer with NUL
        # bytes. Rewind first, then discard the old data.
        self.buff.seek(0)
        self.buff.truncate()
        self.datum_writer.write(record, self.encoder)
        self.buff.flush()
        self.buff.seek(0)
        if (partition is None):
            # TODO: Is there a more efficient way to read the data in self.buff?
            # we could use self.buff.read() but that returns the byte array as a string
            self.outputClient.request("output", {"datum": self.buff.read()})
        else:
            self.outputClient.request("outputPartitioned", {
                "datum": self.buff.read(),
                "partition": partition
            })
class QiniuFile(File):
    """Django File wrapper for Qiniu cloud storage.

    Content is fetched lazily from the storage on first read and pushed
    back to the storage on close() when the file has been written to.
    """

    def __init__(self, name, storage, mode):
        self._storage = storage
        # Strip the storage location prefix so _name is storage-relative.
        if name.startswith(self._storage.location):
            name = name[len(self._storage.location):]
        self._name = name.lstrip('/')
        self._mode = mode
        self.file = BytesIO()
        self._is_dirty = False  # True once write() has been called
        self._is_read = False   # True once content has been fetched

    @property
    def size(self):
        if self._is_dirty or self._is_read:
            # Get the size of a file like object
            # Check http://stackoverflow.com/a/19079887
            old_file_position = self.file.tell()
            self.file.seek(0, os.SEEK_END)
            self._size = self.file.tell()
            self.file.seek(old_file_position, os.SEEK_SET)
        if not hasattr(self, '_size'):
            # Untouched file: ask the remote storage for its size.
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        if not self._is_read:
            # Lazily pull the whole object from storage on first access.
            content = self._storage._read(self._name)
            self.file = BytesIO(content)
            self._is_read = True

        if num_bytes is None:
            data = self.file.read()
        else:
            data = self.file.read(num_bytes)

        # Text-mode opens get decoded text; binary-mode opens get raw bytes.
        if 'b' in self._mode:
            return data
        else:
            return force_text(data)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")

        self.file.write(force_bytes(content))
        self._is_dirty = True
        self._is_read = True

    def close(self):
        if self._is_dirty:
            # Flush the buffered content back to Qiniu before closing.
            self.file.seek(0)
            self._storage._save(self._name, self.file)
        self.file.close()
class QiniuFile(File):
    """Django File wrapper for Qiniu cloud storage (variant).

    Unlike the sibling implementation, __init__ unconditionally slices
    off the storage location prefix. Content is fetched lazily on first
    read and pushed back to the storage on close() after writes.
    """

    def __init__(self, name, storage, mode):
        self._storage = storage
        # Unconditionally drop the storage location prefix.
        self._name = name[len(self._storage.location):].lstrip('/')
        self._mode = mode
        self.file = BytesIO()
        self._is_dirty = False  # True once write() has been called
        self._is_read = False   # True once content has been fetched

    @property
    def size(self):
        if self._is_dirty or self._is_read:
            # Get the size of a file like object
            # Check http://stackoverflow.com/a/19079887
            old_file_position = self.file.tell()
            self.file.seek(0, os.SEEK_END)
            self._size = self.file.tell()
            self.file.seek(old_file_position, os.SEEK_SET)
        if not hasattr(self, '_size'):
            # Untouched file: ask the remote storage for its size.
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        if not self._is_read:
            # Lazily pull the whole object from storage on first access.
            content = self._storage._read(self._name)
            self.file = BytesIO(content)
            self._is_read = True

        if num_bytes is None:
            data = self.file.read()
        else:
            data = self.file.read(num_bytes)

        # Text-mode opens get decoded text; binary-mode opens get raw bytes.
        if 'b' in self._mode:
            return data
        else:
            return force_text(data)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")

        self.file.write(force_bytes(content))
        self._is_dirty = True
        self._is_read = True

    def close(self):
        if self._is_dirty:
            # Flush the buffered content back to Qiniu before closing.
            self.file.seek(0)
            self._storage._save(self._name, self.file)
        self.file.close()
def test_create_file_bytesio(self):
    """Round-trip a small in-memory upload and verify the download matches."""
    payload = b'vijayendra'
    source = BytesIO(payload)
    source.seek(0)

    self.client.folder(self.root_folder.path).create()
    self.client.file(self.filepath).upload(source)

    dest = BytesIO()
    self.client.file(self.filepath).download().write_to(dest)

    dest.seek(0)
    source.seek(0)
    self.assertEqual(source.read(), dest.read(),
                     "Uploaded and downloaded file's contents do not match")
def create_thumbnail(self):
    """Create and store a scaled-down copy of the speaker portrait.

    The thumbnail keeps the portrait's aspect ratio at a fixed height of
    settings.TALK_THUMBNAIL_HEIGHT pixels. No-op when no portrait exists.
    """
    if not self.portrait:
        return
    thumbnail_height = settings.TALK_THUMBNAIL_HEIGHT
    pil_type, file_extension, django_type = self._get_pil_type_and_extension()
    self.portrait.seek(0)
    image = Image.open(BytesIO(self.portrait.read()))
    # Width follows from the fixed height to preserve the aspect ratio.
    thumbnail_size = (int(thumbnail_height * image.width / image.height),
                      thumbnail_height)
    # Fixed: Image.ANTIALIAS was an alias of LANCZOS and was removed in
    # Pillow 10; LANCZOS selects the identical resampling filter.
    image.thumbnail(thumbnail_size, Image.LANCZOS)
    temp_handle = BytesIO()
    image.save(temp_handle, pil_type)
    temp_handle.seek(0)
    suf = SimpleUploadedFile(os.path.split(self.portrait.name)[-1],
                             temp_handle.read(), content_type=django_type)
    self.thumbnail.save("%s_thumbnail.%s" % (os.path.splitext(suf.name)[0],
                                             file_extension),
                        suf, save=False)
def test_create_mor(self):
    """Saving a MorMooringSetup must write the setup image to MEDIA_DIR,
    and deleting the row must remove the file again."""
    img_file_name = "MooringSetupTest.png"
    img_file_path = os.path.dirname(os.path.realpath(
        __file__)) + os.path.sep + "data" + os.path.sep + img_file_name

    # Re-encode the fixture as PNG in memory and wrap it for the model field.
    data = BytesIO()
    Image.open(img_file_path).save(data, "PNG")
    data.seek(0)
    file = ContentFile(data.read(), img_file_name)

    self.mooring_dic = {}
    mor_1 = models.MorMooringSetup(
        mor_name="MOR001",
        mor_max_depth=100,
        mor_link_setup_image="https://somelink.com",
        mor_setup_image=file)
    mor_1.save()

    # Check that the file was saved
    expected_path = os.path.join(settings.MEDIA_DIR, "whalesdb",
                                 "mooring_setup", img_file_name)
    self.assertTrue(os.path.exists(expected_path))
    self.assertTrue(os.path.isfile(expected_path))

    # Delete the image
    mor_1.delete()
    self.assertFalse(os.path.exists(expected_path))
def test_unittest_subTest_error(self):
    """Each errored subTest must be reported as its own <testcase>.

    Regression test for issue #155.
    """
    outdir = BytesIO()
    runner = xmlrunner.XMLTestRunner(stream=self.stream, output=outdir,
                                     verbosity=self.verbosity,
                                     **self.runner_kwargs)
    suite = unittest.TestSuite()
    suite.addTest(self.DummySubTest('test_subTest_error'))
    runner.run(suite)
    outdir.seek(0)
    output = outdir.read()
    # Strip volatile attributes so the regexes only match the stable parts.
    output = _strip_xml(
        output, {
            '//testsuite': (),
            '//testcase': ('classname', 'name'),
            '//failure': ('message', ),
        })
    self.assertRegexpMatches(
        output, br'<testcase classname="tests\.testsuite\.'
        br'(XMLTestRunnerTestCase\.)?DummySubTest" '
        br'name="test_subTest_error \(i=0\)"')
    self.assertRegexpMatches(
        output, br'<testcase classname="tests\.testsuite\.'
        br'(XMLTestRunnerTestCase\.)?DummySubTest" '
        br'name="test_subTest_error \(i=1\)"')
def raw_content(self):
    """Download the S3 object into memory and return its raw bytes."""
    buffer = BytesIO()
    s3_object = self._s3.Object(self.obj.bucket_name, self.obj.key)
    s3_object.download_fileobj(buffer)
    return buffer.getvalue()
def test_write_simple_file(self):
    """Round-trip a simple tractogram through an in-memory TCK file and
    check both tractogram equality and byte-for-byte file identity."""
    tractogram = Tractogram(DATA['streamlines'], affine_to_rasmm=np.eye(4))
    tck_file = BytesIO()
    tck = TckFile(tractogram)
    tck.save(tck_file)
    tck_file.seek(0, os.SEEK_SET)
    new_tck = TckFile.load(tck_file)
    assert_tractogram_equal(new_tck.tractogram, tractogram)
    # Must also match the reference file shipped with the test data...
    new_tck_orig = TckFile.load(DATA['simple_tck_fname'])
    assert_tractogram_equal(new_tck.tractogram, new_tck_orig.tractogram)
    # ...byte for byte.
    tck_file.seek(0, os.SEEK_SET)
    assert_equal(tck_file.read(), open(DATA['simple_tck_fname'], 'rb').read())
    # TCK file containing not well formatted entries in its header.
    tck_file = BytesIO()
    tck = TckFile(tractogram)
    tck.header['new_entry'] = 'value\n'  # \n not allowed
    assert_raises(HeaderError, tck.save, tck_file)
    tck.header['new_entry'] = 'val:ue'  # : not allowed
    assert_raises(HeaderError, tck.save, tck_file)
def _get_header(self):
    """Read and validate the ZS file header.

    Reads an initial chunk of HEADER_SIZE_GUESS bytes, checks the magic
    number, then reads (fetching more bytes if the guess was short) the
    length-prefixed header data and verifies its CRC.

    Returns a tuple ``(decoded_header, header_end_offset)``.
    Raises ZSCorrupt on truncated writes, bad magic, or CRC mismatch.
    """
    chunk = self._transport.chunk_read(0, HEADER_SIZE_GUESS)
    stream = BytesIO(chunk)
    magic = read_n(stream, len(MAGIC))
    # Partially written files carry a distinct placeholder magic number.
    if magic == INCOMPLETE_MAGIC:
        raise ZSCorrupt("%s: looks like this ZS file was only "
                        "partially written" % (self._transport.name, ))
    if magic != MAGIC:
        raise ZSCorrupt("%s: bad magic number (are you sure this is "
                        "a ZS file?)" % (self._transport.name))
    header_data_length, = read_format(stream, header_data_length_format)

    needed = header_data_length + CRC_LENGTH
    # File offset just past the header data and its trailing CRC.
    header_end = stream.tell() + needed
    remaining = len(chunk) - stream.tell()
    if remaining < needed:
        # The initial guess was too small; fetch the rest and rebuild the
        # stream from the unread tail plus the newly fetched bytes.
        rest = self._transport.chunk_read(len(chunk), needed - remaining)
        stream = BytesIO(stream.read() + rest)

    header_encoded = read_n(stream, header_data_length)
    header_crc = read_n(stream, CRC_LENGTH)
    if encoded_crc64xz(header_encoded) != header_crc:
        raise ZSCorrupt("%s: header checksum mismatch"
                        % (self._transport.name, ))

    return _decode_header_data(header_encoded), header_end
def test_xmlrunner_check_for_valid_xml_streamout(self):
    """
    This test checks if the xml document is valid if there are
    more than one testsuite and the output of the report is a
    single stream.
    """
    class DummyTestA(unittest.TestCase):
        def test_pass(self):
            pass

    class DummyTestB(unittest.TestCase):
        def test_pass(self):
            pass

    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    for case in (DummyTestA, DummyTestB):
        suite.addTest(loader.loadTestsFromTestCase(case))

    outdir = BytesIO()
    runner = xmlrunner.XMLTestRunner(
        stream=self.stream, output=outdir,
        verbosity=self.verbosity, **self.runner_kwargs)
    runner.run(suite)

    outdir.seek(0)
    report = outdir.read()

    # Finally check if we have a valid XML document or not.
    try:
        minidom.parseString(report)
    except Exception as e:
        self.fail(e)
def test_xmlrunner_check_for_valid_xml_streamout(self):
    """
    This test checks if the xml document is valid if there are
    more than one testsuite and the output of the report is a
    single stream.
    """
    class DummyTestA(unittest.TestCase):
        def test_pass(self):
            pass

    class DummyTestB(unittest.TestCase):
        def test_pass(self):
            pass

    # Two suites in one run force multiple <testsuite> elements in a
    # single output stream.
    suite = unittest.TestSuite()
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DummyTestA))
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DummyTestB))
    outdir = BytesIO()
    runner = xmlrunner.XMLTestRunner(
        stream=self.stream, output=outdir,
        verbosity=self.verbosity, **self.runner_kwargs)
    runner.run(suite)
    outdir.seek(0)
    output = outdir.read()
    # Finally check if we have a valid XML document or not.
    try:
        minidom.parseString(output)
    except Exception as e:  # pragma: no cover
        # note: we could remove the try/except, but it's more crude.
        self.fail(e)
class SSHStorageFile(File):
    """Django File backed by a remote SSH storage.

    Reads are fetched from the storage on demand; writes replace the
    whole in-memory buffer and are flushed to the storage on close().
    """

    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        self.file = BytesIO()
        # None means "not yet determined" — see the size property.
        self._size = None

    @property
    def size(self):
        logger.debug("I am the size")
        # Fixed: __init__ assigns self._size = None, so the previous
        # `if not hasattr(self, '_size')` guard was never True and the
        # property returned None until something else populated _size.
        if self._size is None:
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        logger.debug("I am the read")
        # Always re-fetch from the storage; _read returns a file-like object.
        self.file = self._storage._read(self._name)
        return self.file.read(num_bytes)

    def write(self, content):
        logger.debug("I am the write")
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        # NOTE: each write replaces the buffer rather than appending.
        self.file = BytesIO(content)
        self._is_dirty = True

    def close(self):
        logger.debug("I am the close")
        if self._is_dirty:
            self._storage._save(self._name, self.file.getvalue())
        self.file.close()
def default(self, obj):
    """JSON-encode objects the stock encoder rejects.

    Produces tagged dict representations for datetimes, dates and 1-D
    numpy arrays; any other unserializable object is reduced to its
    class and module names.
    """
    try:
        return super(ObjectJSONEncoder, self).default(obj)
    except TypeError as e:
        # Only handle "not serializable" errors; re-raise anything else.
        if "not JSON serializable" not in str(e):
            raise
        if isinstance(obj, datetime.datetime):
            return {
                'ISO8601_datetime': obj.strftime('%Y-%m-%dT%H:%M:%S.%f%z')
            }
        if isinstance(obj, datetime.date):
            return {'ISO8601_date': obj.isoformat()}
        if numpy is not None and isinstance(
                obj, numpy.ndarray) and obj.ndim == 1:
            # Serialize the raw .npy bytes through latin-1 so they survive
            # the round-trip as a JSON string.
            memfile = BytesIO()
            numpy.save(memfile, obj)
            memfile.seek(0)
            serialized = json.dumps(memfile.read().decode('latin-1'))
            d = {
                '__ndarray__': serialized,
            }
            return d
        else:
            # Last resort: record where the type lives so consumers can
            # at least identify what could not be serialized.
            d = {
                '__class__': obj.__class__.__qualname__,
                '__module__': obj.__module__,
            }
            return d
class _UnseekableFile(object): def __init__(self, text): super(_UnseekableFile, self).__init__() self._text = BytesIO(text) def read(self, *args, **kwargs): return self._text.read(*args, **kwargs)
def generate_glassbrain_image(image_pk):
    """Render a glass-brain thumbnail for the given Image row and store it.

    NOTE(review): in the except branch a placeholder JPEG is opened, but the
    bare `raise` immediately re-raises the original exception, so the
    placeholder bytes are never saved — confirm whether the re-raise (or the
    placeholder) is the intended behavior.
    """
    from neurovault.apps.statmaps.models import Image
    import neurovault
    import matplotlib as mpl
    mpl.rcParams['savefig.format'] = 'jpg'
    my_dpi = 50
    fig = plt.figure(figsize=(330.0/my_dpi, 130.0/my_dpi), dpi=my_dpi)
    img = Image.objects.get(pk=image_pk)
    f = BytesIO()
    try:
        glass_brain = plot_glass_brain(img.file.path, figure=fig)
        glass_brain.savefig(f, dpi=my_dpi)
    except:
        # Glass brains that do not produce will be given dummy image
        this_path = os.path.abspath(os.path.dirname(__file__))
        # NOTE(review): opened in text mode — would need "rb" if the JPEG
        # were ever actually read below.
        f = open(os.path.abspath(os.path.join(this_path, "static","images","glass_brain_empty.jpg")))
        raise
    finally:
        # Always release matplotlib figures to avoid leaking memory.
        plt.close('all')
    f.seek(0)
    content_file = ContentFile(f.read())
    img.thumbnail.save("glass_brain_%s.jpg" % img.pk, content_file)
    img.save()
class InMemoryZip(object):
    """Builds a zip archive entirely in memory."""

    def __init__(self):
        # Create the in-memory file-like object
        self.in_memory_zip = BytesIO()

    def append(self, filename_in_zip, file_contents):
        '''Appends a file with name filename_in_zip and contents of
        file_contents to the in-memory zip.'''
        # Get a handle to the in-memory zip in append mode
        zf = zipfile.ZipFile(self.in_memory_zip, "a",
                             zipfile.ZIP_DEFLATED, False)

        # Write the file to the in-memory zip
        zf.writestr(filename_in_zip, file_contents)

        # Mark the files as having been created on Windows so that
        # Unix permissions are not inferred as 0000
        for zfile in zf.filelist:
            zfile.create_system = 0

        return self

    def read(self):
        '''Returns a string with the contents of the in-memory zip.'''
        self.in_memory_zip.seek(0)
        return self.in_memory_zip.read()

    def writetofile(self, filename):
        '''Writes the in-memory zip to a file.'''
        # Fixed: `file(...)` is a Python 2-only builtin, and a zip archive
        # is binary data — open in binary mode with a context manager.
        with open(filename, "wb") as f:
            f.write(self.read())
def test_load_write_file(self):
    """Loading then re-saving a TCK file must reproduce it byte-for-byte,
    and saving with a non-identity affine must round-trip to world space."""
    for fname in [DATA['empty_tck_fname'], DATA['simple_tck_fname']]:
        for lazy_load in [False, True]:
            tck = TckFile.load(fname, lazy_load=lazy_load)
            tck_file = BytesIO()
            tck.save(tck_file)
            loaded_tck = TckFile.load(fname, lazy_load=False)
            assert_tractogram_equal(loaded_tck.tractogram, tck.tractogram)
            # Check that the written file is the same as the one read.
            tck_file.seek(0, os.SEEK_SET)
            assert_equal(tck_file.read(), open(fname, 'rb').read())
    # Save tractogram that has an affine_to_rasmm.
    for lazy_load in [False, True]:
        tck = TckFile.load(DATA['simple_tck_fname'], lazy_load=lazy_load)
        affine = np.eye(4)
        affine[0, 0] *= -1  # Flip in X
        tractogram = Tractogram(tck.streamlines, affine_to_rasmm=affine)
        new_tck = TckFile(tractogram, tck.header)
        tck_file = BytesIO()
        new_tck.save(tck_file)
        tck_file.seek(0, os.SEEK_SET)
        loaded_tck = TckFile.load(tck_file, lazy_load=False)
        # Loading applies the affine, so compare against the world-space view.
        assert_tractogram_equal(loaded_tck.tractogram,
                                tractogram.to_world(lazy=True))
def create_public_image(self):
    """
    This method creates a public version of the speaker image for display
    on the speaker lineup page. Speaker images with inappropriate aspect
    ratio may be cropped unfavourably.
    """
    if not self.portrait:
        return
    public_image_width = settings.TALK_PUBLIC_SPEAKER_IMAGE_WIDTH
    public_image_height = settings.TALK_PUBLIC_SPEAKER_IMAGE_HEIGHT
    pil_type, file_extension, django_type = self._get_pil_type_and_extension()
    self.portrait.seek(0)
    image = Image.open(BytesIO(self.portrait.read()))
    # ImageOps.fit scales AND crops to the exact target size.
    scaled = ImageOps.fit(image, (public_image_width, public_image_height))
    temp_handle = BytesIO()
    scaled.save(temp_handle, pil_type)
    temp_handle.seek(0)
    suf = SimpleUploadedFile(os.path.split(self.portrait.name)[-1],
                             temp_handle.read(), content_type=django_type)
    self.public_image.save("%s_public.%s" % (os.path.splitext(suf.name)[0],
                                             file_extension),
                           suf, save=False)
def encode_string_as_bmp_image(string_data):
    """Encode *string_data* into a BMP image, one bit per color channel.

    Each UTF-8 byte becomes 8 bits stored as 0/255 channel values spread
    over three RGB pixels (the ninth channel of every triple is padded
    with 0). Returns the BMP file contents as bytes.
    (Unused local `num_bytes` from the original implementation removed.)
    """
    data_bytes = array.array("B", string_data.encode("utf-8"))

    # Encode data bytes to color data (RGB), one bit per channel.
    # This is to avoid errors due to different color spaces used in decoding.
    color_data = []
    for byte in data_bytes:
        p = [int(x) * 255 for x in '{0:08b}'.format(byte)]
        color_data.append((p[0], p[1], p[2]))
        color_data.append((p[3], p[4], p[5]))
        color_data.append((p[6], p[7], 0))

    # Render image on the smallest square canvas that fits all pixels.
    num_pixels = len(color_data)
    sqrt = int(math.ceil(math.sqrt(num_pixels)))
    img = Image.new("RGB", (sqrt, sqrt), "black")
    img.putdata(color_data)

    # Flush image to string.
    f = BytesIO()
    img.save(f, "BMP")
    f.seek(0)
    return f.read()
def query_mission_list(self, cache=True, get_query_payload=False):
    """
    Returns a list of all available mission tables with descriptions
    """
    # Deliberately query a nonexistent table ('xxx'): the server's error
    # page then lists every available table.
    request_payload = self._args_to_payload(Entry='none', mission='xxx',
                                            displaymode='BatchDisplay')

    if get_query_payload:
        return request_payload

    # Parse the results specially (it's ascii format, not fits)
    response = self.query_async(request_payload, url=conf.server, cache=cache)
    data = BytesIO(response.content)
    data_str = data.read().decode('utf-8')
    # Drop the error banner preceding the actual table list.
    data_str = data_str.replace(
        'Table xxx does not seem to exist!\n\n\n\nAvailable tables:\n', '')
    table = Table.read(data_str, format='ascii.fixed_width_two_line',
                       delimiter='+', header_start=1, position_line=2,
                       data_start=3, data_end=-1)
    return table
def test_upload(pycurl):
    """Upload local file.

    Mocks pycurl so that READDATA captures the input buffer and perform()
    consumes it, then verifies the upload drained the buffer completely.
    """
    client = pycurl.Curl()

    def setopt(option, value):
        """Mock function to set input buffer."""
        if option == pycurl.READDATA:
            client.input_buffer = value

    client.setopt.side_effect = setopt

    def perform():
        """Mock function to read local file from input buffer."""
        client.input_buffer.read()

    client.perform.side_effect = perform

    ftps = FTPS('ftps://<user>:<password>@host')
    input_buffer = BytesIO(b'<input>')
    # Patch open()/os so no real filesystem access happens.
    with patch('ftps.ftps.open') as open_, patch('ftps.ftps.os'):
        open_().__enter__.return_value = input_buffer
        ftps.upload('f1.txt', 'f1.txt')
    # perform() must have consumed the whole buffer.
    assert input_buffer.read() == b''
def log_artifact(self, task_run, name, artifact, artifact_target):
    """Persist *artifact* (a path string or a matplotlib Figure) at
    *artifact_target* and return the target.

    Raises DatabandRuntimeError for unsupported artifact types.
    """
    artifact_target.mkdir_parent()

    if isinstance(artifact, six.string_types):
        from targets.dir_target import DirTarget

        artifact_target_source = target(artifact)
        if isinstance(artifact_target_source, DirTarget):
            # Directories are copied wholesale.
            artifact_target_source.copy(artifact_target)
        else:
            data = artifact_target_source.read()
            artifact_target.write(data)
        return artifact_target

    if PYPLOT_INSTALLED and isinstance(artifact, Figure):
        # Render the figure to an in-memory image and store the bytes.
        temp = BytesIO()
        artifact.savefig(temp)
        temp.seek(0)
        artifact_target.write(temp.read(), mode="wb")
        return artifact_target

    raise DatabandRuntimeError(
        "Could not recognize artifact of type %s, must be string or matplotlib Figure"
        % type(artifact))
def setUp(self):
    """Prepare form data, URLs and the mooring-setup test image."""
    super().setUp()
    self.data = Factory.MorFactory.get_valid_data()
    self.test_url = reverse_lazy('whalesdb:create_mor')
    # Since this is intended to be used as a pop-out form, the html file should start with an underscore
    self.test_expected_template = 'shared_models/shared_entry_form.html'
    self.expected_success_url = reverse_lazy('whalesdb:list_mor')
    self.expected_view = views.MorCreate
    self.expected_form = forms.MorForm
    self.img_file_name = "MooringSetupTest.png"
    self.img_file_path = os.path.dirname(os.path.realpath(__file__)) + os.path.sep + "data" + os.path.sep + \
        self.img_file_name

    # Re-encode the fixture as an in-memory PNG.
    data = BytesIO()
    Image.open(self.img_file_path).save(data, "PNG")
    data.seek(0)
    file = ContentFile(data.read(), self.img_file_name)

    # add the image to the data array
    # NOTE(review): the ContentFile built above (`file`) is unused — the form
    # data receives the *path* string instead. Confirm whether the upload
    # object was meant to be assigned here.
    self.data['mor_setup_image'] = self.img_file_path
def test_xmlrunner_non_ascii_failures(self):
    """Non-ASCII text in failure output must survive into the XML report."""
    suite = unittest.TestSuite()
    suite.addTest(self.DummyTest(
        'test_non_ascii_runner_buffer_output_fail'))
    outdir = BytesIO()
    runner = xmlrunner.XMLTestRunner(
        stream=self.stream, output=outdir,
        verbosity=self.verbosity, **self.runner_kwargs)

    # allow output non-ascii letters to stdout
    orig_stdout = sys.stdout
    if getattr(sys.stdout, 'buffer', None):
        # Python3
        sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
    else:
        # Python2
        import codecs
        sys.stdout = codecs.getwriter("utf-8")(sys.stdout)

    try:
        runner.run(suite)
    finally:
        if getattr(sys.stdout, 'buffer', None):
            # Python3
            # Not to be closed when TextIOWrapper is disposed.
            sys.stdout.detach()
        sys.stdout = orig_stdout

    outdir.seek(0)
    output = outdir.read()
    self.assertIn(
        u'Where is the café ?'.encode('utf8'), output)
    self.assertIn(
        u'The café could not be found'.encode('utf8'), output)
def request(self, method, uri, xml_root=None):
    """
    :type method: str
    :type uri: str
    :type xml_root: ETree.Element
    :rtype: ETree.Element
    :raises ClarityException: if Clarity returns an exception as XML
    """
    # Only pay for the timer when request logging is enabled.
    request_start_seconds = time.perf_counter() if self.log_requests else 0
    if xml_root is None:
        response = self.raw_request(method, uri)
    else:
        # Falls back to StringIO and regular string for Python 2
        # NOTE(review): the buffer is created with the XML declaration but
        # the stream position starts at 0, so ElementTree.write() overwrites
        # it — confirm whether the declaration was meant to be preserved
        # (a seek to the end before writing would do that).
        outbuffer = BytesIO(b('<?xml version="1.0" encoding="UTF-8"?>\n'))
        ETree.ElementTree(xml_root).write(outbuffer)
        outbuffer.seek(0)
        log.debug("Data for request: %s", outbuffer.read())
        outbuffer.seek(0)
        response = self.raw_request(
            method, uri, data=outbuffer.getvalue(),
            headers={'Content-Type': 'application/xml'})
        outbuffer.close()
    # Empty bodies (e.g. DELETE responses) yield None instead of XML.
    xml_response_root = ETree.XML(
        response.content) if response.content else None
    if self.log_requests:
        request_elapsed_seconds = time.perf_counter() - request_start_seconds
        log.info("clarity request method: '%s' uri: %s took: %.3f s",
                 method, uri, request_elapsed_seconds)
    return xml_response_root
def default(self, obj):
    """JSON-encode objects the stock encoder rejects.

    Produces tagged dict representations for datetimes, dates and 1-D
    numpy arrays; any other unserializable object is reduced to its
    class and module names.
    """
    try:
        return super(ObjectJSONEncoder, self).default(obj)
    except TypeError as e:
        # Only handle "not serializable" errors; re-raise anything else.
        if "not JSON serializable" not in str(e):
            raise
        if isinstance(obj, datetime.datetime):
            return {'ISO8601_datetime': obj.strftime('%Y-%m-%dT%H:%M:%S.%f%z')}
        if isinstance(obj, datetime.date):
            return {'ISO8601_date': obj.isoformat()}
        if numpy is not None and isinstance(obj, numpy.ndarray) and obj.ndim == 1:
            # Serialize the raw .npy bytes through latin-1 so they survive
            # the round-trip as a JSON string.
            memfile = BytesIO()
            numpy.save(memfile, obj)
            memfile.seek(0)
            serialized = json.dumps(memfile.read().decode('latin-1'))
            d = {
                '__ndarray__': serialized,
            }
            return d
        else:
            # Last resort: record where the type lives so consumers can
            # at least identify what could not be serialized.
            d = {
                '__class__': obj.__class__.__qualname__,
                '__module__': obj.__module__,
            }
            return d
def _get_header(self):
    """Read and validate the ZS file header.

    Reads an initial chunk of HEADER_SIZE_GUESS bytes, checks the magic
    number, then reads (fetching more bytes if the guess was short) the
    length-prefixed header data and verifies its CRC.

    Returns a tuple ``(decoded_header, header_end_offset)``.
    Raises ZSCorrupt on truncated writes, bad magic, or CRC mismatch.
    """
    chunk = self._transport.chunk_read(0, HEADER_SIZE_GUESS)
    stream = BytesIO(chunk)
    magic = read_n(stream, len(MAGIC))
    # Partially written files carry a distinct placeholder magic number.
    if magic == INCOMPLETE_MAGIC:
        raise ZSCorrupt("%s: looks like this ZS file was only "
                        "partially written" % (self._transport.name,))
    if magic != MAGIC:
        raise ZSCorrupt("%s: bad magic number (are you sure this is "
                        "a ZS file?)" % (self._transport.name))
    header_data_length, = read_format(stream, header_data_length_format)

    needed = header_data_length + CRC_LENGTH
    # File offset just past the header data and its trailing CRC.
    header_end = stream.tell() + needed
    remaining = len(chunk) - stream.tell()
    if remaining < needed:
        # The initial guess was too small; fetch the rest and rebuild the
        # stream from the unread tail plus the newly fetched bytes.
        rest = self._transport.chunk_read(len(chunk), needed - remaining)
        stream = BytesIO(stream.read() + rest)

    header_encoded = read_n(stream, header_data_length)
    header_crc = read_n(stream, CRC_LENGTH)
    if encoded_crc64xz(header_encoded) != header_crc:
        raise ZSCorrupt("%s: header checksum mismatch"
                        % (self._transport.name,))

    return _decode_header_data(header_encoded), header_end
class FakeResponse(object):
    """A fake HTTPResponse object for testing."""

    def __init__(self, code, body, headers=None):
        self.code = code
        self.msg = str(code)
        self.headers = {} if headers is None else headers
        self.info = lambda: self.headers
        # Store the body as bytes, encoding text input the way a real
        # HTTP response would.
        if isinstance(body, six.text_type):
            body = body.encode('utf-8')
        self.body_file = BytesIO(body)

    def read(self):
        """Read the entire response body."""
        return self.body_file.read()

    def readline(self):
        """Read a single line from the response body."""
        return self.body_file.readline()

    def close(self):
        """Close the connection."""
        pass
def generate_glassbrain_image(image_pk):
    """Render a glass-brain thumbnail for the given Image row and store it.

    NOTE(review): in the except branch a placeholder JPEG is opened, but the
    bare `raise` immediately re-raises the original exception, so the
    placeholder bytes are never saved — confirm whether that is intended.
    """
    from neurovault.apps.statmaps.models import Image
    import matplotlib as mpl
    mpl.rcParams['savefig.format'] = 'jpg'
    my_dpi = 50
    fig = plt.figure(figsize=(330.0 / my_dpi, 130.0 / my_dpi), dpi=my_dpi)
    img = Image.objects.get(pk=image_pk)
    f = BytesIO()
    try:
        glass_brain = plot_glass_brain(img.file.path, figure=fig)
        glass_brain.savefig(f, dpi=my_dpi)
    except:
        # Glass brains that do not produce will be given dummy image
        this_path = os.path.abspath(os.path.dirname(__file__))
        # NOTE(review): opened in text mode — would need "rb" if the JPEG
        # were ever actually read below.
        f = open(
            os.path.abspath(
                os.path.join(this_path, "static", "images",
                             "glass_brain_empty.jpg")))
        raise
    finally:
        # Always release matplotlib figures to avoid leaking memory.
        plt.close('all')
    f.seek(0)
    content_file = ContentFile(f.read())
    img.thumbnail.save("glass_brain_%s.jpg" % img.pk, content_file)
    img.save()
def image_to_png(img):
    """Convert a PIL image to a PNG binary string."""
    buffer = BytesIO()
    img.save(buffer, format='png')
    data = buffer.getvalue()
    buffer.close()
    return data
def tostring(self):
    """
    Return a string representation of the entire XML document.
    """
    buffer = BytesIO()
    self.write(buffer)
    # getvalue() returns everything written, independent of position.
    return buffer.getvalue()
def test_download_to(test_file, mock_box_session, mock_content_response):
    """download_to must stream the file content into the given writable."""
    expected_url = test_file.get_url('content')
    mock_box_session.get.return_value = mock_content_response
    mock_writeable_stream = BytesIO()
    test_file.download_to(mock_writeable_stream)
    mock_writeable_stream.seek(0)
    # The stream must hold exactly the mocked response body...
    assert mock_writeable_stream.read() == mock_content_response.content
    # ...and the request must be made once, streamed, without JSON parsing.
    mock_box_session.get.assert_called_once_with(expected_url, expect_json_response=False, stream=True)
def compress_sequence(sequence):
    """Gzip-compress *sequence* items, yielding output after flushes spaced
    at random (exponentially distributed) byte intervals.

    NOTE(review): this generator looks partially mangled and deserves a
    close audit: `rnd = None` is assigned twice; `flushes_remaining` is
    decremented but never reset; `chunk` is b"" (never None) when the
    while-loop exits, so the `if chunk is None: break` guard appears dead;
    and the trailing `for item in sequence:` re-iterates an iterable that
    may already be exhausted while writing from the *previous* item's
    `chunking_buf`. Confirm the intended control flow against the original
    upstream implementation before relying on it.
    """
    # Mean flush spacing (bytes) for the exponential distribution.
    avg_block_size = 1.0 / AVERAGE_SPAN_BETWEEN_FLUSHES
    buf = StreamingBuffer()
    zfile = GzipFile(mode='wb', compresslevel=6, fileobj=buf)
    # Output headers...
    yield buf.read()
    flushes_remaining = FLUSH_LIMIT
    rnd = None
    count = None
    rnd = None
    for item in sequence:
        if rnd is None:
            # Seed from the first item so spacing is deterministic per input.
            rnd = Random(hash(item))
            count = int(rnd.expovariate(avg_block_size))
        chunking_buf = BytesIO(item)
        chunk = chunking_buf.read(count)
        while chunk:
            if count is not None:
                count -= len(chunk)
            zfile.write(chunk)
            if count <= 0:
                flushes_remaining -= 1
                zfile.flush()
                yield buf.read()
                if flushes_remaining:
                    count = int(rnd.expovariate(avg_block_size))
                else:
                    # Flush budget exhausted: stop random chunking.
                    count = None
            if count is None:
                chunk = chunking_buf.read()
            else:
                chunk = chunking_buf.read(count)
        zfile.flush()
        yield buf.read()
        if chunk is None:
            break
    for item in sequence:
        zfile.write(chunking_buf.read())
        zfile.flush()
        yield buf.read()
    zfile.close()
    yield buf.read()
def test_truncated_buffer():
    """Reading an image from a truncated in-memory buffer must raise."""
    full = BytesIO()
    plt.savefig(full)
    full.seek(0)
    truncated = BytesIO(full.read(20))
    truncated.seek(0)
    with pytest.raises(Exception):
        plt.imread(truncated)
def read(self, *args, **kwargs):
    """Fail on second read."""
    previously_called = self.called
    self.called = True
    if previously_called:
        # Closing the buffer makes the read below raise, simulating a
        # mid-stream failure on the second call.
        self.close()
    if PY2:
        return BytesIO.read(self, *args, **kwargs)
    return super(BadBytesIO, self).read(*args, **kwargs)
def n3(graph, with_head=False):  # pragma: no cover
    """Serialize an RDF graph as N3, by default omitting the namespace
    declarations.

    :param graph: an rdflib-style graph exposing ``serialize(dest, format=...)``.
    :param with_head: when True, keep the leading namespace block.
    :return: the serialized N3 bytes.
    """
    out = BytesIO()
    graph.serialize(out, format='n3')
    out.seek(0)
    res = out.read()
    if with_head:
        return res
    # The namespace header is separated from the body by a blank line.
    # Fix: ``binary_type('\n\n')`` raised TypeError on Python 3 (bytes() from
    # a str requires an encoding) — use a bytes literal instead.
    return res.split(b'\n\n', 1)[1]
def render(self, ctx, req):
    """Write ctx/req into a fresh Document and return the serialized bytes."""
    document = Document()
    self.write(ctx, req, document)
    #
    # TODO: add tsammalex license information!
    #
    buf = BytesIO()
    document.save(buf)
    return buf.getvalue()
def check_binary(name, file_path=True):
    """Return whether the content looks binary.

    Handles files if file_path is True or text if file_path is False.
    NOTE(review): when ``file_path`` is False, ``name`` presumably must
    already be bytes for BytesIO — confirm against callers.
    """
    source = open(name, "rb") if file_path else BytesIO(name)
    with source as handle:
        return util.is_binary(handle.read(1024))
def test_doctest_example(self):
    """XML report contains one testcase entry per doctest example."""
    suite = doctest.DocTestSuite(tests.doctest_example)
    xml_out = BytesIO()
    log = StringIO()
    runner = xmlrunner.XMLTestRunner(stream=log, output=xml_out, verbosity=0)
    runner.run(suite)
    xml_out.seek(0)
    report = xml_out.read()
    expected_fragments = (
        'classname="tests.doctest_example.Multiplicator" name="threetimes"',
        'classname="tests.doctest_example" name="twice"',
    )
    for fragment in expected_fragments:
        self.assertIn(fragment.encode("utf8"), report)
def data_preprocess(data, size=None, min_axis=None, batch=False):
    """
    Takes data and prepares it for sending to the api including
    resizing and image data/structure standardizing.

    Accepts a file path, a base64/url/raw string, a PIL image, or a numpy
    array (detected by type name so numpy is not a hard dependency).
    Returns either the pass-through base64/url string or a base64-encoded
    PNG of the (possibly resized) image.
    """
    if batch:
        # Recurse element-wise; each element is preprocessed as a single item.
        return [data_preprocess(el, size=size, min_axis=min_axis, batch=False)
                for el in data]

    if isinstance(data, string_types):
        if file_exists(data):
            # probably a path to an image
            preprocessed = Image.open(data)
        else:
            # base 64 encoded image data, a url, or raw content
            # send raw data to the server and let the server infer type
            b64_or_url = re.sub('^data:image/.+;base64,', '', data)
            return b64_or_url
    elif isinstance(data, Image.Image):
        # data is image from PIL
        preprocessed = data
    elif type(data).__name__ == "ndarray":
        # data is likely image from numpy/scipy
        if "float" in str(data.dtype) and data.min() >= 0 and data.max() <= 1:
            # Scale [0, 1] floats up to the 0-255 byte range expected by PIL.
            # NOTE(review): this mutates the caller's array in place — confirm
            # that is acceptable.
            data *= 255.
        try:
            preprocessed = Image.fromarray(data.astype("uint8"))
        except TypeError:
            raise IndicoError(
                "Please ensure the numpy array is in a format by PIL. "
                "Values must be between 0 and 1 or between 0 and 255 in greyscale, rgb, or rgba format."
            )
    else:
        # at this point we are unsure of the type -- it could be malformatted text or image data.
        raise IndicoError(
            "Invalid input datatype: `{}`. "
            "Ensure input data is one of the following types: "
            "`str`, `unicode`, `PIL.Image`, `np.ndarray`.".format(
                data.__class__.__name__
            )
        )

    # if size or min_axis:
    # NOTE(review): the guard above is commented out, so resize_image always
    # runs — presumably it no-ops when size/min_axis are None; confirm.
    preprocessed = resize_image(preprocessed, size, min_axis)

    # standardize on b64 encoding for sending image data over the wire
    temp_output = BytesIO()
    preprocessed.save(temp_output, format='PNG')
    temp_output.seek(0)
    output_s = temp_output.read()
    # b64encode returns bytes on Python 3, so decode to str there.
    return base64.b64encode(output_s).decode('utf-8') if PY3 else base64.b64encode(output_s)
class VCRHTTPResponse(HTTPResponse):
    """
    Stub response class that gets returned instead of a HTTPResponse.

    Replays a previously recorded response dict: status/reason come from
    ``recorded_response['status']``, the body is served from an in-memory
    buffer, and headers are re-parsed from the recorded mapping.
    """

    def __init__(self, recorded_response):
        self.recorded_response = recorded_response
        status = recorded_response['status']
        self.reason = status['message']
        self.status = self.code = status['code']
        self.version = None
        self._content = BytesIO(recorded_response['body']['string'])
        self._closed = False
        self.msg = parse_headers(recorded_response['headers'])
        self.length = compat.get_header(self.msg, 'content-length') or None

    @property
    def closed(self):
        # Python 3 does not allow assigning to ``closed`` directly, so the
        # real flag lives in ``_closed`` and this property shadows the
        # superclass attribute.
        return self._closed

    def read(self, *args, **kwargs):
        return self._content.read(*args, **kwargs)

    def readline(self, *args, **kwargs):
        return self._content.readline(*args, **kwargs)

    def close(self):
        self._closed = True
        return True

    def getcode(self):
        return self.status

    def isclosed(self):
        return self.closed

    def info(self):
        return parse_headers(self.recorded_response['headers'])

    def getheaders(self):
        message = parse_headers(self.recorded_response['headers'])
        return list(compat.get_header_items(message))

    def getheader(self, header, default=None):
        wanted = header.lower()
        matches = [value for (name, value) in self.getheaders()
                   if name.lower() == wanted]
        return ', '.join(matches) if matches else default
def _decodeResponse(self, response): """Decompresses (if necessary) the response from the server.""" encoding = response.getheader('Content-Encoding') content = response.read() if encoding in ('gzip', 'x-gzip', 'deflate'): if encoding == 'deflate': data = BytesIO(zlib.decompress(content)) else: data = gzip.GzipFile('', 'rb', 9, BytesIO(content)) content = data.read() return content
def generate_surface_image(image_pk):
    """Project a volumetric statmaps Image onto the fsaverage surface and save
    one GIFTI file per hemisphere on the model.

    Only runs for volume-origin images registered to GenericMNI or
    MNI152NLin2009cAsym templates; otherwise the function is a no-op.

    Side effects: saves ``img.surface_left_file`` / ``img.surface_right_file``
    and re-saves the model instance.
    """
    from neurovault.apps.statmaps.models import Image
    from scipy.io import loadmat
    from scipy.interpolate import interpn
    img = Image.objects.get(pk=image_pk)
    if img.target_template_image in ['GenericMNI', 'MNI152NLin2009cAsym'] and \
            img.data_origin == 'volume':
        img_vol = nib.load(img.file.path)
        data_vol = img_vol.get_data()
        if data_vol.ndim > 3:
            # Keep only the first volume of a 4-D image.
            data_vol = data_vol[:, :, :, 0]  # number of time points
        this_path = os.path.abspath(os.path.dirname(__file__))
        for hemi in ['lh', 'rh']:
            # Precomputed MNI->fsaverage mapping (RAS mm coordinates per vertex).
            ras_coor = loadmat(os.path.abspath(os.path.join(
                this_path, "static", "anatomical",
                "%s.avgMapping_allSub_RF_ANTs_MNI2fs.mat" % hemi)))['ras']
            # Convert RAS mm to voxel indices via the inverse affine.
            vox_coor = nib.affines.apply_affine(
                numpy.linalg.inv(img_vol.affine), ras_coor.T).T
            img_surf = nib.gifti.GiftiImage()
            # Label-like maps (atlases, parcellations 'Pa', ROIs 'R') must not
            # be blended across labels, so use nearest-neighbour sampling.
            if img.polymorphic_ctype.model == 'atlas' or \
                    (hasattr(img, 'map_type') and img.map_type in ['Pa', 'R']):
                method = 'nearest'
            else:
                method = 'linear'
            data_surf = interpn(
                points=[range(data_vol.shape[0]), range(data_vol.shape[1]),
                        range(data_vol.shape[2])],
                values=data_vol, xi=vox_coor.T, method=method,
                bounds_error=False, fill_value=0)
            # without turning nan's to zeros Connectome Workbench behaves weird
            data_surf[numpy.isnan(data_surf)] = 0
            # ASCII is the only encoding that produces outputs compatible with Connectome Workbench
            data_surf_gifti = nib.gifti.GiftiDataArray(
                data_surf, 'NIFTI_INTENT_NONE', 'NIFTI_TYPE_FLOAT32', 'ASCII')
            img_surf.add_gifti_data_array(data_surf_gifti)
            img_surf.meta.data.insert(0, nib.gifti.GiftiNVPairs(
                'AnatomicalStructurePrimary',
                {'lh': 'CortexLeft', 'rh': 'CortexRight'}[hemi]))
            # Serialize the GIFTI image into an in-memory buffer.
            f = BytesIO()
            fmap = {'image': nib.FileHolder(fileobj=f),
                    'header': nib.FileHolder(fileobj=f)}
            img_surf.to_file_map(fmap)
            f.seek(0)
            content_file = ContentFile(f.read())
            if hemi == 'lh':
                img.surface_left_file.save(
                    "%s.%s.func.gii" % (img.pk, {'lh': 'L', 'rh': 'R'}[hemi]),
                    content_file)
            else:
                img.surface_right_file.save(
                    "%s.%s.func.gii" % (img.pk, {'lh': 'L', 'rh': 'R'}[hemi]),
                    content_file)
        img.save()
# NOTE(review): module-level statement — this prints at import time, not when
# generate_surface_image() finishes; it looks like leftover debugging or a
# mis-indented line that belongs inside the function above. Confirm before
# removing.
print("Surface image generation done.")
def test_xmlrunner_unsafe_unicode(self):
    """Unsafe unicode output is escaped into a CDATA section in the report."""
    suite = unittest.TestSuite()
    suite.addTest(self.DummyTest('test_unsafe_unicode'))
    report = BytesIO()
    runner = xmlrunner.XMLTestRunner(
        stream=self.stream, output=report, verbosity=self.verbosity,
        **self.runner_kwargs)
    runner.run(suite)
    report.seek(0)
    xml = report.read()
    self.assertIn(u"<![CDATA[ABCD\n]]>".encode('utf8'), xml)