def test_save_model_with_writable_caches(self):
    # Both cache elements are writable, so save_model should write the
    # serialized arrays into them.
    expected_mean_vec = numpy.array([1, 2, 3])
    expected_rotation = numpy.eye(3)

    expected_mean_vec_bytes = BytesIO()
    # noinspection PyTypeChecker
    numpy.save(expected_mean_vec_bytes, expected_mean_vec)
    expected_mean_vec_bytes = expected_mean_vec_bytes.getvalue()

    expected_rotation_bytes = BytesIO()
    # noinspection PyTypeChecker
    numpy.save(expected_rotation_bytes, expected_rotation)
    expected_rotation_bytes = expected_rotation_bytes.getvalue()

    itq = ItqFunctor()
    itq.mean_vec = expected_mean_vec
    itq.rotation = expected_rotation
    itq.mean_vec_cache_elem = DataMemoryElement(readonly=False)
    itq.rotation_cache_elem = DataMemoryElement(readonly=False)

    itq.save_model()
    self.assertEqual(itq.mean_vec_cache_elem.get_bytes(),
                     expected_mean_vec_bytes)
    self.assertEqual(itq.rotation_cache_elem.get_bytes(),
                     expected_rotation_bytes)
class TFramedTransport(TTransportBase, CReadableTransport):
    """Class that wraps another transport and frames its I/O when writing."""

    def __init__(self, trans):
        self.__trans = trans
        self.__rbuf = BytesIO()
        self.__wbuf = BytesIO()

    def isOpen(self):
        return self.__trans.isOpen()

    def open(self):
        return self.__trans.open()

    def close(self):
        return self.__trans.close()

    def read(self, sz):
        ret = self.__rbuf.read(sz)
        if len(ret) != 0:
            return ret
        self.readFrame()
        return self.__rbuf.read(sz)

    def readFrame(self):
        buff = self.__trans.readAll(4)
        sz, = unpack('!i', buff)
        self.__rbuf = BytesIO(self.__trans.readAll(sz))

    def write(self, buf):
        self.__wbuf.write(buf)

    def flush(self):
        wout = self.__wbuf.getvalue()
        wsz = len(wout)
        # reset wbuf before write/flush to preserve state on underlying failure
        self.__wbuf = BytesIO()
        # N.B.: Doing this string concatenation is WAY cheaper than making
        # two separate calls to the underlying socket object. Socket writes in
        # Python turn out to be REALLY expensive, but it seems to do a pretty
        # good job of managing string buffer operations without excessive
        # copies.
        buf = pack("!i", wsz) + wout
        self.__trans.write(buf)
        self.__trans.flush()

    # Implement the CReadableTransport interface.
    @property
    def cstringio_buf(self):
        return self.__rbuf

    def cstringio_refill(self, prefix, reqlen):
        # self.__rbuf will already be empty here because fastbinary doesn't
        # ask for a refill until the previous buffer is empty. Therefore,
        # we can start reading new frames immediately.
        while len(prefix) < reqlen:
            self.readFrame()
            prefix += self.__rbuf.getvalue()
        self.__rbuf = BytesIO(prefix)
        return self.__rbuf
def test_write(self):
    r = StringIO('ab')
    n = FrenchNormalizer(r)
    m = Matrix()
    w = BytesIO()
    m.feed(n)
    m.dump(w)
    expected = {
        (None,): {False: 1},
        (False,): {'a': 1},
        ('a',): {'b': 1},
        ('b',): {True: 1},
    }
    print(pickle.loads(w.getvalue()))
    print(expected)
    assert pickle.loads(w.getvalue()) == expected
    w.close()
def figure2base64(fig):
    io = BytesIO()
    fig.savefig(io, format='png')
    try:
        fig_base64 = base64.encodebytes(io.getvalue())   # py3
    except AttributeError:
        fig_base64 = base64.encodestring(io.getvalue())  # py2
    return fig_base64
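# A minimal usage sketch for figure2base64 (an addition for illustration,
# not part of the original snippet). Base64-encoded PNG data always starts
# with 'iVBOR', the encoding of the PNG magic bytes, so we can sanity-check
# the output. The Agg backend avoids needing a display.
def _figure2base64_example():
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt
    fig = plt.figure()
    encoded = figure2base64(fig)
    assert encoded.startswith(b'iVBOR')
    plt.close(fig)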
def test_50_get(self):
    io = BytesIO()
    self.webdav.download('handler.py', io)
    self.assertEqual(inspect.getsource(data_handler), io.getvalue())
    io.close()

    io = BytesIO()
    self.webdav.download('sample_handler.py', io)
    self.assertEqual(inspect.getsource(data_sample_handler), io.getvalue())
    io.close()
def test_send_chunk():
    v = b"foobarfoobar"
    for bs in range(1, len(v) + 2):
        s = BytesIO()
        writer.send_chunk(s, v, bs, 0, len(v))
        assert s.getvalue() == v
        for start in range(len(v)):
            for end in range(len(v)):
                s = BytesIO()
                writer.send_chunk(s, v, bs, start, end)
                assert s.getvalue() == v[start:end]
def testExportDoesntIncludeParent(self):
    self.app = self.layer['app']
    self.app._setOb('foo', DummyFolder('foo'))
    foo = self.app.foo
    foo['bar'] = DummyFolder('bar')
    savepoint(optimistic=True)  # savepoint assigns oids
    # now let's export to a buffer and check the objects...
    exp = BytesIO()
    self.app._p_jar.exportFile(foo.bar._p_oid, exp)
    self.assertTrue(b'bar' in exp.getvalue())
    self.assertFalse(b'foo' in exp.getvalue())
class UniversalBytesIO(object):

    def __init__(self, container=None, charset=None):
        self.charset = charset or settings.DEFAULT_CHARSET
        self._container = BytesIO() if container is None else container

    # These methods partially implement the file-like object interface.
    # See https://docs.python.org/3/library/io.html#io.IOBase
    def close(self):
        self._container.close()

    def write(self, content):
        self._container.write(self.make_bytes(content))

    def flush(self):
        self._container.flush()

    def tell(self):
        return self._container.tell()

    def readable(self):
        return False

    def seekable(self):
        return False

    def writable(self):
        return True

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def make_bytes(self, value):
        """Turn a value into a bytestring encoded in the output charset."""
        if isinstance(value, bytes):
            return bytes(value)
        if isinstance(value, six.text_type):
            return bytes(value.encode(self.charset))
        # Handle non-string types
        return force_bytes(value, self.charset)

    def get_string_value(self):
        return self._container.getvalue().decode(self.charset)

    def getvalue(self):
        return self._container.getvalue()

    if sys.version_info[0:2] < (3, 5):
        def seek(self, *args, **kwargs):
            pass
def test_raw(self):
    s = BytesIO()
    r = next(language.parse_pathod("400:b'foo'"))
    language.serve(r, s, {})
    v = s.getvalue()
    assert b"Content-Length" in v

    s = BytesIO()
    r = next(language.parse_pathod("400:b'foo':r"))
    language.serve(r, s, {})
    v = s.getvalue()
    assert b"Content-Length" not in v
def test_write_values():
    tst = b"foobarvoing"
    s = BytesIO()
    writer.write_values(s, [tst], [])
    assert s.getvalue() == tst

    for bs in range(1, len(tst) + 2):
        for off in range(len(tst)):
            s = BytesIO()
            writer.write_values(s, [tst], [(off, "disconnect")], blocksize=bs)
            assert s.getvalue() == tst[:off]
def test_write_values_inject():
    tst = b"foo"

    s = BytesIO()
    writer.write_values(s, [tst], [(0, "inject", b"aaa")], blocksize=5)
    assert s.getvalue() == b"aaafoo"

    s = BytesIO()
    writer.write_values(s, [tst], [(1, "inject", b"aaa")], blocksize=5)
    assert s.getvalue() == b"faaaoo"
def gzip_text(text):
    contents = BytesIO()
    f = gzip.GzipFile(fileobj=contents, mode='wb')
    f.write(util.encode_text(text))
    f.flush()
    f.close()
    return contents.getvalue()
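# Hypothetical round-trip check for gzip_text (an illustration only; it
# assumes util.encode_text is plain UTF-8 encoding, which is not shown in
# this snippet, and uses gzip.decompress from Python 3).
def _gzip_text_roundtrip_example():
    import gzip as gzip_mod
    assert gzip_mod.decompress(gzip_text(u"hello")) == b"hello"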
def make_options_body(self):
    options_buf = BytesIO()
    write_stringmultimap(options_buf, {
        'CQL_VERSION': ['3.0.1'],
        'COMPRESSION': [],
    })
    return options_buf.getvalue()
def _serve_compressed_histograms(self, query_params):
    """Given a tag and single run, return an array of compressed histograms."""
    tag = query_params.get('tag')
    run = query_params.get('run')
    compressed_histograms = self._multiplexer.CompressedHistograms(run, tag)
    if query_params.get('format') == _OutputFormat.CSV:
        string_io = BytesIO()
        writer = csv.writer(string_io)
        # Build the headers; we have two columns for timing and two columns
        # for each compressed histogram bucket.
        headers = ['Wall time', 'Step']
        if compressed_histograms:
            bucket_count = len(
                compressed_histograms[0].compressed_histogram_values)
            for i in xrange(bucket_count):
                headers += ['Edge %d basis points' % i, 'Edge %d value' % i]
        writer.writerow(headers)
        for compressed_histogram in compressed_histograms:
            row = [compressed_histogram.wall_time, compressed_histogram.step]
            for value in compressed_histogram.compressed_histogram_values:
                row += [value.rank_in_bps, value.value]
            writer.writerow(row)
        self._send_csv_response(string_io.getvalue())
    else:
        self._send_json_response(compressed_histograms)
def visit_immutation(self, node, children):
    context = self._final_context()
    child_type = children[0].expr_name

    if child_type == 'preview':
        if self.tool == 'httpie':
            command = ['http'] + context.httpie_args(self.method, quote=True)
        else:
            assert self.tool == 'curl'
            command = ['curl'] + context.curl_args(self.method, quote=True)
        click.echo(' '.join(command))
    elif child_type == 'action':
        output = BytesIO()
        try:
            env = Environment(stdout=output, is_windows=False)
            httpie_main(context.httpie_args(self.method), env=env)
            content = output.getvalue()
        finally:
            output.close()

        # XXX: Work around a bug of click.echo_via_pager(). When you pass
        # a bytestring to echo_via_pager(), it converts the bytestring with
        # str(b'abc'), which makes it "b'abc'".
        if six.PY2:
            content = unicode(content, 'utf-8')  # noqa
        else:
            content = str(content, 'utf-8')
        click.echo_via_pager(content)

    return node
def test_disable_compression(self, *args):
    c = self.make_connection()
    c._callbacks = {0: c._handle_options_response}
    c.defunct = Mock()
    # disable compression
    c.compression = False

    locally_supported_compressions.pop('lz4', None)
    locally_supported_compressions.pop('snappy', None)
    locally_supported_compressions['lz4'] = ('lz4compress', 'lz4decompress')
    locally_supported_compressions['snappy'] = ('snappycompress',
                                                'snappydecompress')

    # read in a SupportedMessage response
    header = self.make_header_prefix(SupportedMessage)

    # the server advertises both snappy and lz4
    options_buf = BytesIO()
    write_stringmultimap(options_buf, {
        'CQL_VERSION': ['3.0.3'],
        'COMPRESSION': ['snappy', 'lz4'],
    })
    options = options_buf.getvalue()

    message = self.make_msg(header, options)
    c.process_msg(message, len(message) - 8)
    self.assertEqual(c.decompressor, None)
def handle(name, cfg, cloud, log, _args):
    mycfg = cfg.get('random_seed', {})
    seed_path = mycfg.get('file', '/dev/urandom')
    seed_data = mycfg.get('data', b'')

    seed_buf = BytesIO()
    if seed_data:
        seed_buf.write(_decode(seed_data, encoding=mycfg.get('encoding')))

    # 'random_seed' is set up by Azure datasource, and comes already in
    # openstack meta_data.json
    metadata = cloud.datasource.metadata
    if metadata and 'random_seed' in metadata:
        seed_buf.write(util.encode_text(metadata['random_seed']))

    seed_data = seed_buf.getvalue()
    if len(seed_data):
        log.debug("%s: adding %s bytes of random seed entropy to %s",
                  name, len(seed_data), seed_path)
        util.append_file(seed_path, seed_data)

    command = mycfg.get('command', None)
    req = mycfg.get('command_required', False)
    try:
        env = os.environ.copy()
        env['RANDOM_SEED_FILE'] = seed_path
        handle_random_seed_command(command=command, required=req, env=env)
    except ValueError as e:
        log.warn("handling random command [%s] failed: %s", command, e)
        raise e
def test_encode_decode_empty_string():
    # This is a regression test for
    # https://github.com/luispedro/jug/issues/39
    s = BytesIO()
    jug.backends.encode.encode_to('', s)
    val = jug.backends.encode.decode_from(BytesIO(s.getvalue()))
    assert val == ''
def test_requested_compression_not_available(self, *args):
    c = self.make_connection()
    c._callbacks = {0: c._handle_options_response}
    c.defunct = Mock()
    # request lz4 compression
    c.compression = "lz4"

    locally_supported_compressions.pop('lz4', None)
    locally_supported_compressions.pop('snappy', None)
    locally_supported_compressions['lz4'] = ('lz4compress', 'lz4decompress')
    locally_supported_compressions['snappy'] = ('snappycompress',
                                                'snappydecompress')

    # read in a SupportedMessage response
    header = self.make_header_prefix(SupportedMessage)

    # the server only supports snappy
    options_buf = BytesIO()
    write_stringmultimap(options_buf, {
        'CQL_VERSION': ['3.0.3'],
        'COMPRESSION': ['snappy'],
    })
    options = options_buf.getvalue()

    message = self.make_msg(header, options)
    c.process_msg(message, len(message) - 8)

    # make sure it errored correctly
    c.defunct.assert_called_once_with(ANY)
    args, kwargs = c.defunct.call_args
    self.assertIsInstance(args[0], ProtocolError)
def compress_string(s):
    # avg_block_size is actually the reciprocal of the average
    # intended interflush distance.
    rnd = Random(s)
    flushes_remaining = FLUSH_LIMIT
    if len(s) < AVERAGE_SPAN_BETWEEN_FLUSHES * APPROX_MIN_FLUSHES:
        avg_block_size = APPROX_MIN_FLUSHES / float(len(s) + 1)
    else:
        avg_block_size = 1.0 / AVERAGE_SPAN_BETWEEN_FLUSHES
    s = StringIO(s) if isinstance(s, six.text_type) else BytesIO(s)
    zbuf = BytesIO()
    zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
    chunk = s.read(MIN_INTERFLUSH_INTERVAL +
                   int(rnd.expovariate(avg_block_size)))
    while chunk and flushes_remaining:
        zfile.write(chunk)
        zfile.flush()
        flushes_remaining -= 1
        chunk = s.read(MIN_INTERFLUSH_INTERVAL +
                       int(rnd.expovariate(avg_block_size)))
    zfile.write(chunk)
    zfile.write(s.read())
    zfile.close()
    return zbuf.getvalue()
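# A sketch of the expected round-trip property of compress_string: the
# interleaved full flushes change where sync points land in the stream but
# not the decompressed payload, so gzip.decompress should recover the input
# exactly. This assumes the module constants (FLUSH_LIMIT and friends) are
# defined as in the original source.
def _compress_string_roundtrip_example():
    import gzip as gzip_mod
    payload = b"x" * 10000
    assert gzip_mod.decompress(compress_string(payload)) == payload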
def test_get(self):
    image = Image.new('RGBA', (8, 8))
    image.paste((255, 0, 0, 0), (0, 0, 4, 4))
    image.paste((0, 255, 0, 0), (0, 4, 4, 8))
    image.paste((0, 0, 255, 0), (4, 0, 8, 4))
    image.paste((0, 0, 0, 255), (4, 4, 8, 8))
    string_io = StringIO()
    image.save(string_io, 'PNG')
    tile = Tile(TileCoord(1, 0, 0, 2), data=string_io.getvalue())
    tiles = list(self.mtsts.get([tile]))
    self.assertEqual(len(tiles), 4)
    self.assertEqual(tiles[0].tilecoord, TileCoord(1, 0, 0))
    image = Image.open(StringIO(tiles[0].data))
    self.assertEqual(image.size, (2, 2))
    self.assertEqual(image.getcolors(), [(4, (255, 0, 0, 0))])
    self.assertEqual(tiles[1].tilecoord, TileCoord(1, 0, 1))
    image = Image.open(StringIO(tiles[1].data))
    self.assertEqual(image.size, (2, 2))
    self.assertEqual(image.getcolors(), [(4, (0, 255, 0, 0))])
    self.assertEqual(tiles[2].tilecoord, TileCoord(1, 1, 0))
    image = Image.open(StringIO(tiles[2].data))
    self.assertEqual(image.size, (2, 2))
    self.assertEqual(image.getcolors(), [(4, (0, 0, 255, 0))])
    self.assertEqual(tiles[3].tilecoord, TileCoord(1, 1, 1))
    image = Image.open(StringIO(tiles[3].data))
    self.assertEqual(image.size, (2, 2))
    self.assertEqual(image.getcolors(), [(4, (0, 0, 0, 255))])
def test_simple_proguard_upload(self):
    project = self.create_project(name='foo')
    url = reverse('sentry-api-0-dsym-files', kwargs={
        'organization_slug': project.organization.slug,
        'project_slug': project.slug,
    })

    self.login_as(user=self.user)

    out = BytesIO()
    f = zipfile.ZipFile(out, 'w')
    f.writestr('proguard/%s.txt' % PROGUARD_UUID, PROGUARD_SOURCE)
    f.writestr('ignored-file.txt', b'This is just some stuff')
    f.close()

    response = self.client.post(url, {
        'file': SimpleUploadedFile('symbols.zip', out.getvalue(),
                                   content_type='application/zip'),
    }, format='multipart')

    assert response.status_code == 201, response.content
    assert len(response.data) == 1
    assert response.data[0]['headers'] == {
        'Content-Type': 'text/x-proguard+plain'
    }
    assert response.data[0]['sha1'] == 'e6d3c5185dac63eddfdc1a5edfffa32d46103b44'
    assert response.data[0]['uuid'] == PROGUARD_UUID
    assert response.data[0]['objectName'] == 'proguard-mapping'
    assert response.data[0]['cpuName'] == 'any'
    assert response.data[0]['symbolType'] == 'proguard'
def serialize(self, destination=None, encoding="utf-8", format='xml',
              **args):
    if self.type in ('CONSTRUCT', 'DESCRIBE'):
        return self.graph.serialize(
            destination, encoding=encoding, format=format, **args)

    # stolen wholesale from graph.serialize
    from rdflib import plugin
    serializer = plugin.get(format, ResultSerializer)(self)
    if destination is None:
        stream = BytesIO()
        stream2 = EncodeOnlyUnicode(stream)
        serializer.serialize(stream2, encoding=encoding, **args)
        return stream.getvalue()
    if hasattr(destination, "write"):
        stream = destination
        serializer.serialize(stream, encoding=encoding, **args)
    else:
        location = destination
        scheme, netloc, path, params, query, fragment = urlparse(location)
        if netloc != "":
            print("WARNING: not saving as location "
                  "is not a local file reference")
            return
        fd, name = tempfile.mkstemp()
        stream = os.fdopen(fd, 'wb')
        serializer.serialize(stream, encoding=encoding, **args)
        stream.close()
        if hasattr(shutil, "move"):
            shutil.move(name, path)
        else:
            shutil.copy(name, path)
            os.remove(name)
def test_requested_compression_not_available(self, *args):
    c = self.make_connection()
    c._requests = {0: (c._handle_options_response,
                       ProtocolHandler.decode_message)}
    c.defunct = Mock()
    # request lz4 compression
    c.compression = "lz4"

    locally_supported_compressions.pop('lz4', None)
    locally_supported_compressions.pop('snappy', None)
    locally_supported_compressions['lz4'] = ('lz4compress', 'lz4decompress')
    locally_supported_compressions['snappy'] = ('snappycompress',
                                                'snappydecompress')

    # read in a SupportedMessage response
    header = self.make_header_prefix(SupportedMessage)

    # the server only supports snappy
    options_buf = BytesIO()
    write_stringmultimap(options_buf, {
        'CQL_VERSION': ['3.0.3'],
        'COMPRESSION': ['snappy'],
    })
    options = options_buf.getvalue()

    c.process_msg(
        _Frame(version=4, flags=0, stream=0,
               opcode=SupportedMessage.opcode,
               body_offset=9, end_pos=9 + len(options)),
        options)

    # make sure it errored correctly
    c.defunct.assert_called_once_with(ANY)
    args, kwargs = c.defunct.call_args
    self.assertIsInstance(args[0], ProtocolError)
def test_use_requested_compression(self, *args):
    c = self.make_connection()
    c._requests = {0: (c._handle_options_response,
                       ProtocolHandler.decode_message)}
    c.defunct = Mock()
    # request snappy compression
    c.compression = "snappy"

    locally_supported_compressions.pop('lz4', None)
    locally_supported_compressions.pop('snappy', None)
    locally_supported_compressions['lz4'] = ('lz4compress', 'lz4decompress')
    locally_supported_compressions['snappy'] = ('snappycompress',
                                                'snappydecompress')

    # read in a SupportedMessage response
    header = self.make_header_prefix(SupportedMessage)

    # the server supports both snappy and lz4
    options_buf = BytesIO()
    write_stringmultimap(options_buf, {
        'CQL_VERSION': ['3.0.3'],
        'COMPRESSION': ['snappy', 'lz4'],
    })
    options = options_buf.getvalue()

    c.process_msg(
        _Frame(version=4, flags=0, stream=0,
               opcode=SupportedMessage.opcode,
               body_offset=9, end_pos=9 + len(options)),
        options)

    self.assertEqual(c.decompressor,
                     locally_supported_compressions['snappy'][1])
def serialize(self, xid=None):
    buff = BytesIO()
    formats = []
    data = []
    if xid is not None:
        formats.append(Int.fmt)
        data.append(xid)
    if self.opcode:
        formats.append(Int.fmt)
        data.append(self.opcode)
    for request in self.requests:
        header = MultiHeader(type=request.opcode, done=False, error=-1)
        header_format, header_data = header.render()
        formats.append(header_format)
        data.extend(header_data)

        payload_format, payload_data = request.render()
        formats.append(payload_format)
        data.extend(payload_data)
    footer = MultiHeader(type=-1, done=True, error=-1)
    footer_format, footer_data = footer.render()
    formats.append(footer_format)
    data.extend(footer_data)
    buff.write(struct.pack("!" + "".join(formats), *data))
    return buff.getvalue()
def __init__(self, host, port, backend_mod=None, pool=None,
             is_ssl=False, extra_headers=[], proxy_pieces=None, **ssl_args):
    # connect the socket; if we are using an SSL connection, we wrap
    # the socket.
    self._s = backend_mod.Socket(socket.AF_INET, socket.SOCK_STREAM)
    self._s.connect((host, port))
    if proxy_pieces:
        self._s.sendall(proxy_pieces)
        # recv() returns bytes, so accumulate the proxy response in a
        # BytesIO to make the b'\r\n\r\n' comparison valid.
        response = BytesIO()
        while response.getvalue()[-4:] != b'\r\n\r\n':
            response.write(self._s.recv(1))
        response.close()
    if is_ssl:
        self._s = ssl.wrap_socket(self._s, **ssl_args)
    self.extra_headers = extra_headers
    self.is_ssl = is_ssl
    self.backend_mod = backend_mod
    self.host = host
    self.port = port
    self._connected = True
    self._life = time.time() - random.randint(0, 10)
    self._pool = pool
    self._released = False
def gzip(f, *args, **kwargs):
    """GZip Flask Response Decorator."""
    data = f(*args, **kwargs)

    if isinstance(data, Response):
        content = data.data
    else:
        content = data

    gzip_buffer = BytesIO()
    gzip_file = gzip2.GzipFile(
        mode='wb',
        compresslevel=4,
        fileobj=gzip_buffer,
    )
    gzip_file.write(content)
    gzip_file.close()

    gzip_data = gzip_buffer.getvalue()

    if isinstance(data, Response):
        data.data = gzip_data
        data.headers['Content-Encoding'] = 'gzip'
        data.headers['Content-Length'] = str(len(data.data))
        return data

    return gzip_data
def test_decode_response_gzip():
    body = b'gzip message'

    buf = BytesIO()
    f = gzip.GzipFile('a', fileobj=buf, mode='wb')
    f.write(body)
    f.close()

    compressed_body = buf.getvalue()
    buf.close()

    gzip_response = {
        'body': {'string': compressed_body},
        'headers': {
            'access-control-allow-credentials': ['true'],
            'access-control-allow-origin': ['*'],
            'connection': ['keep-alive'],
            'content-encoding': ['gzip'],
            'content-length': ['177'],
            'content-type': ['application/json'],
            'date': ['Wed, 02 Dec 2015 19:44:32 GMT'],
            'server': ['nginx'],
        },
        'status': {'code': 200, 'message': 'OK'},
    }
    decoded_response = decode_response(gzip_response)
    assert decoded_response['body']['string'] == body
    assert decoded_response['headers']['content-length'] == [str(len(body))]
def OnFileViewHTML(self, evt):
    # Get an instance of the html file handler, use it to save the
    # document to a BytesIO stream, and then display the
    # resulting html text in a dialog with a HtmlWindow.
    handler = rt.RichTextHTMLHandler()
    handler.SetFlags(rt.RICHTEXT_HANDLER_SAVE_IMAGES_TO_MEMORY)
    handler.SetFontSizeMapping([7, 9, 11, 12, 14, 22, 100])

    stream = BytesIO()
    if not handler.SaveStream(self.rtc.GetBuffer(), stream):
        return

    import wx.html
    dlg = wx.Dialog(self, title="HTML",
                    style=wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER)
    html = wx.html.HtmlWindow(dlg, size=(500, 400), style=wx.BORDER_SUNKEN)
    html.SetPage(stream.getvalue())
    btn = wx.Button(dlg, wx.ID_CANCEL)
    sizer = wx.BoxSizer(wx.VERTICAL)
    sizer.Add(html, 1, wx.ALL | wx.EXPAND, 5)
    sizer.Add(btn, 0, wx.ALL | wx.CENTER, 10)
    dlg.SetSizer(sizer)
    sizer.Fit(dlg)
    dlg.ShowModal()
    handler.DeleteTemporaryImages()
def plot_spectrograms(specs, titles, stop_token_pred, audio_length,
                      logdir, train_step, stop_token_target=None,
                      number=0, append=False, save_to_tensorboard=False):
    """Helper function to create an image to be logged to disk or a
    tf.Summary to be logged to tensorboard.

    Args:
        specs (array): array of images to show
        titles (array): array of titles. Must match lengths of specs array
        stop_token_pred (np.array): np.array of size [time, 1] containing
            the stop token predictions from the model.
        audio_length (int): length of the predicted spectrogram
        logdir (str): dir to save image file if save_to_tensorboard is
            disabled.
        train_step (int): current training step
        stop_token_target (np.array): np.array of size [time, 1] containing
            the stop token target.
        number (int): Current sample number (used if evaluating more than
            1 sample from a batch)
        append (str): Optional string to append to file name eg. train,
            eval, infer
        save_to_tensorboard (bool): If False, the created image is saved to
            the logdir as a png file. If True, the function returns a
            tf.Summary object containing the image and will be logged to
            the current tensorboard file.

    Returns:
        tf.Summary or None
    """
    num_figs = len(specs) + 1
    fig, ax = plt.subplots(nrows=num_figs, figsize=(8, num_figs * 3))

    for i, (spec, title) in enumerate(zip(specs, titles)):
        spec = np.pad(spec, ((1, 1), (1, 1)), "constant", constant_values=0.)
        spec = spec.astype(float)
        colour = ax[i].imshow(spec.T, cmap='viridis', interpolation=None,
                              aspect='auto')
        ax[i].invert_yaxis()
        ax[i].set_title(title)
        fig.colorbar(colour, ax=ax[i])

    if stop_token_target is not None:
        stop_token_target = stop_token_target.astype(float)
        ax[-1].plot(stop_token_target, 'r.')

    stop_token_pred = stop_token_pred.astype(float)
    ax[-1].plot(stop_token_pred, 'g.')
    ax[-1].axvline(x=audio_length)
    ax[-1].set_xlim(0, len(specs[0]))
    ax[-1].set_title("stop token")

    plt.xlabel('time')
    plt.tight_layout()
    cb = fig.colorbar(colour, ax=ax[-1])
    cb.remove()

    if save_to_tensorboard:
        tag = "{}_image".format(append)
        iostream = BytesIO()
        fig.savefig(iostream, dpi=300)
        summary = tf.Summary.Image(encoded_image_string=iostream.getvalue(),
                                   height=int(fig.get_figheight() * 300),
                                   width=int(fig.get_figwidth() * 300))
        summary = tf.Summary.Value(tag=tag, image=summary)
        plt.close(fig)
        return summary
    else:
        if append:
            name = '{}/Output_step{}_{}_{}.png'.format(logdir, train_step,
                                                       number, append)
        else:
            name = '{}/Output_step{}_{}.png'.format(logdir, train_step, number)
        if logdir[0] != '/':
            name = "./" + name
        # save
        fig.savefig(name, dpi=300)
        plt.close(fig)
        return None
def download_image(request, datafile_id, region, size, rotation,
                   quality, format=None):  # @ReservedAssignment
    # Get datafile (and return an empty response if absent)
    try:
        datafile = DataFile.objects.get(pk=datafile_id)
    except DataFile.DoesNotExist:
        return HttpResponse('')

    is_public = datafile.is_public()
    if not is_public:
        # Check the user has access to the datafile
        if not has_datafile_download_access(request=request,
                                            datafile_id=datafile.id):
            return HttpResponse('')

    buf = BytesIO()
    try:
        file_obj = datafile.get_image_data()
        if file_obj is None:
            return HttpResponse('')
        from contextlib import closing
        with closing(file_obj) as f:
            with Image(file=f) as img:
                if len(img.sequence) > 1:
                    img = Image(img.sequence[0])
                # Handle region
                if region != 'full':
                    x, y, w, h = map(int, region.split(','))
                    img.crop(x, y, width=w, height=h)
                # Handle size
                if size != 'full':
                    # Check the image isn't empty
                    if 0 in (img.height, img.width):
                        return _bad_request('size',
                                            'Cannot resize empty image')
                    # Attempt resize
                    if not _do_resize(img, size):
                        return _bad_request('size',
                                            'Invalid size argument: %s' % size)
                # Handle rotation
                if rotation:
                    img.rotate(float(rotation))
                # Handle quality (mostly by rejecting it)
                if quality not in ['native', 'color']:
                    return _get_iiif_error(
                        'quality',
                        'This server does not support greyscale or '
                        'bitonal quality.')
                # Handle format
                if format:
                    mimetype = mimetypes.types_map['.%s' % format.lower()]
                    img.format = format
                    if mimetype not in ALLOWED_MIMETYPES:
                        return _invalid_media_response()
                else:
                    mimetype = datafile.get_mimetype()
                    # If the native format is not allowed, pretend it
                    # doesn't exist.
                    if mimetype not in ALLOWED_MIMETYPES:
                        return HttpResponse('')
                img.save(file=buf)
                response = HttpResponse(buf.getvalue(), content_type=mimetype)
                response['Content-Disposition'] = \
                    'inline; filename="%s.%s"' % (datafile.filename, format)
                # Set Cache
                if is_public:
                    patch_cache_control(response, public=True,
                                        max_age=MAX_AGE)
                else:
                    patch_cache_control(response, private=True,
                                        max_age=MAX_AGE)
                return response
    except WandException:
        return HttpResponse('')
    except ValueError:
        return HttpResponse('')
    except IOError:
        return HttpResponse('')
def respond(self, call_request):
    """Called by a server to deserialize a request, compute and serialize
    a response or error. Compare to 'handle()' in Thrift.
    """
    buffer_reader = BytesIO(call_request)
    buffer_decoder = io.BinaryDecoder(buffer_reader)
    buffer_writer = BytesIO()
    buffer_encoder = io.BinaryEncoder(buffer_writer)
    error = None
    response_metadata = {}

    try:
        remote_protocol = self.process_handshake(buffer_decoder,
                                                 buffer_encoder)
        # handshake failure
        if remote_protocol is None:
            return buffer_writer.getvalue()

        # read request using remote protocol
        request_metadata = META_READER.read(buffer_decoder)
        remote_message_name = buffer_decoder.read_utf8()

        # get remote and local request schemas so we can do
        # schema resolution (one fine day)
        remote_message = remote_protocol.messages.get(remote_message_name)
        if remote_message is None:
            fail_msg = 'Unknown remote message: %s' % remote_message_name
            raise schema.AvroException(fail_msg)
        local_message = self.local_protocol.messages.get(remote_message_name)
        if local_message is None:
            fail_msg = 'Unknown local message: %s' % remote_message_name
            raise schema.AvroException(fail_msg)

        writers_schema = remote_message.request
        readers_schema = local_message.request
        request = self.read_request(writers_schema, readers_schema,
                                    buffer_decoder)

        # perform server logic
        try:
            response = self.invoke(local_message, request)
        except AvroRemoteException as e:
            error = e
        except Exception as e:
            error = AvroRemoteException(str(e))

        # write response using local protocol
        META_WRITER.write(response_metadata, buffer_encoder)
        buffer_encoder.write_boolean(error is not None)
        if error is None:
            writers_schema = local_message.response
            self.write_response(writers_schema, response, buffer_encoder)
        else:
            writers_schema = local_message.errors
            self.write_error(writers_schema, error, buffer_encoder)
    except schema.AvroException as e:
        error = AvroRemoteException(str(e))
        buffer_encoder = io.BinaryEncoder(BytesIO())
        META_WRITER.write(response_metadata, buffer_encoder)
        buffer_encoder.write_boolean(True)
        self.write_error(SYSTEM_ERROR_SCHEMA, error, buffer_encoder)
    return buffer_writer.getvalue()
def packed_checksum(self, data):
    """Given the data of the entire packet return the checksum bytes"""
    self.field.setval(self.algo(data[self.start:self.end]))
    sio = BytesIO()
    self.field.pack(sio)
    return sio.getvalue()
def _npy_serialize(data):
    buffer = BytesIO()
    np.save(buffer, data)
    return buffer.getvalue()
def _npy_dumps(data):
    # Serializes a numpy array into a stream of npy-formatted bytes.
    buffer = BytesIO()
    np.save(buffer, data)
    return buffer.getvalue()
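# Hypothetical inverse of _npy_dumps, added for illustration (not part of
# the original API): parse npy-formatted bytes back into a numpy array.
def _npy_loads(data):
    return np.load(BytesIO(data))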
def dumps(cls, env):
    # type: (BuildEnvironment) -> bytes
    io = BytesIO()
    cls.dump(env, io)
    return io.getvalue()
class TestCarbonReporter(TimedTestCase):

    def setUp(self):
        super(TestCarbonReporter, self).setUp()
        self.output = BytesIO()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None

    def connect(self, *args):
        # part of fake socket interface
        pass

    def sendall(self, data):
        # part of fake socket interface
        self.output.write(data)

    def close(self):
        # part of fake socket interface
        pass

    def tearDown(self):
        super(TestCarbonReporter, self).tearDown()

    def capture_test_metrics(self):
        self.clock.now = 1
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2 ** i)

        gcb = self.registry.gauge("gcb", lambda: 123)
        gsimple = self.registry.gauge("gsimple").set_value(42)

        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            self.clock.add(1)

    def test_report_now_plain(self):
        r = CarbonReporter(registry=self.registry, reporting_interval=1,
                           clock=self.clock, socket_factory=lambda: self)
        self.capture_test_metrics()
        r.report_now()
        test_data = sorted(self.output.getvalue().decode().splitlines())
        expected_data = sorted([
            'counter-2.count -2 2',
            'c1.count 1 2',
            'gsimple.value 42 2',
            'gcb.value 123 2',
            't1.1m_rate 0 2',
            't1.999_percentile 1 2',
            't1.15m_rate 0 2',
            't1.99_percentile 1 2',
            't1.mean_rate 1.0 2',
            't1.95_percentile 1 2',
            't1.min 1 2',
            't1.50_percentile 1 2',
            't1.5m_rate 0 2',
            't1.count 1.0 2',
            't1.75_percentile 1 2',
            't1.std_dev 0.0 2',
            't1.max 1 2',
            't1.sum 1.0 2',
            't1.avg 1.0 2',
            'hist.count 10.0 2',
            'hist.999_percentile 512 2',
            'hist.99_percentile 512 2',
            'hist.min 1 2',
            'hist.95_percentile 512 2',
            'hist.75_percentile 160.0 2',
            'hist.std_dev 164.94851048466944 2' if PY3
            else 'hist.std_dev 164.948510485 2',
            'hist.max 512 2',
            'hist.avg 102.3 2',
            'm1.count 1.0 2',
            'm1.1m_rate 0 2',
            'm1.15m_rate 0 2',
            'm1.5m_rate 0 2',
            'm1.mean_rate 1.0 2',
        ])
        self.assertEqual(test_data, expected_data)

    def test_report_now_pickle(self):
        r = CarbonReporter(registry=self.registry, reporting_interval=1,
                           clock=self.clock, socket_factory=lambda: self,
                           pickle_protocol=True)
        self.capture_test_metrics()
        r.report_now()
        test_data = sorted(pickle.loads(self.output.getvalue()[4:]))
        expected_data = sorted([
            ('counter-2.count', (2, -2.0)),
            ('c1.count', (2, 1)),
            ('gsimple.value', (2, 42.0)),
            ('gcb.value', (2, 123.0)),
            ('t1.1m_rate', (2, 0.0)),
            ('t1.999_percentile', (2, 1)),
            ('t1.15m_rate', (2, 0.0)),
            ('t1.99_percentile', (2, 1)),
            ('t1.mean_rate', (2, 1)),
            ('t1.95_percentile', (2, 1)),
            ('t1.min', (2, 1)),
            ('t1.50_percentile', (2, 1)),
            ('t1.5m_rate', (2, 0.0)),
            ('t1.count', (2, 1)),
            ('t1.75_percentile', (2, 1)),
            ('t1.std_dev', (2, 0.0)),
            ('t1.max', (2, 1)),
            ('t1.sum', (2, 1)),
            ('t1.avg', (2, 1)),
            ('hist.count', (2, 10.0)),
            ('hist.999_percentile', (2, 512.0)),
            ('hist.99_percentile', (2, 512.0)),
            ('hist.min', (2, 1)),
            ('hist.95_percentile', (2, 512.0)),
            ('hist.75_percentile', (2, 160.0)),
            ('hist.std_dev', (2, 164.94851048466944)),
            ('hist.max', (2, 512.0)),
            ('hist.avg', (2, 102.3)),
            ('m1.count', (2, 1)),
            ('m1.1m_rate', (2, 0.0)),
            ('m1.15m_rate', (2, 0.0)),
            ('m1.5m_rate', (2, 0.0)),
            ('m1.mean_rate', (2, 1)),
        ])
        self.assertEqual(test_data, expected_data)
def _serialize(message: Message) -> bytes:
    out = BytesIO() if six.PY3 else StringIO()
    with gzip.GzipFile(fileobj=out, mode="w") as f:
        f.write(message.SerializeToString())
    return out.getvalue()
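# A hypothetical inverse of _serialize, shown as a sketch: gunzip the
# payload and parse it back into a protobuf message of the given class.
# The message_cls parameter is an assumption, not part of the original API.
def _deserialize(data: bytes, message_cls) -> Message:
    buf = BytesIO(data) if six.PY3 else StringIO(data)
    with gzip.GzipFile(fileobj=buf, mode="r") as f:
        message = message_cls()
        message.ParseFromString(f.read())
    return message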
def test_real_resolving(self):
    url = reverse('sentry-api-0-dsym-files', kwargs={
        'organization_slug': self.project.organization.slug,
        'project_slug': self.project.slug,
    })

    self.login_as(user=self.user)

    out = BytesIO()
    f = zipfile.ZipFile(out, 'w')
    f.write(os.path.join(os.path.dirname(__file__), 'fixtures', 'hello.dsym'),
            'dSYM/hello')
    f.close()

    response = self.client.post(url, {
        'file': SimpleUploadedFile('symbols.zip', out.getvalue(),
                                   content_type='application/zip'),
    }, format='multipart')
    assert response.status_code == 201, response.content
    assert len(response.data) == 1

    event_data = {
        "project": self.project.id,
        "platform": "cocoa",
        "debug_meta": {
            "images": [{
                "type": "apple",
                "arch": "x86_64",
                "uuid": "502fc0a5-1ec1-3e47-9998-684fa139dca7",
                "image_vmaddr": "0x0000000100000000",
                "image_size": 4096,
                "image_addr": "0x0000000100000000",
                "name": "Foo.app/Contents/Foo"
            }],
            "sdk_info": {
                "dsym_type": "macho",
                "sdk_name": "macOS",
                "version_major": 10,
                "version_minor": 12,
                "version_patchlevel": 4,
            }
        },
        "sentry.interfaces.Exception": {
            "values": [{
                'stacktrace': {
                    "frames": [
                        {
                            "function": "unknown",
                            "instruction_addr": "0x0000000100000fa0"
                        },
                    ]
                },
                "type": "Fail",
                "value": "fail"
            }]
        },
    }

    resp = self._postWithHeader(event_data)
    assert resp.status_code == 200

    event = Event.objects.get()

    bt = event.interfaces['sentry.interfaces.Exception'].values[0].stacktrace
    frames = bt.frames

    assert frames[0].function == 'main'
    assert frames[0].filename == 'hello.c'
    assert frames[0].abs_path == '/tmp/hello.c'
    assert frames[0].lineno == 1
def departmentuser_csv_report():
    """Output data from all DepartmentUser objects to a CSV, unpacking
    the various JSONField values.
    Returns the CSV content as bytes that can be written to a response
    or file.
    """
    from .models import DepartmentUser
    FIELDS = [
        'email', 'username', 'given_name', 'surname', 'name',
        'preferred_name', 'title', 'name_update_reference', 'employee_id',
        'active', 'telephone', 'home_phone', 'mobile_phone', 'other_phone',
        'extension', 'expiry_date', 'org_unit', 'cost_centre', 'parent',
        'executive', 'vip', 'security_clearance', 'in_sync', 'contractor',
        'ad_deleted', 'o365_licence', 'shared_account',
        'populate_primary_group', 'notes', 'working_hours', 'sso_roles',
        'org_data', 'alesco_data', 'ad_data', 'extra_data', 'date_created',
        'date_ad_updated', 'date_updated', 'ad_dn', 'ad_guid']

    # Get any DepartmentUser with non-null alesco_data field.
    # alesco_data structure should be consistent for all (or null).
    du = DepartmentUser.objects.filter(alesco_data__isnull=False)[0]
    alesco_fields = du.alesco_data.keys()
    org_fields = {
        'department': ('units', 0, 'name'),
        'tier_2': ('units', 1, 'name'),
        'tier_3': ('units', 2, 'name'),
        'tier_4': ('units', 3, 'name'),
        'tier_5': ('units', 4, 'name'),
    }

    header = [f for f in FIELDS]
    # These fields appended manually:
    header.append('account_type')
    header.append('position_type')
    header += org_fields.keys()
    header += alesco_fields

    # Default the JSONField key lists so the row loop below is safe even
    # when no DepartmentUser has org_data or ad_data populated.
    cc_keys = []
    location_keys = []
    ad_keys = []

    # Get any DepartmentUser with non-null org_data field for the keys.
    if DepartmentUser.objects.filter(org_data__isnull=False).exists():
        du = DepartmentUser.objects.filter(org_data__isnull=False)[0]
        cc_keys = du.org_data['cost_centre'].keys()
        header += ['cost_centre_{}'.format(k) for k in cc_keys]
        location_keys = du.org_data['location'].keys()
        header += ['location_{}'.format(k) for k in location_keys]
        header.append('secondary_location')

    # Get any DepartmentUser with non-null ad_data field for the keys.
    if DepartmentUser.objects.filter(ad_data__isnull=False).exists():
        du = DepartmentUser.objects.filter(ad_data__isnull=False)[0]
        ad_keys = du.ad_data.keys()
        if 'mailbox' in ad_keys:
            ad_keys.remove('mailbox')  # Remove the nested object.
        header += ['ad_{}'.format(k) for k in ad_keys]

    # Write data for all DepartmentUser objects to the CSV.
    stream = BytesIO()
    wr = unicodecsv.writer(stream, encoding='utf-8')
    wr.writerow(header)
    for u in DepartmentUser.objects.all():
        record = []
        for f in FIELDS:
            record.append(getattr(u, f))
        try:  # Append account_type display value.
            record.append(u.get_account_type_display())
        except Exception:
            record.append('')
        try:  # Append position_type display value.
            record.append(u.get_position_type_display())
        except Exception:
            record.append('')
        for o in org_fields:
            try:
                src = u.org_data
                for x in org_fields[o]:
                    src = src[x]
                record.append(src)
            except Exception:
                record.append('')
        for a in alesco_fields:
            try:
                record.append(u.alesco_data[a])
            except Exception:
                record.append('')
        for i in cc_keys:
            try:
                record.append(u.org_data['cost_centre'][i])
            except Exception:
                record.append('')
        for i in location_keys:
            try:
                record.append(u.org_data['location'][i])
            except Exception:
                record.append('')
        if u.org_data and 'secondary_location' in u.org_data:
            record.append(u.org_data['secondary_location'])
        else:
            record.append('')
        for i in ad_keys:
            try:
                record.append(u.ad_data[i])
            except Exception:
                record.append('')
        # Write the row to the CSV stream.
        wr.writerow(record)

    return stream.getvalue()
def write_page(self, site, page, filename):
    self.out('Writing sitemap %s.' % filename, 2)
    old_page_md5 = None
    urls = []

    if conf.MOCK_SITE:
        if conf.MOCK_SITE_NAME is None:
            raise ImproperlyConfigured(
                "STATICSITEMAPS_MOCK_SITE_NAME must not be None. "
                "Try setting to www.yoursite.com")
        from django.contrib.sites.requests import RequestSite
        from django.test.client import RequestFactory
        rs = RequestSite(RequestFactory().get(
            '/', SERVER_NAME=conf.MOCK_SITE_NAME))

    try:
        if callable(site):
            if conf.MOCK_SITE:
                urls.extend(site().get_urls(
                    page, rs, protocol=conf.MOCK_SITE_PROTOCOL))
            else:
                urls.extend(site().get_urls(page,
                                            protocol=conf.FORCE_PROTOCOL))
        else:
            if conf.MOCK_SITE:
                urls.extend(site.get_urls(
                    page, rs, protocol=conf.MOCK_SITE_PROTOCOL))
            else:
                urls.extend(site.get_urls(page,
                                          protocol=conf.FORCE_PROTOCOL))
    except EmptyPage:
        self.out("Page %s empty" % page)
    except PageNotAnInteger:
        self.out("No page '%s'" % page)

    lastmods = [lastmod for lastmod in [u.get('lastmod') for u in urls]
                if lastmod is not None]
    file_lastmod = max(lastmods) if len(lastmods) > 0 else None
    path = os.path.join(conf.ROOT_DIR, filename)
    template = getattr(site, 'sitemap_template', 'sitemap.xml')

    if self.storage.exists(path):
        old_page_md5 = self.read_hash(path)
        self.storage.delete(path)

    output = smart_str(loader.render_to_string(template, {'urlset': urls}))
    self._write(path, output)

    with self.storage.open(path) as sitemap_page:
        if old_page_md5 != self.get_hash(sitemap_page.read()):
            self.has_changes = True

    if conf.USE_GZIP:
        if conf.GZIP_METHOD not in ['python', 'system']:
            raise ImproperlyConfigured(
                "STATICSITEMAPS_GZIP_METHOD must be in ['python', 'system']")
        if conf.GZIP_METHOD == 'system' and \
                not os.path.exists(conf.SYSTEM_GZIP_PATH):
            raise ImproperlyConfigured(
                'STATICSITEMAPS_SYSTEM_GZIP_PATH does not exist')
        if conf.GZIP_METHOD == 'system' and \
                not isinstance(self.storage, FileSystemStorage):
            raise ImproperlyConfigured(
                'system gzip method can only be used with FileSystemStorage')

        if conf.GZIP_METHOD == 'system':
            # GZIP with system gzip binary
            subprocess.call([conf.SYSTEM_GZIP_PATH, '-f', path])
        else:
            # GZIP with python gzip lib
            try:
                gzipped_path = '%s.gz' % path
                if self.storage.exists(gzipped_path):
                    self.storage.delete(gzipped_path)
                self.out('Compressing...', 2)
                buf = BytesIO()
                with gzip.GzipFile(fileobj=buf, mode="w") as f:
                    f.write(output.encode('utf-8'))
                self.storage.save(gzipped_path, ContentFile(buf.getvalue()))
            except OSError:
                self.out("Compress %s file error" % path)

    return file_lastmod
def get_qrcode(self, data):
    img = qrcode.make(data, image_factory=SvgPathImage)
    buf = BytesIO()
    img.save(buf)
    return buf.getvalue()
def make_error_body(self, code, msg):
    buf = BytesIO()
    write_int(buf, code)
    write_string(buf, msg)
    return buf.getvalue()
def _plist_to_bin(self, plist):
    f = BytesIO()
    writePlist(plist, f)
    return f.getvalue()
def get_rendered_output(self):
    bio = BytesIO()
    self.workbook.save(bio)
    return bio.getvalue()
def test_70_get(self):
    io = BytesIO()
    self.webdav.download('sample_handler.py', io)
    self.assertEqual(utils.text(inspect.getsource(data_handler)),
                     utils.text(io.getvalue()))
    io.close()
def getvalue(self):
    ret = IO.getvalue(self)
    if isinstance(ret, six.binary_type):
        ret = ret.decode("utf-8")
    return ret
def __init__(self, plot, close_plot=True, bbox_inches="tight",
             width=None, height=None, **kwargs):
    """
    Create a block containing a matplotlib figure

    :param plot: A matplotlib figure, axes or artist object.
    :param close_plot: Optional (default=True). Set to True to close the
        plot after it is captured into an image and avoid lingering plot
        windows.
    :param bbox_inches: Optional bounding box parameter for
        'figure.savefig'.
    :param kwargs: Optional styling arguments. The `style` keyword argument
        has special meaning in that it allows styling to be grouped as one
        argument. It is also useful in case a styling parameter name
        clashes with a standard block parameter.
    """
    if not isinstance(plot, Artist):
        raise ValueError('PlotBlock contents must be matplotlib Artist')

    if isinstance(plot, Figure):
        figure = plot
    elif isinstance(plot, Artist):
        figure = plot.get_figure()
    else:
        raise ValueError("Unexpected plot object type %s" % type(plot))

    img_data = BytesIO()

    legends = []
    for ax in figure.get_axes():
        legend = ax.get_legend()
        if legend is not None:
            # Patch Legend.get_window_extent since there seems to be a bug
            # where it is passed an unexpected renderer instance. Bind the
            # original method via a default argument so that each legend
            # keeps its own reference (avoids the late-binding closure
            # pitfall in this loop).
            _orig_get_window_extent = legend.get_window_extent

            def _patched_get_window_extent(_orig=_orig_get_window_extent, *_):
                return _orig()

            legend.get_window_extent = _patched_get_window_extent
            legends.append(legend)

    if len(figure.axes) == 0:
        # empty plot; disable bbox_inches so that savefig still works
        bbox_inches = None

    figure.savefig(img_data, dpi=_PLOT_DPI, format=_PLOT_FORMAT,
                   bbox_extra_artists=legends, bbox_inches=bbox_inches)

    plt_width, plt_height = figure.get_size_inches()
    width = width or "{:0.3f}in".format(plt_width)
    height = height or "{:0.3f}in".format(plt_height)

    if close_plot:
        plt.close(figure)

    super(PlotBlock, self).__init__(img_data.getvalue(), _PLOT_MIME_TYPE,
                                    width=width, height=height, **kwargs)
def test_simple_cache_clear(self):
    project = self.create_project(name="foo")
    url = reverse(
        "sentry-api-0-dsym-files",
        kwargs={
            "organization_slug": project.organization.slug,
            "project_slug": project.slug,
        },
    )

    self.login_as(user=self.user)

    out = BytesIO()
    f = zipfile.ZipFile(out, "w")
    f.writestr("proguard/%s.txt" % PROGUARD_UUID, PROGUARD_SOURCE)
    f.writestr("ignored-file.txt", b"This is just some stuff")
    f.close()

    response = self.client.post(
        url,
        {
            "file": SimpleUploadedFile("symbols.zip", out.getvalue(),
                                       content_type="application/zip")
        },
        format="multipart",
    )

    assert response.status_code == 201, response.content
    assert len(response.data) == 1
    assert response.data[0]["headers"] == {
        "Content-Type": "text/x-proguard+plain"
    }
    assert response.data[0]["sha1"] == "e6d3c5185dac63eddfdc1a5edfffa32d46103b44"
    assert response.data[0]["uuid"] == PROGUARD_UUID
    assert response.data[0]["objectName"] == "proguard-mapping"
    assert response.data[0]["cpuName"] == "any"
    assert response.data[0]["symbolType"] == "proguard"

    difs = ProjectDebugFile.difcache.fetch_difs(
        project=project, debug_ids=[PROGUARD_UUID], features=["mapping"])
    assert len(difs) == 1
    assert os.path.isfile(difs[PROGUARD_UUID])

    # if we clear now, nothing happens
    ProjectDebugFile.difcache.clear_old_entries()
    assert os.path.isfile(difs[PROGUARD_UUID])

    # Put the time into the future
    real_time = time.time
    time.time = lambda: real_time() + 60 * 60 * 48
    try:
        ProjectDebugFile.difcache.clear_old_entries()
    finally:
        time.time = real_time

    # But it's gone now
    assert not os.path.isfile(difs[PROGUARD_UUID])
def _create_random_media_file(shop, file_path):
    path, name = os.path.split(file_path)
    pil_image = generate_image(2, 2)
    sio = BytesIO()
    pil_image.save(sio, "JPEG", quality=45)
    filer_file = filer_image_from_data(request=None, path=path,
                                       file_name=name,
                                       file_data=sio.getvalue())
    media_file = MediaFile.objects.create(file=filer_file)
    media_file.shops.add(shop)
    return media_file
def method(self, **kwargs):
    # Don't bother with doc string, it will be over-written by createMethod.

    for name in six.iterkeys(kwargs):
        if name not in parameters.argmap:
            raise TypeError('Got an unexpected keyword argument "%s"' % name)

    # Remove args that have a value of None.
    keys = list(kwargs.keys())
    for name in keys:
        if kwargs[name] is None:
            del kwargs[name]

    for name in parameters.required_params:
        if name not in kwargs:
            # temporary workaround for non-paging methods incorrectly
            # requiring a page token parameter (cf. drive.changes.watch
            # vs. drive.changes.list)
            if name not in _PAGE_TOKEN_NAMES or _findPageTokenName(
                    _methodProperties(methodDesc, schema, 'response')):
                raise TypeError('Missing required parameter "%s"' % name)

    for name, regex in six.iteritems(parameters.pattern_params):
        if name in kwargs:
            if isinstance(kwargs[name], six.string_types):
                pvalues = [kwargs[name]]
            else:
                pvalues = kwargs[name]
            for pvalue in pvalues:
                if re.match(regex, pvalue) is None:
                    raise TypeError(
                        'Parameter "%s" value "%s" does not match the '
                        'pattern "%s"' % (name, pvalue, regex))

    for name, enums in six.iteritems(parameters.enum_params):
        if name in kwargs:
            # We need to handle the case of a repeated enum
            # name differently, since we want to handle both
            # arg='value' and arg=['value1', 'value2']
            if (name in parameters.repeated_params and
                    not isinstance(kwargs[name], six.string_types)):
                values = kwargs[name]
            else:
                values = [kwargs[name]]
            for value in values:
                if value not in enums:
                    raise TypeError(
                        'Parameter "%s" value "%s" is not an allowed value '
                        'in "%s"' % (name, value, str(enums)))

    actual_query_params = {}
    actual_path_params = {}
    for key, value in six.iteritems(kwargs):
        to_type = parameters.param_types.get(key, 'string')
        # For repeated parameters we cast each member of the list.
        if key in parameters.repeated_params and type(value) == type([]):
            cast_value = [_cast(x, to_type) for x in value]
        else:
            cast_value = _cast(value, to_type)
        if key in parameters.query_params:
            actual_query_params[parameters.argmap[key]] = cast_value
        if key in parameters.path_params:
            actual_path_params[parameters.argmap[key]] = cast_value
    body_value = kwargs.get('body', None)
    media_filename = kwargs.get('media_body', None)
    media_mime_type = kwargs.get('media_mime_type', None)

    if self._developerKey:
        actual_query_params['key'] = self._developerKey

    model = self._model
    if methodName.endswith('_media'):
        model = MediaModel()
    elif 'response' not in methodDesc:
        model = RawModel()

    headers = {}
    headers, params, query, body = model.request(
        headers, actual_path_params, actual_query_params, body_value)

    expanded_url = uritemplate.expand(pathUrl, params)
    url = _urljoin(self._baseUrl, expanded_url + query)

    resumable = None
    multipart_boundary = ''

    if media_filename:
        # Ensure we end up with a valid MediaUpload object.
        if isinstance(media_filename, six.string_types):
            if media_mime_type is None:
                logger.warning(
                    'media_mime_type argument not specified: trying to '
                    'auto-detect for %s', media_filename)
                media_mime_type, _ = mimetypes.guess_type(media_filename)
            if media_mime_type is None:
                raise UnknownFileType(media_filename)
            if not mimeparse.best_match([media_mime_type], ','.join(accept)):
                raise UnacceptableMimeTypeError(media_mime_type)
            media_upload = MediaFileUpload(media_filename,
                                           mimetype=media_mime_type)
        elif isinstance(media_filename, MediaUpload):
            media_upload = media_filename
        else:
            raise TypeError('media_filename must be str or MediaUpload.')

        # Check the maxSize
        if media_upload.size() is not None and \
                media_upload.size() > maxSize > 0:
            raise MediaUploadSizeError("Media larger than: %s" % maxSize)

        # Use the media path uri for media uploads
        expanded_url = uritemplate.expand(mediaPathUrl, params)
        url = _urljoin(self._baseUrl, expanded_url + query)
        if media_upload.resumable():
            url = _add_query_parameter(url, 'uploadType', 'resumable')

        if media_upload.resumable():
            # This is all we need to do for resumable, if the body exists
            # it gets sent in the first request, otherwise an empty body
            # is sent.
            resumable = media_upload
        else:
            # A non-resumable upload
            if body is None:
                # This is a simple media upload
                headers['content-type'] = media_upload.mimetype()
                body = media_upload.getbytes(0, media_upload.size())
                url = _add_query_parameter(url, 'uploadType', 'media')
            else:
                # This is a multipart/related upload.
                msgRoot = MIMEMultipart('related')
                # msgRoot should not write out its own headers
                setattr(msgRoot, '_write_headers', lambda self: None)

                # attach the body as one part
                msg = MIMENonMultipart(*headers['content-type'].split('/'))
                msg.set_payload(body)
                msgRoot.attach(msg)

                # attach the media as the second part
                msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
                msg['Content-Transfer-Encoding'] = 'binary'
                payload = media_upload.getbytes(0, media_upload.size())
                msg.set_payload(payload)
                msgRoot.attach(msg)
                # encode the body: note that we can't use `as_string`,
                # because it plays games with `From ` lines.
                fp = BytesIO()
                g = _BytesGenerator(fp, mangle_from_=False)
                g.flatten(msgRoot, unixfrom=False)
                body = fp.getvalue()

                multipart_boundary = msgRoot.get_boundary()
                headers['content-type'] = (
                    'multipart/related; '
                    'boundary="%s"') % multipart_boundary
                url = _add_query_parameter(url, 'uploadType', 'multipart')

    logger.info('URL being requested: %s %s' % (httpMethod, url))
    return self._requestBuilder(self._http,
                                model.response,
                                url,
                                method=httpMethod,
                                body=body,
                                headers=headers,
                                methodId=methodId,
                                resumable=resumable)
def test_error_on_resolving(self):
    url = reverse(
        'sentry-api-0-dsym-files',
        kwargs={
            'organization_slug': self.project.organization.slug,
            'project_slug': self.project.slug,
        }
    )

    self.login_as(user=self.user)

    out = BytesIO()
    f = zipfile.ZipFile(out, 'w')
    f.writestr('proguard/%s.txt' % PROGUARD_BUG_UUID, PROGUARD_BUG_SOURCE)
    f.close()

    response = self.client.post(
        url, {
            'file': SimpleUploadedFile('symbols.zip', out.getvalue(),
                                       content_type='application/zip'),
        },
        format='multipart'
    )
    assert response.status_code == 201, response.content
    assert len(response.data) == 1

    event_data = {
        "sentry.interfaces.User": {"ip_address": "31.172.207.97"},
        "extra": {},
        "project": self.project.id,
        "platform": "java",
        "debug_meta": {
            "images": [{
                "type": "proguard",
                "uuid": PROGUARD_BUG_UUID,
            }]
        },
        "sentry.interfaces.Exception": {
            "values": [
                {
                    'stacktrace': {
                        "frames": [
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 67,
                            },
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 69,
                            },
                        ]
                    },
                    "type": "RuntimeException",
                    "value": "Shit broke yo"
                }
            ]
        },
    }

    resp = self._postWithHeader(event_data)
    assert resp.status_code == 200

    event = Event.objects.get()

    assert len(event.data['errors']) == 1
    assert event.data['errors'][0] == {
        'mapping_uuid': u'071207ac-b491-4a74-957c-2c94fd9594f2',
        'type': 'proguard_missing_lineno',
    }
def test_basic_resolving(self):
    url = reverse(
        'sentry-api-0-dsym-files',
        kwargs={
            'organization_slug': self.project.organization.slug,
            'project_slug': self.project.slug,
        }
    )

    self.login_as(user=self.user)

    out = BytesIO()
    f = zipfile.ZipFile(out, 'w')
    f.writestr('proguard/%s.txt' % PROGUARD_UUID, PROGUARD_SOURCE)
    f.writestr('ignored-file.txt', b'This is just some stuff')
    f.close()

    response = self.client.post(
        url, {
            'file': SimpleUploadedFile('symbols.zip', out.getvalue(),
                                       content_type='application/zip'),
        },
        format='multipart'
    )
    assert response.status_code == 201, response.content
    assert len(response.data) == 1

    event_data = {
        "sentry.interfaces.User": {"ip_address": "31.172.207.97"},
        "extra": {},
        "project": self.project.id,
        "platform": "java",
        "debug_meta": {
            "images": [{
                "type": "proguard",
                "uuid": PROGUARD_UUID,
            }]
        },
        "sentry.interfaces.Exception": {
            "values": [
                {
                    'stacktrace': {
                        "frames": [
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 67,
                            },
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 69,
                            },
                        ]
                    },
                    "type": "RuntimeException",
                    "value": "Shit broke yo"
                }
            ]
        },
    }

    # We do a preflight post, because there are many queries polluting the
    # array before the actual "processing" happens (like, auth_user)
    self._postWithHeader(event_data)
    with self.assertWriteQueries({
        'nodestore_node': 2,
        'sentry_eventtag': 1,
        'sentry_eventuser': 1,
        'sentry_filtervalue': 2,
        'sentry_groupedmessage': 1,
        'sentry_message': 1,
        'sentry_messagefiltervalue': 2,
        'sentry_userip': 1,
        'sentry_userreport': 1
    }):
        resp = self._postWithHeader(event_data)
    assert resp.status_code == 200

    event = Event.objects.first()

    bt = event.interfaces['sentry.interfaces.Exception'].values[0].stacktrace
    frames = bt.frames

    assert frames[0].function == 'getClassContext'
    assert frames[0].module == \
        'org.slf4j.helpers.Util$ClassContextSecurityManager'
    assert frames[1].function == 'getExtraClassContext'
    assert frames[1].module == \
        'org.slf4j.helpers.Util$ClassContextSecurityManager'

    assert event.culprit == (
        'org.slf4j.helpers.Util$ClassContextSecurityManager '
        'in getExtraClassContext'
    )
def test_broken_conversion(self):
    url = reverse('sentry-api-0-dsym-files', kwargs={
        'organization_slug': self.project.organization.slug,
        'project_slug': self.project.slug,
    })

    self.login_as(user=self.user)

    out = BytesIO()
    f = zipfile.ZipFile(out, 'w')
    f.write(os.path.join(os.path.dirname(__file__), 'fixtures', 'hello.dsym'),
            'dSYM/hello')
    f.close()

    original_make_symcache = Object.make_symcache

    def broken_make_symcache(self):
        raise SymbolicError('shit on fire')

    Object.make_symcache = broken_make_symcache
    try:
        response = self.client.post(url, {
            'file': SimpleUploadedFile('symbols.zip', out.getvalue(),
                                       content_type='application/zip'),
        }, format='multipart')
        assert response.status_code == 201, response.content
        assert len(response.data) == 1

        event_data = {
            "project": self.project.id,
            "platform": "cocoa",
            "debug_meta": {
                "images": [{
                    "type": "apple",
                    "arch": "x86_64",
                    "uuid": "502fc0a5-1ec1-3e47-9998-684fa139dca7",
                    "image_vmaddr": "0x0000000100000000",
                    "image_size": 4096,
                    "image_addr": "0x0000000100000000",
                    "name": "Foo.app/Contents/Foo"
                }],
                "sdk_info": {
                    "dsym_type": "macho",
                    "sdk_name": "macOS",
                    "version_major": 10,
                    "version_minor": 12,
                    "version_patchlevel": 4,
                }
            },
            "sentry.interfaces.Exception": {
                "values": [{
                    'stacktrace': {
                        "frames": [
                            {
                                "function": "unknown",
                                "instruction_addr": "0x0000000100000fa0"
                            },
                        ]
                    },
                    "type": "Fail",
                    "value": "fail"
                }]
            },
        }

        for _ in range(3):
            resp = self._postWithHeader(event_data)
            assert resp.status_code == 200
            event = Event.objects.get()
            errors = event.data['errors']
            assert len(errors) == 1
            assert errors[0] == {
                'image_arch': u'x86_64',
                'image_path': u'Foo.app/Contents/Foo',
                'image_uuid': u'502fc0a5-1ec1-3e47-9998-684fa139dca7',
                'message': u'shit on fire',
                'type': 'native_bad_dsym'
            }
            event.delete()
    finally:
        Object.make_symcache = original_make_symcache
def _compress(self, text):
    contents = BytesIO()
    gz_fh = gzip.GzipFile(mode='wb', fileobj=contents)
    gz_fh.write(text)
    gz_fh.close()
    return contents.getvalue()
def test_basic_resolving(self):
    url = reverse(
        "sentry-api-0-dsym-files",
        kwargs={
            "organization_slug": self.project.organization.slug,
            "project_slug": self.project.slug,
        },
    )

    self.login_as(user=self.user)

    out = BytesIO()
    f = zipfile.ZipFile(out, "w")
    f.writestr("proguard/%s.txt" % PROGUARD_UUID, PROGUARD_SOURCE)
    f.writestr("ignored-file.txt", b"This is just some stuff")
    f.close()

    response = self.client.post(
        url,
        {
            "file": SimpleUploadedFile("symbols.zip", out.getvalue(),
                                       content_type="application/zip")
        },
        format="multipart",
    )
    assert response.status_code == 201, response.content
    assert len(response.data) == 1

    event_data = {
        "user": {"ip_address": "31.172.207.97"},
        "extra": {},
        "project": self.project.id,
        "platform": "java",
        "debug_meta": {
            "images": [{"type": "proguard", "uuid": PROGUARD_UUID}]
        },
        "exception": {
            "values": [{
                "stacktrace": {
                    "frames": [
                        {
                            "function": "a",
                            "abs_path": None,
                            "module": "org.a.b.g$a",
                            "filename": None,
                            "lineno": 67,
                        },
                        {
                            "function": "a",
                            "abs_path": None,
                            "module": "org.a.b.g$a",
                            "filename": None,
                            "lineno": 69,
                        },
                    ]
                },
                "module": "org.a.b",
                "type": "g$a",
                "value": "Shit broke yo",
            }]
        },
        "timestamp": iso_format(before_now(seconds=1)),
    }

    event = self.post_and_retrieve_event(event_data)

    if not self.use_relay():
        # We measure the number of queries after an initial post,
        # because there are many queries polluting the array
        # before the actual "processing" happens (like, auth_user)
        with self.assertWriteQueries({
            "nodestore_node": 2,
            "sentry_eventuser": 1,
            "sentry_groupedmessage": 1,
            "sentry_userreport": 1,
        }):
            self.post_and_retrieve_event(event_data)

    exc = event.interfaces["exception"].values[0]
    bt = exc.stacktrace
    frames = bt.frames

    assert exc.type == "Util$ClassContextSecurityManager"
    assert exc.module == "org.slf4j.helpers"
    assert frames[0].function == "getClassContext"
    assert frames[0].module == \
        "org.slf4j.helpers.Util$ClassContextSecurityManager"
    assert frames[1].function == "getExtraClassContext"
    assert frames[1].module == \
        "org.slf4j.helpers.Util$ClassContextSecurityManager"

    assert event.culprit == (
        "org.slf4j.helpers.Util$ClassContextSecurityManager "
        "in getExtraClassContext"
    )
def test_error_on_resolving(self):
    url = reverse(
        "sentry-api-0-dsym-files",
        kwargs={
            "organization_slug": self.project.organization.slug,
            "project_slug": self.project.slug,
        },
    )

    self.login_as(user=self.user)

    out = BytesIO()
    f = zipfile.ZipFile(out, "w")
    f.writestr("proguard/%s.txt" % PROGUARD_BUG_UUID, PROGUARD_BUG_SOURCE)
    f.close()

    response = self.client.post(
        url,
        {
            "file": SimpleUploadedFile("symbols.zip", out.getvalue(),
                                       content_type="application/zip")
        },
        format="multipart",
    )
    assert response.status_code == 201, response.content
    assert len(response.data) == 1

    event_data = {
        "user": {"ip_address": "31.172.207.97"},
        "extra": {},
        "project": self.project.id,
        "platform": "java",
        "debug_meta": {
            "images": [{"type": "proguard", "uuid": PROGUARD_BUG_UUID}]
        },
        "exception": {
            "values": [{
                "stacktrace": {
                    "frames": [
                        {
                            "function": "a",
                            "abs_path": None,
                            "module": "org.a.b.g$a",
                            "filename": None,
                            "lineno": 67,
                        },
                        {
                            "function": "a",
                            "abs_path": None,
                            "module": "org.a.b.g$a",
                            "filename": None,
                            "lineno": 69,
                        },
                    ]
                },
                "type": "RuntimeException",
                "value": "Shit broke yo",
            }]
        },
        "timestamp": iso_format(before_now(seconds=1)),
    }

    event = self.post_and_retrieve_event(event_data)

    assert len(event.data["errors"]) == 1
    assert event.data["errors"][0] == {
        "mapping_uuid": u"071207ac-b491-4a74-957c-2c94fd9594f2",
        "type": "proguard_missing_lineno",
    }
def render(r, settings=language.Settings()):
    r = r.resolve(settings)
    s = BytesIO()
    assert language.serve(r, s, settings)
    return s.getvalue()
def encode(self, text_utf8, text_latex, inputenc=None, errors='strict'):
    encoding = 'latex+' + inputenc if inputenc else 'latex'
    stream = BytesIO()
    writer = codecs.getwriter(encoding)(stream, errors=errors)
    writer.write(text_utf8)
    self.assertEqual(text_latex, stream.getvalue())