def write(self, data, msg_type, **options):
    '''Serializes a single object and pushes it to the wrapped stream.

    :Parameters:
     - `data` - data to be serialized
     - `msg_type` (one of the constants defined in :class:`.MessageType`) -
       type of the message
    :Options:
     - `single_char_strings` (`boolean`) - if ``True`` single char Python
       strings are encoded as q strings instead of chars,
       **Default**: ``False``

    :returns: the serialized message when the wrapped stream is ``None``,
              otherwise ``None``
    '''
    self._buffer = BytesIO()
    self._options = MetaData(**CONVERSION_OPTIONS.union_dict(**options))

    # message header: endianness marker, message type byte, and six bytes
    # reserving space for the size field that is back-filled below
    header = '%s%s\0\0\0\0\0\0' % (ENDIANESS, chr(msg_type))
    self._buffer.write(header.encode("latin-1"))

    self._write(data)

    # back-fill the total message size at offset 4 of the header
    total_size = self._buffer.tell()
    self._buffer.seek(4)
    self._buffer.write(struct.pack('i', total_size))

    message = self._buffer.getvalue()
    if not self._stream:
        # no stream wrapped: hand the serialized bytes back to the caller
        return message
    self._stream.sendall(message)
def add_from_dataframe(self, dataframe, data_type_id, name, description):
    """
    Serialize the specified DataFrame and upload it as a new dataset.

    Parameters
    ----------
    dataframe : pandas.DataFrame
        Data to serialize.
    data_type_id : str
        Format to serialize to.
        Supported formats are:
            'PlainText'
            'GenericCSV'
            'GenericTSV'
            'GenericCSVNoHeader'
            'GenericTSVNoHeader'
        See the azureml.DataTypeIds class for constants.
    name : str
        Name for the new dataset.
    description : str
        Description for the new dataset.

    Returns
    -------
    SourceDataset
        Dataset that was just created.
        Use open(), read_as_binary(), read_as_text() or to_dataframe() on
        the dataset object to get its contents as a stream, bytes, str or
        pandas DataFrame.
    """
    _not_none('dataframe', dataframe)
    _not_none_or_empty('data_type_id', data_type_id)
    _not_none_or_empty('name', name)
    _not_none_or_empty('description', description)

    # FIX: the original created the buffer inside the try block, so a
    # failure in BytesIO() made the finally clause raise NameError on
    # 'output'. A with-statement scopes the buffer's lifetime correctly
    # and still closes it on every path.
    with BytesIO() as output:
        serialize_dataframe(output, data_type_id, dataframe)
        raw_data = output.getvalue()

    return self._upload(raw_data, data_type_id, name, description)
def _update_from_dataframe(self, dataframe, data_type_id=None, name=None, description=None):
    """
    Serialize the specified DataFrame and replace the existing dataset.

    Parameters
    ----------
    dataframe : pandas.DataFrame
        Data to serialize.
    data_type_id : str, optional
        Format to serialize to.
        If None, the existing format is preserved.
        Supported formats are:
            'PlainText'
            'GenericCSV'
            'GenericTSV'
            'GenericCSVNoHeader'
            'GenericTSVNoHeader'
        See the azureml.DataTypeIds class for constants.
    name : str, optional
        Name for the dataset.
        If None, the name of the existing dataset is used.
    description : str, optional
        Description for the dataset.
        If None, the description of the existing dataset is used.
    """
    _not_none('dataframe', dataframe)

    # fall back to the existing dataset's metadata for omitted arguments
    if data_type_id is None:
        data_type_id = self.data_type_id
    if name is None:
        name = self.name
    if description is None:
        description = self.description

    # FIX: the original created the buffer inside the try block, so a
    # failure in BytesIO() made the finally clause raise NameError on
    # 'output'. A with-statement scopes the buffer's lifetime correctly
    # and still closes it on every path.
    with BytesIO() as output:
        serialize_dataframe(output, data_type_id, dataframe)
        raw_data = output.getvalue()

    self._upload_and_refresh(raw_data, data_type_id, name, description)
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(lcmt_call_python_data._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
class QWriter(object):
    '''Provides serialization to q IPC protocol.

    :Parameters:
     - `stream` (`socket` or `None`) - stream for data serialization; when
       ``None``, :func:`.write` returns the serialized bytes instead of
       sending them
     - `protocol_version` (`integer`) - version of the q IPC protocol,
       gating availability of newer q types (guid, timestamp, timespan)
    '''

    # maps Python types to bound writer methods; populated by the
    # @serialize decorators below
    _writer_map = {}
    serialize = Mapper(_writer_map)

    def __new__(cls, *args, **kwargs):
        # When instantiated via the base class, prefer the pandas-aware
        # subclass if the optional pandas binding can be imported.
        if cls is QWriter:
            # try to load optional pandas binding
            try:
                from qpython._pandas import PandasQWriter
                return super(QWriter, cls).__new__(PandasQWriter)
            except ImportError:
                return super(QWriter, cls).__new__(QWriter)
        else:
            #return super(QWriter, cls).__new__(cls)
            return super().__new__(cls) #komsit fix

    def __init__(self, stream, protocol_version):
        # socket-like object (or None) that write() pushes messages to
        self._stream = stream
        self._protocol_version = protocol_version

    def write(self, data, msg_type, **options):
        '''Serializes and pushes single data object to a wrapped stream.

        :Parameters:
         - `data` - data to be serialized
         - `msg_type` (one of the constants defined in
           :class:`.MessageType`) - type of the message
        :Options:
         - `single_char_strings` (`boolean`) - if ``True`` single char
           Python strings are encoded as q strings instead of chars,
           **Default**: ``False``

        :returns: if wrapped stream is ``None`` serialized data, otherwise ``None``
        '''
        self._buffer = BytesIO()
        self._options = MetaData(**CONVERSION_OPTIONS.union_dict(**options))

        # header and placeholder for message size (back-filled below)
        self._buffer.write(('%s%s\0\0\0\0\0\0' % (ENDIANESS, chr(msg_type))).encode("latin-1"))

        self._write(data)

        # update message size at offset 4 of the header
        data_size = self._buffer.tell()
        self._buffer.seek(4)
        self._buffer.write(struct.pack('i', data_size))

        # write data to socket, or return it when no stream is wrapped
        if self._stream:
            self._stream.sendall(self._buffer.getvalue())
        else:
            return self._buffer.getvalue()

    def _write(self, data):
        # Dispatch a single Python object to the matching writer method.
        if data is None:
            self._write_null()
        else:
            # exception instances AND exception classes both serialize as q errors
            if isinstance(data, Exception) or (type(data) == type and issubclass(data, Exception)):
                data_type = Exception
            else:
                data_type = type(data)

            writer = self._writer_map.get(data_type, None)
            if writer:
                writer(self, data)
            else:
                # fall back to scalar (atom) serialization
                qtype = Q_TYPE.get(type(data), None)
                if qtype:
                    self._write_atom(data, qtype)
                else:
                    raise QWriterException('Unable to serialize type: %s' % data.__class__ if isinstance(data, object) else type(data))

    def _write_null(self):
        # null atom: type byte (QNULL) followed by one zero pad byte
        self._buffer.write(struct.pack('=bx', QNULL))

    @serialize(Exception)
    def _write_error(self, data):
        # q error: type byte, then a null-terminated latin-1 message
        self._buffer.write(struct.pack('b', QERROR))
        if isinstance(data, Exception):
            msg = data.__class__.__name__
            if data.args:
                # prefer the first exception argument as the message
                msg = data.args[0]
        else:
            # an exception *class*: use its name
            msg = data.__name__

        self._buffer.write(msg.encode("latin-1"))
        self._buffer.write(b'\0')

    def _write_atom(self, data, qtype):
        # Scalar value: type byte followed by its fixed-width binary encoding.
        try:
            self._buffer.write(struct.pack('b', qtype))
            fmt = STRUCT_MAP[qtype]
            self._buffer.write(struct.pack(fmt, data))
        except KeyError:
            raise QWriterException('Unable to serialize type: %s' % data.__class__ if isinstance(data, object) else type(data))

    @serialize(tuple, list)
    def _write_generic_list(self, data):
        # generic (mixed-type) list: header with length, then each element
        self._buffer.write(struct.pack('=bxi', QGENERAL_LIST, len(data)))
        for element in data:
            self._write(element)

    @serialize(str, bytes)
    def _write_string(self, data):
        # single characters become q chars unless single_char_strings is set
        if not self._options.single_char_strings and len(data) == 1:
            self._write_atom(ord(data), QCHAR)
        else:
            self._buffer.write(struct.pack('=bxi', QSTRING, len(data)))
            if isinstance(data, str):
                self._buffer.write(data.encode("latin-1"))
            else:
                self._buffer.write(data)

    @serialize(numpy.string_)
    def _write_symbol(self, data):
        # symbol: type byte, then a null-terminated byte string
        self._buffer.write(struct.pack('=b', QSYMBOL))
        if data:
            self._buffer.write(data)
        self._buffer.write(b'\0')

    @serialize(uuid.UUID)
    def _write_guid(self, data):
        # guids require protocol >= 3 (kdb+ v3.0)
        if self._protocol_version < 3:
            raise QWriterException('kdb+ protocol version violation: Guid not supported pre kdb+ v3.0')

        self._buffer.write(struct.pack('=b', QGUID))
        self._buffer.write(data.bytes)

    @serialize(QTemporal)
    def _write_temporal(self, data):
        try:
            # timespan/timestamp require protocol >= 1 (kdb+ v2.6)
            if self._protocol_version < 1 and (data.meta.qtype == QTIMESPAN or data.meta.qtype == QTIMESTAMP):
                raise QWriterException('kdb+ protocol version violation: data type %s not supported pre kdb+ v2.6' % hex(data.meta.qtype))

            self._buffer.write(struct.pack('=b', data.meta.qtype))
            fmt = STRUCT_MAP[data.meta.qtype]
            self._buffer.write(struct.pack(fmt, to_raw_qtemporal(data.raw, data.meta.qtype)))
        except KeyError:
            raise QWriterException('Unable to serialize type: %s' % type(data))

    @serialize(numpy.datetime64, numpy.timedelta64)
    def _write_numpy_temporal(self, data):
        try:
            # map the numpy dtype string to the corresponding q type code
            qtype = TEMPORAL_PY_TYPE[str(data.dtype)]

            if self._protocol_version < 1 and (qtype == QTIMESPAN or qtype == QTIMESTAMP):
                raise QWriterException('kdb+ protocol version violation: data type %s not supported pre kdb+ v2.6' % hex(qtype))

            self._buffer.write(struct.pack('=b', qtype))
            fmt = STRUCT_MAP[qtype]
            self._buffer.write(struct.pack(fmt, to_raw_qtemporal(data, qtype)))
        except KeyError:
            raise QWriterException('Unable to serialize type: %s' % data.dtype)

    @serialize(QLambda)
    def _write_lambda(self, data):
        # lambda: type byte, empty namespace byte, then the expression string
        self._buffer.write(struct.pack('=b', QLAMBDA))
        self._buffer.write(b'\0')
        self._write_string(data.expression)

    @serialize(QProjection)
    def _write_projection(self, data):
        # projection: type byte + count, then each bound/unbound parameter
        self._buffer.write(struct.pack('=bi', QPROJECTION, len(data.parameters)))
        for parameter in data.parameters:
            self._write(parameter)

    @serialize(QDictionary, QKeyedTable)
    def _write_dictionary(self, data):
        # dictionary: type byte, then keys object followed by values object
        self._buffer.write(struct.pack('=b', QDICTIONARY))
        self._write(data.keys)
        self._write(data.values)

    @serialize(QTable)
    def _write_table(self, data):
        # table: flip of a dictionary mapping column names to column lists
        self._buffer.write(struct.pack('=bxb', QTABLE, QDICTIONARY))
        self._write(qlist(numpy.array(data.dtype.names), qtype = QSYMBOL_LIST))
        self._buffer.write(struct.pack('=bxi', QGENERAL_LIST, len(data.dtype)))
        for column in data.dtype.names:
            self._write_list(data[column], data.meta[column])

    @serialize(numpy.ndarray, QList, QTemporalList)
    def _write_list(self, data, qtype = None):
        # normalize an explicit qtype to its negative (list) form
        if qtype is not None:
            qtype = -abs(qtype)

        if qtype is None:
            qtype = get_list_qtype(data)

        if self._protocol_version < 1 and (abs(qtype) == QTIMESPAN_LIST or abs(qtype) == QTIMESTAMP_LIST):
            raise QWriterException('kdb+ protocol version violation: data type %s not supported pre kdb+ v2.6' % hex(data.meta.qtype))

        if qtype == QGENERAL_LIST:
            self._write_generic_list(data)
        elif qtype == QCHAR:
            # char list is serialized as a q string
            # NOTE(review): ndarray.tostring() is deprecated in modern numpy
            # in favour of tobytes() - verify against the numpy version pinned
            self._write_string(data.tostring())
        else:
            self._buffer.write(struct.pack('=bxi', -qtype, len(data)))
            if data.dtype.type in (numpy.datetime64, numpy.timedelta64):
                # convert numpy temporal to raw q temporal
                data = array_to_raw_qtemporal(data, qtype = qtype)

            if qtype == QSYMBOL:
                # each symbol is written null-terminated (empty symbols emit just the terminator)
                for symbol in data:
                    if symbol:
                        self._buffer.write(symbol)
                    self._buffer.write(b'\0')
            elif qtype == QGUID:
                if self._protocol_version < 3:
                    raise QWriterException('kdb+ protocol version violation: Guid not supported pre kdb+ v3.0')

                for guid in data:
                    self._buffer.write(guid.bytes)
            else:
                # fixed-width element types: dump the raw array bytes
                self._buffer.write(data.tostring())
def download_submodule(author, module_name, dest_path, ignore_list):
    """ Downloads a submodule from the given author and module name, and
    extracts all files which are not on the ignore_list to the dest_path.

    Example: download_submodule("tobspr", "RenderPipeline", ".", ["README.md", "LICENSE"])

    Exits the process (non-zero) when the download or the archive is invalid.
    """
    # Make directory, if it does not exist yet
    if not os.path.isdir(dest_path):
        os.makedirs(dest_path)

    # Construct download url
    source_url = "https://github.com/" + author + "/" + module_name + "/archive/master.zip"
    prefix = module_name + "-master"
    print("Fetching:", source_url)

    # Resolve urlopen for both Python 2 and 3.
    # FIX: the original used a bare 'except:', which also swallowed
    # KeyboardInterrupt/SystemExit. On Python 3 'import urllib' succeeds but
    # 'urllib.urlopen' does not exist, so AttributeError is the actual
    # trigger for the fallback; ImportError is kept for safety.
    try:
        # Python 2.7
        import urllib
        urlopen = urllib.urlopen
    except (ImportError, AttributeError):
        # Python 3.4+
        import urllib.request
        urlopen = urllib.request.urlopen

    # Download the zip
    try:
        usock = urlopen(source_url)
        zip_data = usock.read()
        usock.close()
    except Exception as msg:
        print("ERROR: Could not fetch module", module_name, "! Reason:", msg, file=sys.stderr)
        sys.exit(2)

    # Extract the zip
    zip_ptr = BytesIO(zip_data)
    try:
        zip_handle = zipfile.ZipFile(zip_ptr)
    except zipfile.BadZipfile:
        print("ERROR: Invalid zip file!", file=sys.stderr)
        sys.exit(3)

    if zip_handle.testzip() is not None:
        print("ERROR: Invalid zip file checksums!", file=sys.stderr)
        sys.exit(1)

    num_files, num_dirs = 0, 0

    # FIX: close the archive when extraction finishes (the original leaked it)
    with zip_handle:
        for fname in zip_handle.namelist():
            # strip the '<module>-master' prefix and normalize separators
            rel_name = fname.replace(prefix, "").replace("\\", "/").lstrip("/")
            if not rel_name:
                continue

            is_file = not rel_name.endswith("/")
            rel_name = dest_path.rstrip("/\\") + "/" + rel_name

            # Files
            if is_file:
                # skip anything matching the ignore list (for/else: extract
                # only when no ignore pattern matched)
                for ignore in ignore_list:
                    if ignore in rel_name:
                        break
                else:
                    with zip_handle.open(fname, "r") as source, open(rel_name, "wb") as dest:
                        shutil.copyfileobj(source, dest)
                    num_files += 1

            # Directories
            else:
                if not os.path.isdir(rel_name):
                    os.makedirs(rel_name)
                num_dirs += 1

    print("Extracted", num_files, "files and", num_dirs, "directories")
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(mavlink_message_t._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(lcmt_joint_pd_override._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(ZcmRadarContiObjectsScan._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def __init__(self, buf=None):
    """Wrap the caller-supplied buffer, or create a fresh in-memory one."""
    self.buf = buf if buf is not None else BytesIO()
from cStringIO import StringIO as BytesIO except ImportError: from io import BytesIO import pydoc import lxml.etree html = pydoc.HTMLDoc() # Generate the documentation from the pydoc strings object, name = pydoc.resolve('pycdlib.pycdlib', 0) page = html.page(pydoc.describe(object), html.document(object, name)) # Now parse that documentation parser = lxml.etree.HTMLParser() tree = lxml.etree.parse(BytesIO(page.encode('ascii')), parser) # Now we remove the "Modules" section, since it contains only links to parts # of the API that we are not documenting doc = tree.getroot() tables = doc.xpath('/html/body/table') remove_table = None for table in tables: for tr in table.xpath('tr'): bgcolor = tr.get('bgcolor') if bgcolor == '#aa55cc': # We found the 'Modules' section; go back up to the table to remove it remove_table = table break if remove_table is not None:
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(lcmt_inverse_dynamics_debug_info._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def patch(src_bytes, patch_bytes):
    """patch(src_bytes, patch_bytes) -> bytes

    Apply the BSDIFF4-format patch_bytes to src_bytes and return the result.
    """
    control = read_patch(BytesIO(patch_bytes))
    return core.patch(src_bytes, *control)
def bootstrap_dependency(settings, url, hash_, priority, on_complete):
    """
    Downloads a dependency from a hard-coded URL - only used for bootstrapping _ssl
    on Linux and ST2/Windows

    :param settings: Package Control settings
    :param url: The non-secure URL to download from
    :param hash_: The sha256 hash of the package file
    :param priority: A three-digit number that controls what order packages are injected in
    :param on_complete: A callback to be run in the main Sublime thread, so it can use the API

    Returns early (logging via console_write) on download, hash-validation
    or unzip failures; on success installs the package and registers its
    loader code.
    """
    package_filename = path.basename(urlparse(url).path)
    package_basename, _ = path.splitext(package_filename)

    packages_dir = path.join(st_dir, u'Packages')
    # path.join never yields an empty string here; kept as a defensive guard
    if not packages_dir:
        return
    package_dir = path.join(packages_dir, package_basename)

    # The package has already been installed
    if path.exists(package_dir):
        return

    with downloader(url, settings) as manager:
        try:
            console_write(
                u'Downloading bootstrapped dependency %s' % package_basename,
                True)
            data = manager.fetch(
                url,
                'Error downloading bootstrapped dependency %s.' % package_basename)
            # FIX: corrected message typo 'bootstraped' -> 'bootstrapped'
            console_write(
                u'Successfully downloaded bootstrapped dependency %s' % package_basename,
                True)
            data_io = BytesIO(data)
        except (DownloaderException) as e:
            console_write(u'%s' % str(e), True)
            return

    # verify the payload against the expected sha256 before installing
    data_hash = hashlib.sha256(data).hexdigest()
    if data_hash != hash_:
        console_write(
            u'Error validating bootstrapped dependency %s (got %s instead of %s)'
            % (package_basename, data_hash, hash_),
            True)
        return

    try:
        data_zip = zipfile.ZipFile(data_io, 'r')
    except (zipfile.BadZipfile):
        console_write(
            u'Error unzipping bootstrapped dependency %s' % package_filename,
            True)
        return

    if not path.exists(package_dir):
        os.mkdir(package_dir, 0o755)

    code = None
    for zip_path in data_zip.namelist():
        dest = zip_path

        # zip entry names may arrive as bytes on older interpreters
        if not isinstance(dest, str_cls):
            dest = dest.decode('utf-8', 'strict')

        dest = dest.replace('\\', '/')

        # loader.py is registered with the loader instead of written to disk
        if dest == u'loader.py':
            code = data_zip.read(zip_path).decode('utf-8')
            continue

        dest = path.join(package_dir, dest)

        if dest[-1] == '/':
            if not path.exists(dest):
                os.mkdir(dest, 0o755)
        else:
            dest_dir = path.dirname(dest)
            if not path.exists(dest_dir):
                os.mkdir(dest_dir, 0o755)

            with open(dest, 'wb') as f:
                f.write(data_zip.read(zip_path))

    data_zip.close()

    loader.add(priority, package_basename, code)

    console_write(
        u'Successfully installed bootstrapped dependency %s' % package_basename,
        True)

    if on_complete:
        sublime.set_timeout(on_complete, 100)
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(
        lcmt_manipulator_plan_move_end_effector._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(viewer_link_data_t._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def test_reading_numpy_temporals():
    # Load query -> hex-encoded-payload pairs (alternating lines) from the
    # recorded expressions file.
    BINARY = OrderedDict()

    with open('tests/QExpressions3.out', 'rb') as f:
        while True:
            query = f.readline().strip()
            binary = f.readline().strip()
            if not binary:
                break
            BINARY[query] = binary

    print('Deserialization (numpy temporals)')
    for query, value in iter(NUMPY_TEMPORAL_EXPRESSIONS.items()):
        # Frame the payload as a full q IPC message:
        # 4-byte header, 4-byte total size, then the body.
        buffer_ = BytesIO()
        binary = binascii.unhexlify(BINARY[query])
        buffer_.write(b'\1\0\0\0')
        buffer_.write(struct.pack('i', len(binary) + 8))
        buffer_.write(binary)
        buffer_.seek(0)

        sys.stdout.write( ' %-75s' % query )
        try:
            buffer_.seek(0)
            stream_reader = qreader.QReader(buffer_)
            result = stream_reader.read(numpy_temporals = True).data
            assert compare(value, result), 'deserialization failed: %s, expected: %s actual: %s' % (query, value, result)
            print('.')
        except QException as e:
            # expected-error cases: the recorded value is itself a QException
            assert isinstance(value, QException)
            assert e.args == value.args
            print('.')
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(control_parameter_respones_lcmt._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def test_reading_pandas():
    print('Deserialization (pandas)')
    for query, value in iter(PANDAS_EXPRESSIONS.items()):
        # Frame the recorded payload as a full q IPC message:
        # 4-byte header, 4-byte total size, then the body.
        buffer_ = BytesIO()
        binary = binascii.unhexlify(BINARY[query])
        buffer_.write(b'\1\0\0\0')
        buffer_.write(struct.pack('i', len(binary) + 8))
        buffer_.write(binary)
        buffer_.seek(0)

        sys.stdout.write(' %-75s' % query)
        try:
            buffer_.seek(0)
            stream_reader = PandasQReader(buffer_)
            result = stream_reader.read(pandas = True).data
            if isinstance(value, dict):
                if 'index' in value:
                    # keyed table: move q keys back into columns before
                    # comparing, preserving the qpython meta attribute
                    meta = result.meta
                    result = result.reset_index()
                    result._metadata = ["meta"]
                    result.meta = meta
                if not 'compare_meta' in value or value['compare_meta']:
                    assert value['meta'].as_dict() == result.meta.as_dict(), 'deserialization failed qtype: %s, expected: %s actual: %s' % (query, value['meta'], result.meta)
                assert compare(value['data'], result), 'deserialization failed: %s, expected: %s actual: %s' % (query, value['data'], result)
            else:
                assert compare(value, result), 'deserialization failed: %s, expected: %s actual: %s' % (query, value, result)
            print('.')
        except QException as e:
            # expected-error cases: the recorded value is itself a QException
            assert isinstance(value, QException)
            assert e.message == value.message
            print('.')
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(entropy_trend_data._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def import_(self, command):
    # Replace the working copy at self.path with the contents of a zip
    # archive downloaded from command.url. Returns a command-result dict
    # (cmd/cwd/output/returncode) on both success and failure paths.
    if not command.url:
        return {
            'cmd': '',
            'cwd': self.path,
            'output': "Repository data lacks the 'url' value",
            'returncode': 1
        }

    # clear destination
    if os.path.exists(self.path):
        for filename in os.listdir(self.path):
            path = os.path.join(self.path, filename)
            try:
                # directories first; fall back to plain file removal
                shutil.rmtree(path)
            except OSError:
                os.remove(path)
    else:
        not_exist = self._create_path()
        if not_exist:
            # propagate the error dict from the failed path creation
            return not_exist

    # download zipfile
    try:
        data = load_url(command.url, retry=command.retry)
    except URLError as e:
        return {
            'cmd': '',
            'cwd': self.path,
            'output': "Could not fetch zipfile from '%s': %s" % (command.url, e),
            'returncode': 1
        }

    def create_path(path):
        # Create `path` (and parents); return None on success or an error
        # dict describing the failure.
        if not os.path.exists(path):
            try:
                os.makedirs(path)
            except os.error as e:
                return {
                    'cmd': 'os.makedirs(%s)' % path,
                    'cwd': path,
                    'output': "Could not create directory '%s': %s" % (path, e),
                    'returncode': 1
                }
        return None

    # unpack zipfile into destination
    try:
        zip_file = zipfile.ZipFile(BytesIO(data), mode='r')
    except zipfile.BadZipfile as e:
        return {
            'cmd': 'ZipFile(%s)' % command.url,
            'cwd': self.path,
            'output': "Could not read zipfile from '%s': %s" % (command.url, e),
            'returncode': 1
        }

    try:
        if not command.version:
            zip_file.extractall(self.path)
        else:
            # only extract entries under the '<version>/' subfolder
            prefix = str(command.version) + '/'
            for name in zip_file.namelist():
                if name.startswith(prefix):
                    if not name[len(prefix):]:
                        # skip the version folder entry itself
                        continue
                    # remap members from version subfolder into destination
                    dst = os.path.join(self.path, name[len(prefix):])
                    if dst.endswith('/'):
                        # create directories
                        not_exist = create_path(dst)
                        if not_exist:
                            return not_exist
                    else:
                        with zip_file.open(name, mode='r') as src_handle:
                            with open(dst, 'wb') as dst_handle:
                                dst_handle.write(src_handle.read())
    finally:
        zip_file.close()

    return {
        'cmd': '',
        'cwd': self.path,
        'output': "Downloaded zipfile from '%s' and unpacked it" % command.url,
        'returncode': 0
    }
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(ZcmObstacleDetectorPoint2D._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def decode_bytes(buf):
    """Read a varint from the byte string `buf`."""
    stream = BytesIO(buf)
    return decode_stream(stream)
def _parse(self, stream, context):
    """Read this construct's bytes, run them through the decoder, and parse the subconstruct from the decoded data."""
    raw = _read_stream(stream, self._sizeof(context))
    decoded = BytesIO(self.decoder(raw))
    return self.subcon._parse(decoded, context)
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(ZcmSauglMssduTimestamp._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def __init__(self, key, expire):
    # delegate key/expiry bookkeeping to the base chunk class
    super(MemoryBufferChunk, self).__init__(key, expire)
    # backing store: in-memory byte buffer holding this chunk's data
    self._buf = BytesIO()
def getAsFile(self):
    """Return the stored content wrapped in a seekable, file-like BytesIO object."""
    content = self.get()
    return BytesIO(content)
def purge(self):
    # Discard all buffered data by replacing the buffer with a fresh,
    # empty one; the old buffer is left to garbage collection.
    self._buf = BytesIO()
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(ZcmCamWDSnowground._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(timestamped_waypoint_t._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def encode(self):
    """Serialize this message: the packed type fingerprint followed by the encoded fields."""
    out = BytesIO()
    out.write(gps._get_packed_fingerprint())
    self._encode_one(out)
    return out.getvalue()
def __init__(self, socket, encoding=None):
    # Wrap a socket object behind a file-like interface.
    super(SocketFile, self).__init__()
    self._socket = socket
    # internal byte buffer; exact role is defined by the read/write
    # methods outside this view
    self._buffer = BytesIO()
    # optional text encoding; None presumably means raw bytes -- verify
    # against this class's read/write methods
    self.encoding = encoding
def test_reading_pandas():
    print("Deserialization (pandas)")
    for query, value in iter(PANDAS_EXPRESSIONS.items()):
        # Frame the recorded payload as a full q IPC message:
        # 4-byte header, 4-byte total size, then the body.
        buffer_ = BytesIO()
        binary = binascii.unhexlify(BINARY[query])
        buffer_.write(b"\1\0\0\0")
        buffer_.write(struct.pack("i", len(binary) + 8))
        buffer_.write(binary)
        buffer_.seek(0)

        sys.stdout.write(" %-75s" % query)
        try:
            buffer_.seek(0)
            stream_reader = qreader.QReader(buffer_)
            result = stream_reader.read(pandas=True).data
            if isinstance(value, dict):
                if "index" in value:
                    # keyed table: move q keys back into columns before
                    # comparing, preserving the qpython meta attribute
                    meta = result.meta
                    result = result.reset_index()
                    result.meta = meta
                if not "compare_meta" in value or value["compare_meta"]:
                    assert value["meta"].as_dict() == result.meta.as_dict(), (
                        "deserialization failed qtype: %s, expected: %s actual: %s"
                        % (query, value["meta"], result.meta)
                    )
                assert compare(value["data"], result), "deserialization failed: %s, expected: %s actual: %s" % (
                    query,
                    value["data"],
                    result,
                )
            else:
                assert compare(value, result), "deserialization failed: %s, expected: %s actual: %s" % (
                    query,
                    value,
                    result,
                )
            print(".")
        except QException as e:
            # expected-error cases: the recorded value is itself a QException
            assert isinstance(value, QException)
            assert e.message == value.message
            print(".")
def decode(s, router=None):
    """Deserialize the byte string `s` by handing a stream reader to `_decode`."""
    reader = BytesIO(s).read
    return _decode(reader, router)
def setUp(self):
    # Test fixture: an in-memory stream holding known file contents
    # (six.b yields the byte-string form on both Python 2 and 3).
    self.source = BytesIO(six.b('file-contents'))
def _derive_with_pil(self, im, target_fp, image_request, image_info, rotate=True, crop=True):
    '''
    Once you have a PIL.Image, this can be used to do the IIIF operations.

    Args:
        im (PIL.Image)
        target_fp (str)
        image_request (ImageRequest)
        image_info (ImageInfo)
        rotate (bool):
            True by default; can be set to False in case the rotation was
            done further upstream.
        crop (bool):
            True by default; can be set to False when the region was already
            extracted further upstream.
    Returns:
        void (puts an image at target_fp)
    '''
    # IIIF order of operations: region (crop) -> size -> rotation/mirror ->
    # quality -> format.
    region_param = image_request.region_param(image_info=image_info)
    if crop and region_param.canonical_uri_value != 'full':
        # For PIL: "The box is a 4-tuple defining the left, upper, right,
        # and lower pixel coordinate."
        box = (
            region_param.pixel_x,
            region_param.pixel_y,
            region_param.pixel_x + region_param.pixel_w,
            region_param.pixel_y + region_param.pixel_h
        )
        logger.debug('cropping to: %r', box)
        im = im.crop(box)

    # resize
    size_param = image_request.size_param(image_info=image_info)
    if size_param.canonical_uri_value != 'full':
        wh = [int(size_param.w), int(size_param.h)]
        logger.debug('Resizing to: %r', wh)
        # NOTE(review): Image.ANTIALIAS was removed in Pillow 10
        # (use Image.LANCZOS) - verify the pinned Pillow version
        im = im.resize(wh, resample=Image.ANTIALIAS)

    rotation_param = image_request.rotation_param()
    if rotation_param.mirror:
        im = mirror(im)

    try:
        # optionally normalize an embedded ICC profile to sRGB
        if self.map_profile_to_srgb and 'icc_profile' in im.info:
            embedded_profile = BytesIO(im.info['icc_profile'])
            im = self._map_im_profile_to_srgb(im, embedded_profile)
    except PyCMSError as err:
        # best-effort: a failed profile conversion is logged, not fatal
        # NOTE(review): logger.warn is deprecated; logger.warning is the
        # modern spelling
        logger.warn('Error converting %r (%r) to sRGB: %r', image_request.ident, image_info.src_img_fp, err)

    if rotation_param.rotation != '0' and rotate:
        # PIL rotates counter-clockwise; IIIF rotation is clockwise
        r = 0 - float(rotation_param.rotation)

        # We need to convert pngs here and not below if we want a
        # transparent background (A == Alpha layer)
        if (float(rotation_param.rotation) % 90 != 0.0 and image_request.format == 'png'):
            if image_request.quality in ('gray', 'bitonal'):
                im = im.convert('LA')
            else:
                im = im.convert('RGBA')

        im = im.rotate(r, expand=True)

    # If the source image has no alpha channel, or has one but we're writing
    # a non-transparent format (e.g. RGB), convert to the mode implied by
    # the requested quality before saving.
    if (not im.mode.endswith('A') or (im.mode == 'RGBA' and image_request.format != 'png')):
        if (im.mode != "RGB" and image_request.quality not in ('gray', 'bitonal')):
            im = im.convert("RGB")
        elif image_request.quality == 'gray':
            im = im.convert('L')
        elif image_request.quality == 'bitonal':
            # not 1-bit w. JPG
            dither = Image.FLOYDSTEINBERG if self.dither_bitonal_images else Image.NONE
            im = im.convert('1', dither=dither)

    if image_request.format == 'jpg':
        # see http://pillow.readthedocs.org/en/latest/handbook/image-file-formats.html#jpeg
        im.save(target_fp, quality=90)
    elif image_request.format == 'png':
        # see http://pillow.readthedocs.org/en/latest/handbook/image-file-formats.html#png
        im.save(target_fp, optimize=True, bits=256)
    elif image_request.format == 'gif':
        # see http://pillow.readthedocs.org/en/latest/handbook/image-file-formats.html#gif
        im.save(target_fp)
    elif image_request.format == 'webp':
        # see http://pillow.readthedocs.org/en/latest/handbook/image-file-formats.html#webp
        im.save(target_fp, quality=90)
    elif image_request.format == 'tif':
        # see http://pillow.readthedocs.io/en/latest/handbook/image-file-formats.html#tiff
        im.save(target_fp, compression='None')
def test_reading_compressed():
    # Load query -> hex-encoded-payload pairs (alternating lines) from the
    # recorded compressed expressions file.
    BINARY = OrderedDict()

    with open('tests/QCompressedExpressions3.out', 'rb') as f:
        while True:
            query = f.readline().strip()
            binary = f.readline().strip()
            if not binary:
                break
            BINARY[query] = binary

    print('Compressed deserialization')
    buffer_reader = qreader.QReader(None)
    for query, value in iter(COMPRESSED_EXPRESSIONS.items()):
        # Frame the payload as a q IPC message; the third header byte (\1)
        # marks the body as compressed.
        buffer_ = BytesIO()
        binary = binascii.unhexlify(BINARY[query])
        buffer_.write(b'\1\0\1\0')
        buffer_.write(struct.pack('i', len(binary) + 8))
        buffer_.write(binary)
        buffer_.seek(0)

        sys.stdout.write( ' %-75s' % query )
        try:
            # exercise all three reader entry points against the same message:
            # 1) one-shot read from raw bytes
            result = buffer_reader.read(source = buffer_.getvalue()).data
            assert compare(value, result), 'deserialization failed: %s' % (query)

            # 2) split header/data reads
            header = buffer_reader.read_header(source = buffer_.getvalue())
            result = buffer_reader.read_data(message_size = header.size, is_compressed = header.is_compressed)
            assert compare(value, result), 'deserialization failed: %s' % (query)

            # 3) stream-based read
            stream_reader = qreader.QReader(buffer_)
            result = stream_reader.read().data
            assert compare(value, result), 'deserialization failed: %s' % (query)
            print('.')
        except QException as e:
            # expected-error cases: the recorded value is itself a QException
            assert isinstance(value, QException)
            assert e.args == value.args
            print('.')
def test_reading_pandas():
    print('Deserialization (pandas)')
    for query, value in iter(PANDAS_EXPRESSIONS.items()):
        # Frame the recorded payload as a full q IPC message:
        # 4-byte header, 4-byte total size, then the body.
        buffer_ = BytesIO()
        binary = binascii.unhexlify(BINARY[query])
        buffer_.write(b'\1\0\0\0')
        buffer_.write(struct.pack('i', len(binary) + 8))
        buffer_.write(binary)
        buffer_.seek(0)

        sys.stdout.write(' %-75s' % query)
        try:
            buffer_.seek(0)
            stream_reader = PandasQReader(buffer_)
            result = stream_reader.read(pandas = True).data
            if isinstance(value, dict):
                if 'index' in value:
                    # keyed table: move q keys back into columns before
                    # comparing, preserving the qpython meta attribute
                    meta = result.meta
                    result = result.reset_index()
                    result.meta = meta
                if not 'compare_meta' in value or value['compare_meta']:
                    assert value['meta'].as_dict() == result.meta.as_dict(), 'deserialization failed qtype: %s, expected: %s actual: %s' % (query, value['meta'], result.meta)
                assert compare(value['data'], result), 'deserialization failed: %s, expected: %s actual: %s' % (query, value['data'], result)
            else:
                assert compare(value, result), 'deserialization failed: %s, expected: %s actual: %s' % (query, value, result)
            print('.')
        except QException as e:
            # expected-error cases: the recorded value is itself a QException
            assert isinstance(value, QException)
            assert e.message == value.message
            print('.')