Example #1
    def test_pickle_wrapper(self):
        fh = BytesIO()  # use cPickle with binary content

        # test unwrapped results load save pickle
        self.results._results.save(fh)
        fh.seek(0, 0)
        res_unpickled = self.results._results.__class__.load(fh)
        assert type(res_unpickled) is type(self.results._results)  # noqa:E721
        # TODO: Check equality instead?  This check isn't exactly meaningful

        # test wrapped results load save
        fh.seek(0, 0)
        self.results.save(fh)
        fh.seek(0, 0)
        res_unpickled = self.results.__class__.load(fh)
        fh.close()
        assert type(res_unpickled) is type(self.results)  # noqa:E721
        # TODO: Check equality instead?  This check isn't exactly meaningful

        before = sorted(iterkeys(self.results.__dict__))
        after = sorted(iterkeys(res_unpickled.__dict__))
        assert before == after

        before = sorted(iterkeys(self.results._results.__dict__))
        after = sorted(iterkeys(res_unpickled._results.__dict__))
        assert before == after

        before = sorted(iterkeys(self.results.model.__dict__))
        after = sorted(iterkeys(res_unpickled.model.__dict__))
        assert before == after

        before = sorted(iterkeys(self.results._cache))
        after = sorted(iterkeys(res_unpickled._cache))
        assert before == after
Example #2
 def __iter__(self):
     if self.is_zipped:
         byte_stream = BytesIO(self.response.content)
         with zipfile.ZipFile(byte_stream) as self.zipfile:
             for name in self.zipfile.namelist():
                 with self.zipfile.open(name) as single_file:
                     if name[-3:] == 'csv':
                         reader = self.get_csv_reader(single_file)
                     else:
                         reader = single_file
                     reader_iterator = iter(reader)
                     if self.is_header_present:
                         next(reader_iterator)
                     for line in reader_iterator:
                         yield self._parse_line(line)
         byte_stream.close()
     else:
         stream = codecs.iterdecode(
             self.response.iter_lines(), self.response.encoding
             or self.response.apparent_encoding)
         reader = csv.reader(stream, delimiter=self.delimiter)
         reader_iterator = iter(reader)
         if self.is_header_present:
             next(reader_iterator)
         for line in reader_iterator:
             yield self._parse_line(line)
         stream.close()
Example #3
 def getDataFromService(self, srvMethod, params):
     self.infoMsg(
         "Sending header request to %s" % self.getDestinationService(),
         str(params))
     result = self._sendTransferHeader(srvMethod, params)
     if not result["OK"]:
         self.errMsg("Could not send header", result["Message"])
         return result
     self.infoMsg("Starting to receive data from service")
     _, srvTransport = result["Value"]
     srvFileHelper = FileHelper(srvTransport)
     srvFileHelper.setDirection("receive")
     sIO = BytesIO()
     result = srvFileHelper.networkToDataSink(sIO,
                                              self.__transferBytesLimit)
     if not result["OK"]:
         self.errMsg("Could not receive data from server",
                     result["Message"])
         srvTransport.close()
         sIO.close()
         return result
     dataReceived = sIO.getvalue()
     sIO.close()
     self.infoMsg("Received %s bytes from service" % len(dataReceived))
     retVal = srvTransport.receiveData()
     srvTransport.close()
     if not retVal["OK"]:
         return retVal
     return S_OK({"data": dataReceived, "srvResponse": retVal})
Example #4
 def updateBundles(self):
   dirsToBundle = self.__getDirsToBundle()
   # Delete bundles that don't have to be updated
    for bId in list(self.__bundles):  # iterate over a copy: entries are deleted below
     if bId not in dirsToBundle:
       gLogger.info("Deleting old bundle %s" % bId)
        del self.__bundles[bId]
   for bId in dirsToBundle:
     bundlePaths = dirsToBundle[bId]
     gLogger.info("Updating %s bundle %s" % (bId, bundlePaths))
     buffer_ = BytesIO()
     filesToBundle = sorted(File.getGlobbedFiles(bundlePaths))
     if filesToBundle:
       commonPath = os.path.commonprefix(filesToBundle)
       commonEnd = len(commonPath)
       gLogger.info("Bundle will have %s files with common path %s" % (len(filesToBundle), commonPath))
       with tarfile.open('dummy', "w:gz", buffer_) as tarBuffer:
         for filePath in filesToBundle:
           tarBuffer.add(filePath, filePath[commonEnd:])
       zippedData = buffer_.getvalue()
       buffer_.close()
       hash_ = File.getMD5ForFiles(filesToBundle)
       gLogger.info("Bundled %s : %s bytes (%s)" % (bId, len(zippedData), hash_))
       self.__bundles[bId] = (hash_, zippedData)
     else:
       self.__bundles[bId] = (None, None)
Example #5
File: snappy.py Project: wglass/kiel
def decompress(data):
    """
    Decompresses the given data via the snappy algorithm.

    If ``python-snappy`` is not installed a ``RuntimeError`` is raised.
    """
    if not snappy_available:
        raise RuntimeError("Snappy compression unavailable.")

    buff_offset = len(raw_header)  # skip the header
    length = len(data) - len(raw_header)

    output = BytesIO()

    while buff_offset <= length:
        block_size = struct.unpack_from("!i", data, buff_offset)[0]
        buff_offset += struct.calcsize("!i")

        block = struct.unpack_from("!%ds" % block_size, data, buff_offset)[0]
        buff_offset += block_size

        output.write(snappy.uncompress(block))

    result = output.getvalue()

    output.close()

    return result
Example #6
def compress(data):
    """
    Compresses given data via the snappy algorithm.

    The result is preceded with a header containing the string 'SNAPPY' and the
    default and min-compat versions (both ``1``).

    The block size for the compression is hard-coded at 32kb.

    If ``python-snappy`` is not installed a ``RuntimeError`` is raised.
    """
    if not snappy_available:
        raise RuntimeError("Snappy compression unavailable.")

    buff = BytesIO()
    buff.write(raw_header)

    for block_num in range(0, len(data), BLOCK_SIZE):
        block = data[block_num:block_num + BLOCK_SIZE]
        compressed = snappy.compress(block)

        buff.write(struct.pack("!i", len(compressed)))
        buff.write(compressed)

    result = buff.getvalue()

    buff.close()

    return result
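A minimal round-trip sketch pairing the snappy compress() and decompress() helpers above; it assumes the module-level raw_header, BLOCK_SIZE and snappy_available names from these examples and that python-snappy is installed:

    # Hypothetical usage, not taken from the original project.
    payload = b"some bytes to frame for the wire" * 20
    framed = compress(payload)            # 'SNAPPY' header + length-prefixed block(s)
    assert decompress(framed) == payload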
Example #7
    def serialize(
            self, destination=None, encoding="utf-8", format='xml', **args):

        if self.type in ('CONSTRUCT', 'DESCRIBE'):
            return self.graph.serialize(
                destination, encoding=encoding, format=format, **args)

        """stolen wholesale from graph.serialize"""
        from rdflib import plugin
        serializer = plugin.get(format, ResultSerializer)(self)
        if destination is None:
            stream = BytesIO()
            stream2 = EncodeOnlyUnicode(stream)
            serializer.serialize(stream2, encoding=encoding, **args)
            return stream.getvalue()
        if hasattr(destination, "write"):
            stream = destination
            serializer.serialize(stream, encoding=encoding, **args)
        else:
            location = destination
            scheme, netloc, path, params, query, fragment = urlparse(location)
            if netloc != "":
                print("WARNING: not saving as location" +
                      "is not a local file reference")
                return
            fd, name = tempfile.mkstemp()
            stream = os.fdopen(fd, 'wb')
            serializer.serialize(stream, encoding=encoding, **args)
            stream.close()
            if hasattr(shutil, "move"):
                shutil.move(name, path)
            else:
                shutil.copy(name, path)
                os.remove(name)
Example #8
def decompress(data):
    """
    Decompresses the given data via the snappy algorithm.

    If ``python-snappy`` is not installed a ``RuntimeError`` is raised.
    """
    if not snappy_available:
        raise RuntimeError("Snappy compression unavailable.")

    buff_offset = len(raw_header)  # skip the header
    length = len(data) - len(raw_header)

    output = BytesIO()

    while buff_offset <= length:
        block_size = struct.unpack_from("!i", data, buff_offset)[0]
        buff_offset += struct.calcsize("!i")

        block = struct.unpack_from("!%ds" % block_size, data, buff_offset)[0]
        buff_offset += block_size

        output.write(snappy.uncompress(block))

    result = output.getvalue()

    output.close()

    return result
Example #9
 def __iter__(self):
     if self.is_zipped:
         byte_stream = BytesIO(self.response.content)
         with zipfile.ZipFile(byte_stream) as self.zipfile:
             for name in self.zipfile.namelist():
                 with self.zipfile.open(name) as single_file:
                     if name[-3:] == 'csv':
                         reader = csv.reader(single_file, delimiter=self.delimiter)
                     else:
                         reader = single_file
                     reader_iterator = iter(reader)
                     if self.is_header_present:
                         next(reader_iterator)
                     for line in reader_iterator:
                         yield self._parse_line(line)
         byte_stream.close()
     else:
         stream = codecs.iterdecode(self.response.iter_lines(),
                                    self.response.encoding or self.response.apparent_encoding)
         reader = csv.reader(stream, delimiter=self.delimiter)
         reader_iterator = iter(reader)
         if self.is_header_present:
             next(reader_iterator)
         for line in reader_iterator:
             yield self._parse_line(line)
         stream.close()
Example #10
def test_decode_response_gzip():
    body = b'gzip message'

    buf = BytesIO()
    f = gzip.GzipFile('a', fileobj=buf, mode='wb')
    f.write(body)
    f.close()

    compressed_body = buf.getvalue()
    buf.close()
    gzip_response = {
        'body': {
            'string': compressed_body
        },
        'headers': {
            'access-control-allow-credentials': ['true'],
            'access-control-allow-origin': ['*'],
            'connection': ['keep-alive'],
            'content-encoding': ['gzip'],
            'content-length': ['177'],
            'content-type': ['application/json'],
            'date': ['Wed, 02 Dec 2015 19:44:32 GMT'],
            'server': ['nginx']
        },
        'status': {
            'code': 200,
            'message': 'OK'
        }
    }
    decoded_response = decode_response(gzip_response)
    assert decoded_response['body']['string'] == body
    assert decoded_response['headers']['content-length'] == [str(len(body))]
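The BytesIO/GzipFile dance at the top of this test is a common way to gzip a byte string entirely in memory; a small stand-alone sketch of the same pattern:

    import gzip
    from io import BytesIO

    def gzip_bytes(payload):
        """Return the gzip-compressed form of a byte string, built in memory."""
        buf = BytesIO()
        with gzip.GzipFile(fileobj=buf, mode='wb') as fh:
            fh.write(payload)
        return buf.getvalue()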
Example #11
    def visit_immutation(self, node, children):
        context = self._final_context()
        child_type = children[0].expr_name

        if child_type == 'preview':
            if self.tool == 'httpie':
                command = ['http'] + context.httpie_args(self.method,
                                                         quote=True)
            else:
                assert self.tool == 'curl'
                command = ['curl'] + context.curl_args(self.method, quote=True)
            click.echo(' '.join(command))
        elif child_type == 'action':
            output = BytesIO()
            try:
                env = Environment(stdout=output, is_windows=False)
                httpie_main(context.httpie_args(self.method), env=env)
                content = output.getvalue()
            finally:
                output.close()

            # XXX: Work around a bug of click.echo_via_pager(). When you pass
            # a bytestring to echo_via_pager(), it converts the bytestring with
            # str(b'abc'), which makes it "b'abc'".
            if six.PY2:
                content = unicode(content, 'utf-8')  # noqa
            else:
                content = str(content, 'utf-8')
            click.echo_via_pager(content)

        return node
Example #12
File: conn.py Project: pashinin/restkit
    def __init__(self,
                 host,
                 port,
                 backend_mod=None,
                 pool=None,
                 is_ssl=False,
                 extra_headers=[],
                 proxy_pieces=None,
                 **ssl_args):

        # connect the socket, if we are using an SSL connection, we wrap
        # the socket.
        self._s = backend_mod.Socket(socket.AF_INET, socket.SOCK_STREAM)
        self._s.connect((host, port))
        if proxy_pieces:
            self._s.sendall(proxy_pieces)
            response = BytesIO()  # recv() returns bytes, so buffer them as bytes
            while response.getvalue()[-4:] != b'\r\n\r\n':
                response.write(self._s.recv(1))
            response.close()
        if is_ssl:
            self._s = ssl.wrap_socket(self._s, **ssl_args)

        self.extra_headers = extra_headers
        self.is_ssl = is_ssl
        self.backend_mod = backend_mod
        self.host = host
        self.port = port
        self._connected = True
        self._life = time.time() - random.randint(0, 10)
        self._pool = pool
        self._released = False
Example #13
def output_properties(path=None,
                      content=None,
                      basename=None,
                      pseduo_location=False):
    checksum = hashlib.sha1()
    properties = {
        "class": "File",
    }
    if path is not None:
        properties["path"] = path
        f = open(path, "rb")
    else:
        f = BytesIO(content)

    try:
        contents = f.read(1024 * 1024)
        filesize = 0
        while contents:
            checksum.update(contents)
            filesize += len(contents)
            contents = f.read(1024 * 1024)
    finally:
        f.close()
    properties["checksum"] = "sha1$%s" % checksum.hexdigest()
    properties["size"] = filesize
    set_basename_and_derived_properties(properties, basename)
    _handle_pseudo_location(properties, pseduo_location)
    return properties
Example #14
def test_decode_response_gzip():
    body = b'gzip message'

    buf = BytesIO()
    f = gzip.GzipFile('a', fileobj=buf, mode='wb')
    f.write(body)
    f.close()

    compressed_body = buf.getvalue()
    buf.close()
    gzip_response = {
        'body': {'string': compressed_body},
        'headers': {
            'access-control-allow-credentials': ['true'],
            'access-control-allow-origin': ['*'],
            'connection': ['keep-alive'],
            'content-encoding': ['gzip'],
            'content-length': ['177'],
            'content-type': ['application/json'],
            'date': ['Wed, 02 Dec 2015 19:44:32 GMT'],
            'server': ['nginx']
        },
        'status': {'code': 200, 'message': 'OK'}
    }
    decoded_response = decode_response(gzip_response)
    assert decoded_response['body']['string'] == body
    assert decoded_response['headers']['content-length'] == [str(len(body))]
Example #15
def test_augment_observation(test_file, test_file_uri):
    test_fitsparser = FitsParser(test_file)
    test_obs = Observation('collection', 'observation_id',
                           Algorithm('algorithm'))
    test_fitsparser.augment_observation(test_obs, test_file_uri)
    assert test_obs is not None
    assert test_obs.planes is not None
    assert len(test_obs.planes) == 1
    test_plane = test_obs.planes['HI-line']
    assert test_plane.artifacts is not None
    assert len(test_plane.artifacts) == 1
    test_artifact = test_plane.artifacts[test_file_uri]
    assert test_artifact is not None
    test_part = test_artifact.parts['0']
    # remove the chunk bit, as it's part of other tests -
    # results in <caom2:chunks/> xml output
    test_part.chunks.pop()
    # set the ids to expected values
    test_obs._id = uuid.UUID('00000000000000001234567812345678')
    test_plane._id = uuid.UUID('00000000000000001234567812345678')
    test_artifact._id = uuid.UUID('00000000000000001234567812345678')
    test_part._id = uuid.UUID('00000000000000001234567812345678')
    output = BytesIO()
    ow = ObservationWriter(False, False, "caom2",
                           obs_reader_writer.CAOM20_NAMESPACE)
    ow.write(test_obs, output)
    result = output.getvalue().decode('UTF-8')
    output.close()
    assert result == EXPECTED_OBS_XML  # , result
Example #16
class SSHStorageFile(File):
    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        self.file = BytesIO()
        self._size = None

    @property
    def size(self):
        logger.debug("I am the size")
        if self._size is None:  # __init__ sets _size to None, so hasattr() would always be true
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        logger.debug("I am the read")
        self.file = self._storage._read(self._name)

        return self.file.read(num_bytes)

    def write(self, content):
        logger.debug("I am the write")
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = BytesIO(content)
        self._is_dirty = True

    def close(self):
        logger.debug("I am the close")
        if self._is_dirty:
            self._storage._save(self._name, self.file.getvalue())
        self.file.close()
Example #17
    def visit_immutation(self, node, children):
        context = self._final_context()
        child_type = children[0].expr_name

        if child_type == 'preview':
            if self.tool == 'httpie':
                command = ['http'] + context.httpie_args(self.method,
                                                         quote=True)
            else:
                assert self.tool == 'curl'
                command = ['curl'] + context.curl_args(self.method, quote=True)
            click.echo(' '.join(command))
        elif child_type == 'action':
            output = BytesIO()
            try:
                env = Environment(stdout=output, is_windows=False)
                httpie_main(context.httpie_args(self.method), env=env)
                content = output.getvalue()
            finally:
                output.close()

            # XXX: Work around a bug of click.echo_via_pager(). When you pass
            # a bytestring to echo_via_pager(), it converts the bytestring with
            # str(b'abc'), which makes it "b'abc'".
            if six.PY2:
                content = unicode(content, 'utf-8')  # noqa
            else:
                content = str(content, 'utf-8')
            click.echo_via_pager(content)

        return node
Example #18
File: snappy.py Project: wglass/kiel
def compress(data):
    """
    Compresses given data via the snappy algorithm.

    The result is preceded with a header containing the string 'SNAPPY' and the
    default and min-compat versions (both ``1``).

    The block size for the compression is hard-coded at 32kb.

    If ``python-snappy`` is not installed a ``RuntimeError`` is raised.
    """
    if not snappy_available:
        raise RuntimeError("Snappy compression unavailable.")

    buff = BytesIO()
    buff.write(raw_header)

    for block_num in range(0, len(data), BLOCK_SIZE):
        block = data[block_num:block_num + BLOCK_SIZE]
        compressed = snappy.compress(block)

        buff.write(struct.pack("!i", len(compressed)))
        buff.write(compressed)

    result = buff.getvalue()

    buff.close()

    return result
Example #19
 def test_writing_1_record(self):
     expected = r"""
         <?xml version="1.0" encoding="UTF-8"?>
         <collection xmlns="http://www.loc.gov/MARC21/slim">
         <record>
         <leader>          22        4500</leader>
         <datafield ind1="0" ind2="0" tag="100">
         <subfield code="a">me</subfield>
         </datafield>
         <datafield ind1="0" ind2="0" tag="245">
         <subfield code="a">Foo /</subfield>
         <subfield code="c">by me.</subfield>
         </datafield>
         </record>
         </collection>
     """
     expected = textwrap.dedent(expected[1:]).replace('\n', '')
     if str != binary_type:
         expected = expected.encode()
     file_handle = BytesIO()
     try:
         writer = pymarc.XMLWriter(file_handle)
         record = pymarc.Record()
         record.add_field(
             pymarc.Field('100', ['0', '0'], ['a', u('me')]))
         record.add_field(
             pymarc.Field(
                 '245',
                 ['0', '0'],
                 ['a', u('Foo /'), 'c', u('by me.')]))
         writer.write(record)
         writer.close(close_fh=False)
          self.assertEqual(file_handle.getvalue(), expected)
     finally:
         file_handle.close()
Example #20
File: matrix.py Project: Xowap/gibi
    def test_write(self):
        r = StringIO('ab')
        n = FrenchNormalizer(r)
        m = Matrix()
        w = BytesIO()

        m.feed(n)
        m.dump(w)

        expected = {
            (None,): {
                False: 1,
            },
            (False,): {
                'a': 1,
            },
            ('a',): {
                'b': 1,
            },
            ('b',): {
                True: 1
            }
        }

        print(pickle.loads(w.getvalue()))
        print(expected)

        assert pickle.loads(w.getvalue()) == expected
        w.close()
Example #21
File: conn.py Project: pashinin/restkit
    def __init__(self, host, port, backend_mod=None, pool=None,
                 is_ssl=False, extra_headers=[], proxy_pieces=None, **ssl_args):

        # connect the socket, if we are using an SSL connection, we wrap
        # the socket.
        self._s = backend_mod.Socket(socket.AF_INET, socket.SOCK_STREAM)
        self._s.connect((host, port))
        if proxy_pieces:
            self._s.sendall(proxy_pieces)
            response = BytesIO()  # recv() returns bytes, so buffer them as bytes
            while response.getvalue()[-4:] != b'\r\n\r\n':
                response.write(self._s.recv(1))
            response.close()
        if is_ssl:
            self._s = ssl.wrap_socket(self._s, **ssl_args)

        self.extra_headers = extra_headers
        self.is_ssl = is_ssl
        self.backend_mod = backend_mod
        self.host = host
        self.port = port
        self._connected = True
        self._life = time.time() - random.randint(0, 10)
        self._pool = pool
        self._released = False
Example #22
def output_properties(path=None, content=None, basename=None, pseduo_location=False):
    checksum = hashlib.sha1()
    properties = {
        "class": "File",
    }
    if path is not None:
        properties["path"] = path
        f = open(path, "rb")
    else:
        f = BytesIO(content)

    try:
        contents = f.read(1024 * 1024)
        filesize = 0
        while contents:
            checksum.update(contents)
            filesize += len(contents)
            contents = f.read(1024 * 1024)
    finally:
        f.close()
    properties["checksum"] = "sha1$%s" % checksum.hexdigest()
    properties["size"] = filesize
    set_basename_and_derived_properties(properties, basename)
    _handle_pseudo_location(properties, pseduo_location)
    return properties
Example #23
 def test_writing_1_record(self):
     expected = r"""
         <?xml version="1.0" encoding="UTF-8"?>
         <collection xmlns="http://www.loc.gov/MARC21/slim">
         <record>
         <leader>          22        4500</leader>
         <datafield ind1="0" ind2="0" tag="100">
         <subfield code="a">me</subfield>
         </datafield>
         <datafield ind1="0" ind2="0" tag="245">
         <subfield code="a">Foo /</subfield>
         <subfield code="c">by me.</subfield>
         </datafield>
         </record>
         </collection>
     """
     expected = textwrap.dedent(expected[1:]).replace('\n', '')
     if str != binary_type:
         expected = expected.encode()
     file_handle = BytesIO()
     try:
         writer = pymarc.XMLWriter(file_handle)
         record = pymarc.Record()
         record.add_field(pymarc.Field('100', ['0', '0'], ['a', u('me')]))
         record.add_field(
             pymarc.Field(
                 '245', ['0', '0'],
                 ['a', u('Foo /'), 'c', u('by me.')]))
         writer.write(record)
         writer.close(close_fh=False)
          self.assertEqual(file_handle.getvalue(), expected)
     finally:
         file_handle.close()
Example #24
  def transfer_toClient(self, fileId, token, fileHelper):
    version = ""
    if isinstance(fileId, six.string_types):
      if fileId in ['CAs', 'CRLs']:
        return self.__transferFile(fileId, fileHelper)
      else:
        bId = fileId
    elif isinstance(fileId, (list, tuple)):
      if len(fileId) == 0:
        fileHelper.markAsTransferred()
        return S_ERROR("No bundle specified!")
      elif len(fileId) == 1:
        bId = fileId[0]
      else:
        bId = fileId[0]
        version = fileId[1]
    if not self.bundleManager.bundleExists(bId):
      fileHelper.markAsTransferred()
      return S_ERROR("Unknown bundle %s" % bId)

    bundleVersion = self.bundleManager.getBundleVersion(bId)
    if bundleVersion is None:
      fileHelper.markAsTransferred()
      return S_ERROR("Empty bundle %s" % bId)

    if version == bundleVersion:
      fileHelper.markAsTransferred()
      return S_OK(bundleVersion)

    buffer_ = BytesIO(self.bundleManager.getBundleData(bId))
    result = fileHelper.DataSourceToNetwork(buffer_)
    buffer_.close()
    if not result['OK']:
      return result
    return S_OK(bundleVersion)
Example #25
def test_decode_response_gzip():
    body = b"gzip message"

    buf = BytesIO()
    f = gzip.GzipFile("a", fileobj=buf, mode="wb")
    f.write(body)
    f.close()

    compressed_body = buf.getvalue()
    buf.close()
    gzip_response = {
        "body": {
            "string": compressed_body
        },
        "headers": {
            "access-control-allow-credentials": ["true"],
            "access-control-allow-origin": ["*"],
            "connection": ["keep-alive"],
            "content-encoding": ["gzip"],
            "content-length": ["177"],
            "content-type": ["application/json"],
            "date": ["Wed, 02 Dec 2015 19:44:32 GMT"],
            "server": ["nginx"],
        },
        "status": {
            "code": 200,
            "message": "OK"
        },
    }
    decoded_response = decode_response(gzip_response)
    assert decoded_response["body"]["string"] == body
    assert decoded_response["headers"]["content-length"] == [str(len(body))]
Example #26
    def request(self, method, uri, xml_root=None):
        """
        :type method: str
        :type uri: str
        :type xml_root: ETree.Element
        :rtype: ETree.Element
        :raises ClarityException: if Clarity returns an exception as XML
        """
        request_start_seconds = time.perf_counter() if self.log_requests else 0
        if xml_root is None:
            response = self.raw_request(method, uri)
        else:
            # Falls back to StringIO and regular string for Python 2
            outbuffer = BytesIO(b('<?xml version="1.0" encoding="UTF-8"?>\n'))
            ETree.ElementTree(xml_root).write(outbuffer)
            outbuffer.seek(0)
            log.debug("Data for request: %s", outbuffer.read())
            outbuffer.seek(0)
            response = self.raw_request(
                method,
                uri,
                data=outbuffer.getvalue(),
                headers={'Content-Type': 'application/xml'})
            outbuffer.close()

        xml_response_root = ETree.XML(
            response.content) if response.content else None

        if self.log_requests:
            request_elapsed_seconds = (
                time.perf_counter() - request_start_seconds)
            log.info("clarity request method: '%s' uri: %s took: %.3f s",
                     method, uri, request_elapsed_seconds)

        return xml_response_root
Example #27
def check_pickle(obj):
    fh = BytesIO()
    cPickle.dump(obj, fh, protocol=cPickle.HIGHEST_PROTOCOL)
    plen = fh.tell()
    fh.seek(0, 0)
    res = cPickle.load(fh)
    fh.close()
    return res, plen
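A quick, hypothetical call of the helper above (cPickle is assumed to be the pickle module or six.moves.cPickle):

    obj = {'alpha': [1, 2, 3]}
    restored, nbytes = check_pickle(obj)
    assert restored == obj and nbytes > 0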
Example #28
def image_to_png(img):
    """Convert a PIL image to a PNG binary string."""
    exp = BytesIO()
    img.save(exp, format='png')
    exp.seek(0)
    s = exp.read()
    exp.close()
    return s
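The seek()/read() pair above can be collapsed with getvalue(); a minimal equivalent sketch, assuming Pillow's Image.save() API:

    def image_to_png_bytes(img):
        """Same conversion, using getvalue() instead of seek() + read()."""
        with BytesIO() as exp:
            img.save(exp, format='png')
            return exp.getvalue()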
Example #29
    def test_safe_md5(self):
        exp = 'ab07acbb1e496801937adfa772424bf7'

        fd = BytesIO(b'foo bar baz')
        obs = safe_md5(fd)
        self.assertEqual(obs.hexdigest(), exp)

        fd.close()
Example #30
File: smopy.py Project: vins31/smopy
def image_to_png(img):
    """Convert a PIL image to a PNG binary string."""
    exp = BytesIO()
    img.save(exp, format='png')
    exp.seek(0)
    s = exp.read()
    exp.close()
    return s
Example #31
    def test_safe_md5(self):
        exp = 'ab07acbb1e496801937adfa772424bf7'

        fd = BytesIO(b'foo bar baz')
        obs = safe_md5(fd)
        self.assertEqual(obs.hexdigest(), exp)

        fd.close()
Example #32
    def test_safe_md5(self):
        """Make sure we have the expected md5"""
        exp = 'ab07acbb1e496801937adfa772424bf7'

        fd = BytesIO(b'foo bar baz')
        obs = safe_md5(fd)
        self.assertEqual(obs.hexdigest(), exp)

        fd.close()
Example #33
    def test_safe_md5(self):
        """Make sure we have the expected md5"""
        exp = 'ab07acbb1e496801937adfa772424bf7'

        fd = BytesIO(b'foo bar baz')
        obs = safe_md5(fd)
        self.assertEqual(obs.hexdigest(), exp)

        fd.close()
Example #34
def getBytes(tx):
	"""
	Hash transaction object into bytes data.

	Argument:
	tx (dict) -- transaction object

	Return bytes sequence
	"""
	buf = BytesIO()
	# write type and timestamp
	pack("<bi", buf, (tx["type"], int(tx["timestamp"])))
	# write senderPublicKey as bytes in buffer
	if "senderPublicKey" in tx:
		pack_bytes(buf, unhexlify(tx["senderPublicKey"]))
	# if there is a requesterPublicKey
	if "requesterPublicKey" in tx:
		pack_bytes(buf, unhexlify(tx["requesterPublicKey"]))
	# if there is a recipientId
	if tx.get("recipientId", False):
		recipientId = tx["recipientId"]
		recipientId = base58.b58decode_check(str(recipientId) if not isinstance(recipientId, bytes) \
			else recipientId)
	else:
		recipientId = b"\x00" * 21
	pack_bytes(buf, recipientId)
	# if there is a vendorField
	if tx.get("vendorField", False):
		vendorField = tx["vendorField"][:64].ljust(64, "\x00")
	else:
		vendorField = "\x00" * 64
	pack_bytes(buf, vendorField.encode("utf-8"))
	# write amount and fee value
	pack("<QQ", buf, (int(tx["amount"]), int(tx["fee"])))
	# if there is asset data
	if tx.get("asset", False):
		asset = tx["asset"]
		typ = tx["type"]
		if typ == 1 and "signature" in asset:
			pack_bytes(buf, unhexlify(asset["signature"]["publicKey"]))
		elif typ == 2 and "delegate" in asset:
			pack_bytes(buf, asset["delegate"]["username"].encode("utf-8"))
		elif typ == 3 and "votes" in asset:
			pack_bytes(buf, "".join(asset["votes"]).encode("utf-8"))
		else:
			pass
	# if there is a signature
	if tx.get("signature", False):
		pack_bytes(buf, unhexlify(tx["signature"]))
	# if there is a second signature
	if tx.get("signSignature", False):
		pack_bytes(buf, unhexlify(tx["signSignature"]))

	result = buf.getvalue()
	buf.close()
	return result
Example #35
def check_binary(name, file_path=True):
    # Handles files if file_path is True or text if file_path is False
    if file_path:
        temp = open(name, "rb")
    else:
        temp = BytesIO(name)
    try:
        return util.is_binary(temp.read(1024))
    finally:
        temp.close()
Example #36
    def test_50_get(self):
        io = BytesIO()
        self.webdav.download('handler.py', io)
        self.assertEqual(utils.text(inspect.getsource(data_handler)), utils.text(io.getvalue()))
        io.close()

        io = BytesIO()
        self.webdav.download('sample_handler.py', io)
        self.assertEqual(utils.text(inspect.getsource(data_sample_handler)), utils.text(io.getvalue()))
        io.close()
Example #37
def make_image(image, height, width, channel):
    """Convert an numpy representation image to Image protobuf"""
    output = BytesIO()
    image.save(output, format='PNG')
    image_string = output.getvalue()
    output.close()
    return Summary.Image(height=height,
                         width=width,
                         colorspace=channel,
                         encoded_image_string=image_string)
Example #38
    def test_50_get(self):
        io = BytesIO()
        self.webdav.download('handler.py', io)
        self.assertEqual(inspect.getsource(data_handler), io.getvalue())
        io.close()

        io = BytesIO()
        self.webdav.download('sample_handler.py', io)
        self.assertEqual(inspect.getsource(data_sample_handler), io.getvalue())
        io.close()
Example #39
def check_binary(name, file_path=True):
    # Handles files if file_path is True or text if file_path is False
    if file_path:
        temp = open(name, "rb")
    else:
        temp = BytesIO(name)
    try:
        return util.is_binary(temp.read(1024))
    finally:
        temp.close()
Example #40
def xml_c14nize(data):
    """ Returns a canonical value of an XML document.
    """
    if not isinstance(data, etree._Element):
        data = etree.fromstring(data.encode('utf-8'))

    out = BytesIO()
    data.getroottree().write_c14n(out)
    value = out.getvalue()
    out.close()
    return value
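A short, hypothetical call of the canonicalizer above, assuming etree here is lxml.etree:

    doc = '<root><a b="1" a="2"/></root>'
    canonical = xml_c14nize(doc)
    # C14N sorts attributes and expands empty elements, giving roughly
    # b'<root><a a="2" b="1"></a></root>'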
Example #41
def xml_c14nize(data):
    """ Returns a canonical value of an XML document.
    """
    if not isinstance(data, etree._Element):
        data = etree.fromstring(data.encode('utf-8'))

    out = BytesIO()
    data.getroottree().write_c14n(out)
    value = out.getvalue()
    out.close()
    return value
Example #42
    def test_50_get(self):
        import easywebdav
        with self.assertRaises(easywebdav.OperationFailed):
            io = BytesIO()
            self.webdav.download('handler.py', io)
            io.close()

        io = BytesIO()
        self.webdav_up.download('handler.py', io)
        self.assertEqual(utils.text(inspect.getsource(data_handler)), utils.text(io.getvalue()))
        io.close()
Example #43
    def test_50_get(self):
        import easywebdav
        with self.assertRaises(easywebdav.OperationFailed):
            io = BytesIO()
            self.webdav.download('handler.py', io)
            io.close()

        io = BytesIO()
        self.webdav_up.download('handler.py', io)
        self.assertEqual(utils.text(inspect.getsource(data_handler)), utils.text(io.getvalue()))
        io.close()
Example #44
 def generate(self):
     """Generates the Gzipped sitemap."""
     xml = self.template()
     fp = BytesIO()
     gzip = GzipFile(self.filename, 'wb', 9, fp)
     if isinstance(xml, six.text_type):
         xml = xml.encode('utf8')
     gzip.write(xml)
     gzip.close()
     data = fp.getvalue()
     fp.close()
     return data
Example #45
 def generate(self):
     """Generates the Gzipped sitemap."""
     xml = self.template()
     fp = BytesIO()
     gzip = GzipFile(self.filename, 'wb', 9, fp)
     if isinstance(xml, six.text_type):
         xml = xml.encode('utf8')
     gzip.write(xml)
     gzip.close()
     data = fp.getvalue()
     fp.close()
     return data
Example #46
def get_qr_svg_code(totp_uri):
	'''Get SVG code to display Qrcode for OTP.'''
	url = qrcreate(totp_uri)
	svg = ''
	stream = BytesIO()
	try:
		url.svg(stream, scale=4, background="#eee", module_color="#222")
		svg = stream.getvalue().decode().replace('\n', '')
		svg = b64encode(svg.encode())
	finally:
		stream.close()
	return svg
Example #47
class QiniuFile(File):
    def __init__(self, name, storage, mode):
        self._storage = storage
        if name.startswith(self._storage.location):
            name = name[len(self._storage.location):]
        self._name = name.lstrip('/')
        self._mode = mode
        self.file = BytesIO()
        self._is_dirty = False
        self._is_read = False

    @property
    def size(self):
        if self._is_dirty or self._is_read:
            # Get the size of a file like object
            # Check http://stackoverflow.com/a/19079887
            old_file_position = self.file.tell()
            self.file.seek(0, os.SEEK_END)
            self._size = self.file.tell()
            self.file.seek(old_file_position, os.SEEK_SET)
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        if not self._is_read:
            content = self._storage._read(self._name)
            self.file = BytesIO(content)
            self._is_read = True

        if num_bytes is None:
            data = self.file.read()
        else:
            data = self.file.read(num_bytes)

        if 'b' in self._mode:
            return data
        else:
            return force_text(data)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")

        self.file.write(force_bytes(content))
        self._is_dirty = True
        self._is_read = True

    def close(self):
        if self._is_dirty:
            self.file.seek(0)
            self._storage._save(self._name, self.file)
        self.file.close()
Example #48
def get_qr_svg_code(totp_uri):
	'''Get SVG code to display Qrcode for OTP.'''
	url = qrcreate(totp_uri)
	svg = ''
	stream = BytesIO()
	try:
		url.svg(stream, scale=4, background="#eee", module_color="#222")
		svg = stream.getvalue().decode().replace('\n', '')
		svg = b64encode(svg.encode())
	finally:
		stream.close()
	return svg
Example #49
class UniversalBytesIO(object):

    def __init__(self, container=None, charset=None):
        self.charset = charset or settings.DEFAULT_CHARSET
        self._container = BytesIO() if container is None else container

    # These methods partially implement the file-like object interface.
    # See https://docs.python.org/3/library/io.html#io.IOBase

    def close(self):
        self._container.close()

    def write(self, content):
        self._container.write(self.make_bytes(content))

    def flush(self):
        self._container.flush()

    def tell(self):
        return self._container.tell()

    def readable(self):
        return False

    def seekable(self):
        return False

    def writable(self):
        return True

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def make_bytes(self, value):
        """Turn a value into a bytestring encoded in the output charset."""
        if isinstance(value, bytes):
            return bytes(value)
        if isinstance(value, six.text_type):
            return bytes(value.encode(self.charset))

        # Handle non-string types
        return force_bytes(value, self.charset)

    def get_string_value(self):
        return self._container.getvalue().decode(self.charset)

    def getvalue(self):
        return self._container.getvalue()

    if sys.version_info[0:2] < (3, 5):
        def seek(self, *args, **kwargs):
            pass
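A small usage sketch for the wrapper above, assuming the Django-style settings.DEFAULT_CHARSET and force_bytes() helpers it references are importable:

    buf = UniversalBytesIO(charset='utf-8')
    buf.writelines(['header\n', b'body\n', 42])   # str, bytes and other types are coerced
    assert buf.getvalue() == b'header\nbody\n42'
    assert buf.get_string_value() == 'header\nbody\n42'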
Example #50
def generateErrorMessagePlot(msgText):
    """
  It creates a plot whith a specific error message

  :param str msgText: the text which will appear on the plot.
  :return: the plot.
  """
    fn = BytesIO()
    textGraph(msgText, fn, {})
    data = fn.getvalue()
    fn.close()
    return S_OK(data)
Example #51
 def _read_file(self, file_path, is_inner=False):
     try:
         output = BytesIO(self.read(file_path))
         result = output.getvalue()
         output.close()
     except KeyError:
         try:
             if is_inner:
                 raise KeyError
             result = self._get_inner_zip()._read_file(file_path, True)
         except KeyError:
             raise Exception(_('Unable to find file "%s" in manifest.') % file_path)
     return result
Example #52
class TMemoryBuffer(TTransportBase, CReadableTransport):
  """Wraps a cStringIO object as a TTransport.

  NOTE: Unlike the C++ version of this class, you cannot write to it
        then immediately read from it.  If you want to read from a
        TMemoryBuffer, you must pass a string to the constructor.
  TODO(dreiss): Make this work like the C++ version.
  """

  def __init__(self, value=None):
    """value -- a value to read from for stringio

    If value is set, this will be a transport for reading,
    otherwise, it is for writing"""
    if value is not None:
      self._buffer = BytesIO(value)
    else:
      self._buffer = BytesIO()

  def isOpen(self):
    return not self._buffer.closed

  def open(self):
    pass

  def close(self):
    self._buffer.close()

  def read(self, sz):
    return self._buffer.read(sz)

  def write(self, buf):
    try:
      self._buffer.write(buf)
    except TypeError:
      self._buffer.write(buf.encode('cp437'))

  def flush(self):
    pass

  def getvalue(self):
    return self._buffer.getvalue()

  # Implement the CReadableTransport interface.
  @property
  def cstringio_buf(self):
    return self._buffer

  def cstringio_refill(self, partialread, reqlen):
    # only one shot at reading...
    raise EOFError()
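A minimal sketch of the two modes described in the docstring above: construct with bytes to read, construct empty to write.

    rbuf = TMemoryBuffer(b'abc')   # reading transport
    assert rbuf.read(3) == b'abc'

    wbuf = TMemoryBuffer()         # writing transport
    wbuf.write(b'xyz')
    assert wbuf.getvalue() == b'xyz'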
Example #53
 def tobuffer(kv):
     firstkey = None
     buf = BytesIO()
     for k, v in kv:
         if firstkey is None:
             firstkey = k
         buf.write(v.tostring())
     val = buf.getvalue()
     buf.close()
     if firstkey is None:
         return iter([])
     else:
         label = prefix + '-' + getlabel(firstkey) + ".bin"
         return iter([(label, val)])
Example #54
    def uploadChunk(self, upload, chunk):
        """
        Appends the chunk into the temporary file.
        """
        # If we know the chunk size is too large or small, fail early.
        self.checkUploadSize(upload, self.getChunkSize(chunk))

        if isinstance(chunk, six.text_type):
            chunk = chunk.encode('utf8')

        if isinstance(chunk, six.binary_type):
            chunk = BytesIO(chunk)

        # Restore the internal state of the streaming SHA-512 checksum
        checksum = _hash_state.restoreHex(upload['sha512state'], 'sha512')

        if self.requestOffset(upload) > upload['received']:
            # This probably means the server died midway through writing last
            # chunk to disk, and the database record was not updated. This
            # means we need to update the sha512 state with the difference.
            with open(upload['tempFile'], 'rb') as tempFile:
                tempFile.seek(upload['received'])
                while True:
                    data = tempFile.read(BUF_SIZE)
                    if not data:
                        break
                    checksum.update(data)

        with open(upload['tempFile'], 'a+b') as tempFile:
            size = 0
            while not upload['received'] + size > upload['size']:
                data = chunk.read(BUF_SIZE)
                if not data:
                    break
                size += len(data)
                tempFile.write(data)
                checksum.update(data)
        chunk.close()

        try:
            self.checkUploadSize(upload, size)
        except ValidationException:
            with open(upload['tempFile'], 'a+b') as tempFile:
                tempFile.truncate(upload['received'])
            raise

        # Persist the internal state of the checksum
        upload['sha512state'] = _hash_state.serializeHex(checksum)
        upload['received'] += size
        return upload
Example #55
def decompress(data):
    """
    Decompresses given data via the ``gzip`` module.

    Decoding is left as an exercise for the client code.
    """
    buff = BytesIO(data)

    with gzip.GzipFile(fileobj=buff, mode='r') as fd:
        result = fd.read()

    buff.close()

    return result
Example #56
def response_to_json(response):
    """
    hack for requests in python 2.6
    """

    content = response.content
    # hack for requests in python 2.6
    if 'application/json' in response.headers['Content-Type']:
        if content[:2] == '\x1f\x8b':  # gzip file magic header
            f = BytesIO(content)
            g = gzip.GzipFile(fileobj=f)
            content = g.read()
            g.close()
            f.close()
    return json.loads(str(content.decode('utf-8')))
Example #57
 def test_writing_0_records(self):
     expected = r"""
         <?xml version="1.0" encoding="UTF-8"?>
         <collection xmlns="http://www.loc.gov/MARC21/slim">
         </collection>
     """
     expected = textwrap.dedent(expected[1:]).replace('\n', '')
     if str != binary_type:
         expected = expected.encode()
     file_handle = BytesIO()
     try:
         writer = pymarc.XMLWriter(file_handle)
         writer.close(close_fh=False)
          self.assertEqual(file_handle.getvalue(), expected)
     finally:
         file_handle.close()
Example #58
    def visit_immutation(self, node, children):
        context = self._final_context()
        child_type = children[0].expr_name

        if child_type == 'preview':
            if self.tool == 'httpie':
                command = ['http'] + context.httpie_args(self.method,
                                                         quote=True)
            else:
                assert self.tool == 'curl'
                command = ['curl'] + context.curl_args(self.method, quote=True)
            click.echo(' '.join(command))
        elif child_type == 'action':
            output = BytesIO()
            try:
                env = Environment(stdout=output, is_windows=False)

                # XXX: httpie_main() doesn't provide an API for us to get the
                # HTTP response object, so we use this super dirty hack -
                # sys.settrace() to intercept get_response() that is called in
                # httpie_main() internally. The HTTP response intercepted is
                # assigned to self.last_response, which may be useful for
                # self.listener.
                sys.settrace(self._trace_get_response)
                try:
                    httpie_main(context.httpie_args(self.method), env=env)
                finally:
                    sys.settrace(None)

                content = output.getvalue()
            finally:
                output.close()

            # XXX: Work around a bug of click.echo_via_pager(). When you pass
            # a bytestring to echo_via_pager(), it converts the bytestring with
            # str(b'abc'), which makes it "b'abc'".
            if six.PY2:
                content = unicode(content, 'utf-8')  # noqa
            else:
                content = str(content, 'utf-8')
            click.echo_via_pager(content)

            if self.last_response:
                self.listener.response_returned(self.context,
                                                self.last_response)

        return node
Example #59
def compress(data):
    """
    Compresses a given bit of data via the ``gzip`` stdlib module.

    .. note::

      This assumes the given data is a byte string, already decoded.
    """
    buff = BytesIO()

    with gzip.GzipFile(fileobj=buff, mode='w') as fd:
        fd.write(data)

    buff.seek(0)
    result = buff.read()

    buff.close()

    return result
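A brief, hypothetical round trip pairing this compress() with the gzip decompress() from Example #55 (both assumed to live in the same module):

    raw = u"héllo wörld".encode("utf-8")   # compress() expects a byte string
    packed = compress(raw)
    assert decompress(packed) == raw       # decoding back to text is left to the caller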