Example #1
0
class InMemoryFile:

    implements(ISFTPFile)

    def __init__(self, data=""):
        """Create an in-memory fake file pre-populated with `data`."""
        self.data = StringIO(data)

    def close(self):
        """'Close' by rewinding to the start.

        NOTE(review): the buffer is kept alive (only rewound), presumably so
        tests can re-read contents after a close — confirm against callers.
        """
        self.data.seek(0)

    def readChunk(self, offset, length):
        """Return up to `length` bytes starting at byte `offset`."""
        stream = self.data
        stream.seek(offset)
        return stream.read(length)

    def writeChunk(self, offset, data):
        """Write `data` starting at byte `offset`.

        Bug fix: the previous implementation ignored `offset` and wrote at
        whatever position the stream cursor happened to be, corrupting any
        non-sequential SFTP write. Seek to the requested offset first, as the
        ISFTPFile contract requires.
        """
        self.data.seek(offset)
        self.data.write(data)

    def getAttrs(self):
        """Return fake stat-style attributes; only `size` is computed."""
        # Measure the buffer length without disturbing the current position.
        saved = self.data.tell()
        self.data.seek(0, 2)
        total = self.data.tell()
        self.data.seek(saved)
        return {"size": total, "uid": 22, "gid": 23, "permissions": 33261, "atime": 1322669000, "mtime": 1322669000}

    def setAttrs(self, attrs):
        """
class StdinIOStringWrapper(object):
    """In-memory stand-in for stdin, backed by a single StringIO buffer.

    The buffer's cursor doubles as the read position; writes restore the
    cursor afterwards so pending reads are unaffected.
    """

    def __init__(self):
        self.buffer = StringIO()

    def __len__(self):
        # Number of characters still unread.
        return self._buffer_len() - self.buffer.tell()

    def _buffer_len(self):
        # Total buffer size, measured without moving the read cursor.
        saved = self.buffer.tell()
        self.buffer.seek(0, os.SEEK_END)
        total = self.buffer.tell()
        self.buffer.seek(saved)
        return total

    def readline(self):
        # An exhausted buffer signals the test harness instead of
        # returning an empty string.
        if self.buffer.tell() == self._buffer_len():
            raise UnitTestNoMoreEventsException("No more events")
        return self.buffer.readline()

    def read(self, n=-1):
        return self.buffer.read(n)

    def write(self, buf):
        log.debug(
            colored("StdinIOStringWrapper.write() EVENT: '%s'" % buf, 'red'))
        # Write at the current cursor, then restore it so the next read
        # continues from where it left off.
        saved = self.buffer.tell()
        self.buffer.write(buf)
        self.buffer.seek(saved)
Example #3
0
def do_http(method, url, body="", event_stream=False):
    """Open an HTTP(S) connection and send the request headers (Python 2 httplib).

    `event_stream=True` asks for a text/event-stream response; otherwise
    plain text / octet-stream is accepted. Returns a BadResponse wrapper if
    the socket fails while sending headers.
    """
    assert isinstance(body, str)
    body = StringIO(body)

    scheme, host, port, path = parse_url(url)
    # Choose the connection class from the URL scheme.
    if scheme == "http":
        c = httplib.HTTPConnection(host, port)
    elif scheme == "https":
        c = httplib.HTTPSConnection(host, port)
    else:
        raise ValueError("unknown scheme '%s', need http or https" % scheme)
    c.putrequest(method, path)
    c.putheader("Hostname", host)
    c.putheader("User-Agent", "petmail client")
    if event_stream:
        c.putheader("Accept", "text/event-stream")
    else:
        c.putheader("Accept", "text/plain, application/octet-stream")
    c.putheader("Connection", "close")

    # Measure the body length without losing the current stream position.
    old = body.tell()
    body.seek(0, os.SEEK_END)
    length = body.tell()
    body.seek(old)
    c.putheader("Content-Length", str(length))

    try:
        c.endheaders()
    except socket_error, err:
        return BadResponse(url, err)
 def tell(self):
     """Return the current position, blocking until data is complete at EOF.

     NOTE(review): appears to belong to a thread-aware StringIO subclass —
     `self.cond` guards position reads and `self.done` looks like an event
     set by the writer; confirm against the enclosing class.
     """
     with self.cond:
         pos = StringIO.tell(self)
         # TODO this may lead to horrible performance, work on that
         if pos < len(self.getvalue())-1:
             return pos
     self.done.wait()
     return StringIO.tell(self)
Example #5
0
    def test_no_preload(self):
        """With preload disabled, the body is only read on .data access."""
        fp = StringIO('foo')
        response = HTTPResponse(fp, preload_content=False)
        # Nothing has been consumed yet.
        self.assertEqual(fp.tell(), 0)
        # Accessing .data drains the underlying file completely.
        self.assertEqual(response.data, 'foo')
        self.assertEqual(fp.tell(), fp.len)
Example #6
0
    def test_no_preload(self):
        """preload_content=False must not touch the stream until .data."""
        fp = StringIO('foo')
        r = HTTPResponse(fp, preload_content=False)
        self.assertEqual(fp.tell(), 0)       # untouched so far
        self.assertEqual(r.data, 'foo')      # lazy read happens here
        self.assertEqual(fp.tell(), fp.len)  # fully consumed afterwards
Example #7
0
    def test_rollback_to_stream(self):
        """Comparing via FileObjMatcher must not move either stream cursor."""
        source_data = StringIO('hello world')
        other_data = StringIO('hello world')
        source_data.seek(1)
        other_data.seek(2)
        self.assertFalse(FileObjMatcher(source_data) == other_data)
        # Both positions are restored after the comparison.
        self.assertEqual(source_data.tell(), 1)
        self.assertEqual(other_data.tell(), 2)
Example #8
0
class FlickrFile(object):

    """A file-like object representing a file on flickr. Caches with a StringIO object"""

    def __init__(self, imageid, name, data):
        self.imageid = imageid
        self.name = name
        self.stringio = StringIO(data)
        self.closed = False
        self.newlines = ('\r', '\n', '\r\n')
        self.flush()

    def close(self):
        self.flush()
        self.closed = True

    def _stringio_get_data(self):
        old_seek = self.stringio.tell()
        self.stringio.seek(0)
        data = self.stringio.read()
        self.stringio.seek(old_seek)
        return data

    def flush(self):
        with tempfile.NamedTemporaryFile() as tf:
            data_to_png(self._stringio_get_data()).save(tf, 'png')
            if self.imageid:
                flickr.replace(filename=tf.name, photo_id=self.imageid, title=self.name, description=str(len(data)), format='bs4')
            else:
                self.imageid = flickr.upload(filename=tf.name, title=self.name, description=str(len(data)), format='bs4').photoid.text

    def iter(self):
        return self

    def next(self):
        return self.stringio.next()

    def read(self, size=-1):
        return self.stringio.read(size)

    def readline(self, size=-1):
        return self.stringio.read(size)

    def seek(self, offset, whence=0):
        return self.stringio.seek(offset, whence)

    def tell(self):
        return self.stringio.tell()

    def truncate(self, size=0):
        return self.stringio.truncate(size)

    def write(self, data):
        return self.stringio.write(data)

    def writelines(self, seq):
        return self.stringio.writelines(seq)
Example #9
0
    def test_rollback_to_stream(self):
        """A failed matcher comparison leaves both streams where they were."""
        source_data = StringIO('hello world')
        source_data.seek(1)
        other_data = StringIO('hello world')
        other_data.seek(2)
        matched = FileObjMatcher(source_data) == other_data
        self.assertFalse(matched)
        self.assertEqual(source_data.tell(), 1)
        self.assertEqual(other_data.tell(), 2)
Example #10
0
class ZipExtra:
    """Parser for the 'extra field' blob attached to a zip entry header.

    `all_extra` holds the raw header record for every signature seen;
    `parsed_extra` holds fully decoded records for the known signatures.
    """

    AES = Signature.EXTRA_AES
    UPEF = Signature.EXTRA_UPEF  # Unicode Path Extra Field
    ZIP64 = Signature.EXTRA_ZIP64

    @property
    def structs(self):
        # Map signature -> struct able to decode the full record.
        return {
            self.AES: struct_extra_aes,
            self.UPEF: struct_extra_upef,
            self.ZIP64: struct_extra_zip64,
        }

    def __init__(self, bytes):
        """Parse `bytes` (the raw extra-field data) immediately."""
        self.parsed_extra = {}
        self.all_extra = {}

        self.bytes = bytes
        self.stream = StringIO(bytes)
        self.parse()

    def parse(self):
        """Walk the stream record by record until the blob is exhausted."""
        length = len(self.bytes)
        last_tell = self.stream.tell()
        while last_tell < length:
            # Read just the generic header to learn signature and length.
            extra = struct_extra_header.parseStream(self.stream)
            self.all_extra[extra.signature] = extra
            struct_detail = self.structs.get(extra.signature)
            if struct_detail:
                # Known signature: rewind and re-parse the whole record.
                self.stream.seek(last_tell)
                self.parsed_extra[extra.signature] = struct_detail.parseStream(self.stream)
            else:
                # Unknown signature: skip over its payload.
                self.stream.seek(last_tell + extra.size() + extra.data_length)
            last_tell = self.stream.tell()

    def getExtra(self, signature):
        """Return the decoded record for `signature`, or None."""
        return self.parsed_extra.get(signature)

    def pack(self):
        """Re-serialize all decoded records into a single byte string."""
        content = ''
        for _, extra in self.parsed_extra.iteritems():
            content += extra.pack()
        return content

    def __repr__(self):
        out = ['ZipExtra:']
        for _, extra in self.parsed_extra.iteritems():
            lines = extra.__repr__().split('\n')
            out += [' ' * 4 + line for line in lines]

        return '\n'.join(out)
Example #11
0
    def testParsePass1(self):
        """Parse the fixture records and verify TableCaption/TableCell models.

        Checks the stream position after each model plus every decoded
        attribute. NOTE(review): Python 2 source (long literals like 0x0L).
        """
        from hwp5.binmodel import TableCaption, TableCell
        from hwp5.binmodel import parse_models_intern
        stream = StringIO(self.records_bytes)
        records = list(read_records(stream))
        result = list(parse_models_intern(self.ctx, records))

        tablecaption = result[1]
        context, model = tablecaption
        model_type = model['type']
        model_content = model['content']
        stream = context['stream']

        self.assertEquals(TableCaption, model_type)
        self.assertEquals(22, stream.tell())
        # ListHeader attributes
        self.assertEquals(2, model_content['paragraphs'])
        self.assertEquals(0x0L, model_content['listflags'])
        self.assertEquals(0, model_content['unknown1'])
        # TableCaption model_content
        self.assertEquals(3, model_content['flags'])
        self.assertEquals(8504L, model_content['width'])
        self.assertEquals(850, model_content['separation'])
        self.assertEquals(40454L, model_content['maxsize'])

        tablecell = result[3]
        context, model = tablecell
        model_type = model['type']
        model_content = model['content']
        stream = context['stream']
        self.assertEquals(TableCell, model_type)
        self.assertEquals(38, stream.tell())
        # ListHeader model_content
        self.assertEquals(1, model_content['paragraphs'])
        self.assertEquals(0x20L, model_content['listflags'])
        self.assertEquals(0, model_content['unknown1'])
        # TableCell model_content
        self.assertEquals(0, model_content['col'])
        self.assertEquals(0, model_content['row'])
        self.assertEquals(1, model_content['colspan'])
        self.assertEquals(1, model_content['rowspan'])
        self.assertEquals(0x4f03, model_content['width'])
        self.assertEquals(0x11a, model_content['height'])
        self.assertEquals(dict(left=141, right=141, top=141, bottom=141),
                          model_content['padding'])
        self.assertEquals(
            1,
            model_content['borderfill_id'],
        )
        self.assertEquals(0x4f03, model_content['unknown_width'])
Example #12
0
class StreamReceiver(Protocol):
    """
    A Twisted Protocol that receives data from the runtime, and recombines it
    into a data stream.

    Whenever the stream is updated, the streamReceived() method is called on
    the implementation, which can choose to consume some of the data.
    """

    stream = None

    def __init__(self):
        pass

    def connectionMade(self):
        """Set up an empty in-memory buffer for this connection."""
        self.stream = StringIO()

    def dataReceived(self, data):
        """Queue incoming bytes, notify the implementation, then compact.

        If the implementation has consumed everything, the buffer is
        truncated so it does not grow without bound.
        """
        self.__enqueue(data)
        self.streamReceived()
        if self.stream.tell() == self.stream.len:
            self.stream.truncate(0)

    def __enqueue(self, data):
        """Append `data` at the end of the stream, preserving the read cursor."""
        saved = self.stream.tell()
        self.stream.seek(self.stream.len)
        self.stream.write(data)
        self.stream.seek(saved)
        
Example #13
0
def dummy_base64_file(name='file_test', format='csv', **kwargs):
    """Build a fake base64-encoded file payload for tests.

    :param name: file name (without extension)
    :param format: file extension (default ``csv``)
    :param content: file content (default ``'Dummy,Content'``)
    :param content_type: MIME type of the content (default ``text/csv``)

    Returns a dict shaped like::

        {
            "tamanho": 10290,            # size in bytes
            "formato": "text/csv",
            "nome_arquivo": "file_test.csv",
            "base64": "/019lasdjaslk19aslASdlk12-ASLKDJ123//ASDLAKj=="
        }

    Cleanup over the previous revision: removed the unused
    ``from PIL import Image`` (the image comes from dummy_image()) and a
    trailing ``file_obj.seek(0, os.SEEK_END)`` that had no effect because
    the buffer is never read afterwards.
    """
    content = kwargs.pop('content', "Dummy,Content")
    content_type = kwargs.pop('content_type', "text/csv")
    filename = "{0}.{1}".format(name, format)
    file_obj = StringIO()

    if format not in VALID_IMAGES:
        file_obj.write(content)
        filesize = file_obj.tell()
        data = base64.b64encode(content.encode()).decode("utf-8")
    else:
        image = dummy_image()
        # NOTE(review): this encodes the raw pixel buffer, not the encoded
        # image file written below — presumably intentional for fixtures,
        # but verify against consumers.
        data = base64.b64encode(image.tobytes()).decode("utf-8")
        file_obj = BytesIO()
        image.save(file_obj, format)
        filesize = file_obj.tell()
        content_type = IMAGE_TYPE.get(format)

    data_file = {}
    data_file["tamanho"] = filesize
    data_file["formato"] = content_type
    data_file["nome_arquivo"] = filename
    data_file["base64"] = data

    return data_file
Example #14
0
 def parseSEED(self, data, expected_length=0):
     """
     Parse given data for blockette fields and create attributes.

     ``data`` may be a string (wrapped in a StringIO) or a file-like
     object positioned at the blockette start. Field id 2 carries the real
     blockette length and overrides ``expected_length`` mid-parse. In
     strict mode a final length mismatch raises BlocketteLengthException.
     """
     # convert to stream for test issues
     if isinstance(data, basestring):
         expected_length = len(data)
         data = StringIO(data)
     start_pos = data.tell()
     # debug
     if self.debug:
         print(' DATA: %s' % (data.read(expected_length)))
         # rewind what the debug dump just consumed
         data.seek(-expected_length, 1)
     blockette_fields = self.default_fields + self.getFields()
     # loop over all blockette fields
     for field in blockette_fields:
         # if blockette length reached break with warning
         if data.tell() - start_pos >= expected_length:
             if not self.strict:
                 break
             if isinstance(field, Loop):
                 break
             msg = "End of blockette " + self.blockette_id + " reached " + \
                   "without parsing all expected fields, here: " + \
                   str(field)
             # NOTE(review): the warn branch below is unreachable — strict
             # is known True at this point (non-strict broke out above).
             if self.strict:
                 raise BlocketteLengthException(msg)
             else:
                 warnings.warn(msg, category=Warning)
             break
         field.parseSEED(self, data)
         if field.id == 2:
             # field 2 is the blockette length field; trust it from now on
             expected_length = field.data
     # strict tests
     if not self.strict:
         return
     # check length
     end_pos = data.tell()
     blockette_length = end_pos - start_pos
     if expected_length == blockette_length:
         return
     # wrong length
     msg = 'Wrong size of Blockette %s (%d of %d) in sequence %06d'
     msg = msg % (self.blockette_id, blockette_length,
                  expected_length, self.record_id or 0)
     if self.strict:
         raise BlocketteLengthException(msg)
     else:
         warnings.warn(msg, category=Warning)
Example #15
0
 def parseSEED(self, data, expected_length=0):
     """
     If number of FIR coefficients are larger than maximal blockette size of
     9999 chars a follow up blockette with the same blockette id and
     response lookup key is expected - this is checked here.

     Continuation blockettes (id 41, same lookup key) are concatenated into
     a temporary buffer, which is then parsed as one combined blockette.
     """
     # convert to stream for test issues
     if isinstance(data, basestring):
         expected_length = len(data)
         data = StringIO(data)
     # get current lookup key
     pos = data.tell()
     # skip blockette id (3 chars) + blockette length (4 chars)
     data.read(7)
     global_lookup_key = int(data.read(4))
     data.seek(pos)
     # read first blockette
     temp = StringIO()
     temp.write(data.read(expected_length))
     # check next blockettes
     while True:
         # save position
         pos = data.tell()
         try:
             blockette_id = int(data.read(3))
         except ValueError:
             # no further blockette header readable -> done
             break
         if blockette_id != 41:
             # different blockette id -> break
             break
         blockette_length = int(data.read(4))
         lookup_key = int(data.read(4))
         if lookup_key != global_lookup_key:
             # different lookup key -> break
             break
         # ok follow up blockette found - skip some unneeded fields
         self.fields[1].read(data)
         self.fields[2].read(data)
         self.fields[3].read(data)
         self.fields[4].read(data)
         self.fields[5].read(data)
         # remaining length in current blockette
         length = pos - data.tell() + blockette_length
         # read follow up blockette and append it to temporary blockette
         temp.write(data.read(length))
     # reposition file pointer
     data.seek(pos)
     # parse new combined temporary blockette
     temp.seek(0)
     Blockette.parseSEED(self, temp, expected_length=temp.len)
Example #16
0
 def parseSEED(self, data, expected_length=0):
     """
     If number of FIR coefficients are larger than maximal blockette size of
     9999 chars a follow up blockette with the same blockette id and
     response lookup key is expected - this is checked here.

     Continuation blockettes (id 41, same lookup key) are concatenated into
     a temporary buffer, which is then parsed as one combined blockette.
     """
     # convert to stream for test issues
     if isinstance(data, basestring):
         expected_length = len(data)
         data = StringIO(data)
     # get current lookup key
     pos = data.tell()
     # skip blockette id (3 chars) + blockette length (4 chars)
     data.read(7)
     global_lookup_key = int(data.read(4))
     data.seek(pos)
     # read first blockette
     temp = StringIO()
     temp.write(data.read(expected_length))
     # check next blockettes
     while True:
         # save position
         pos = data.tell()
         try:
             blockette_id = int(data.read(3))
         except ValueError:
             # no further blockette header readable -> done
             break
         if blockette_id != 41:
             # different blockette id -> break
             break
         blockette_length = int(data.read(4))
         lookup_key = int(data.read(4))
         if lookup_key != global_lookup_key:
             # different lookup key -> break
             break
         # ok follow up blockette found - skip some unneeded fields
         self.fields[1].read(data)
         self.fields[2].read(data)
         self.fields[3].read(data)
         self.fields[4].read(data)
         self.fields[5].read(data)
         # remaining length in current blockette
         length = pos - data.tell() + blockette_length
         # read follow up blockette and append it to temporary blockette
         temp.write(data.read(length))
     # reposition file pointer
     data.seek(pos)
     # parse new combined temporary blockette
     temp.seek(0)
     Blockette.parseSEED(self, temp, expected_length=temp.len)
Example #17
0
 def parseSEED(self, data, expected_length=0):
     """
     Parse given data for blockette fields and create attributes.

     ``data`` may be a string (wrapped in a StringIO) or a file-like
     object positioned at the blockette start. Field id 2 carries the real
     blockette length and overrides ``expected_length`` mid-parse. In
     strict mode a final length mismatch raises BlocketteLengthException.
     """
     # convert to stream for test issues
     if isinstance(data, basestring):
         expected_length = len(data)
         data = StringIO(data)
     start_pos = data.tell()
     # debug
     if self.debug:
         print(' DATA: %s' % (data.read(expected_length)))
         # rewind what the debug dump just consumed
         data.seek(-expected_length, 1)
     blockette_fields = self.default_fields + self.getFields()
     # loop over all blockette fields
     for field in blockette_fields:
         # if blockette length reached break with warning
         if data.tell() - start_pos >= expected_length:
             if not self.strict:
                 break
             if isinstance(field, Loop):
                 break
             msg = "End of blockette " + self.blockette_id + " reached " + \
                   "without parsing all expected fields, here: " + \
                   str(field)
             # NOTE(review): the warn branch below is unreachable — strict
             # is known True at this point (non-strict broke out above).
             if self.strict:
                 raise BlocketteLengthException(msg)
             else:
                 warnings.warn(msg, category=Warning)
             break
         field.parseSEED(self, data)
         if field.id == 2:
             # field 2 is the blockette length field; trust it from now on
             expected_length = field.data
     # strict tests
     if not self.strict:
         return
     # check length
     end_pos = data.tell()
     blockette_length = end_pos - start_pos
     if expected_length == blockette_length:
         return
     # wrong length
     msg = 'Wrong size of Blockette %s (%d of %d) in sequence %06d'
     msg = msg % (self.blockette_id, blockette_length, expected_length,
                  self.record_id or 0)
     if self.strict:
         raise BlocketteLengthException(msg)
     else:
         warnings.warn(msg, category=Warning)
Example #18
0
def dummy_base64_file(name='file_test', format='csv', **kwargs):
    """Build a fake base64-encoded file payload for tests.

    :param name: file name (without extension)
    :param format: file extension (default ``csv``)
    :param content: file content (default ``'Dummy,Content'``)
    :param content_type: MIME type of the content (default ``text/csv``)

    Returns a dict shaped like::

        {
            "tamanho": 10290,            # size in bytes
            "formato": "text/csv",
            "nome_arquivo": "file_test.csv",
            "base64": "/019lasdjaslk19aslASdlk12-ASLKDJ123//ASDLAKj=="
        }

    Cleanup over the previous revision: removed the unused
    ``from PIL import Image`` (the image comes from dummy_image()) and a
    trailing ``file_obj.seek(0, os.SEEK_END)`` that had no effect because
    the buffer is never read afterwards.
    """
    content = kwargs.pop('content', "Dummy,Content")
    content_type = kwargs.pop('content_type', "text/csv")
    filename = "{0}.{1}".format(name, format)
    file_obj = StringIO()

    if format not in VALID_IMAGES:
        file_obj.write(content)
        filesize = file_obj.tell()
        data = base64.b64encode(content.encode()).decode("utf-8")
    else:
        image = dummy_image()
        # NOTE(review): this encodes the raw pixel buffer, not the encoded
        # image file written below — presumably intentional for fixtures,
        # but verify against consumers.
        data = base64.b64encode(image.tobytes()).decode("utf-8")
        file_obj = BytesIO()
        image.save(file_obj, format)
        filesize = file_obj.tell()
        content_type = IMAGE_TYPE.get(format)

    data_file = {}
    data_file["tamanho"] = filesize
    data_file["formato"] = content_type
    data_file["nome_arquivo"] = filename
    data_file["base64"] = data

    return data_file
Example #19
0
 def test_put_should_accept_file_like_objects(self):
     """
     put()'s local_path arg should take file-like objects too
     """
     local_path = self.path('whatever')
     buf = StringIO()
     buf.write("testing file-like objects in put()")
     # Remember the cursor so we can prove put() restored it afterwards.
     saved_pos = buf.tell()
     remote_path = '/new_file.txt'
     with hide('everything'):
         put(buf, remote_path)
         get(remote_path, local_path)
     eq_contents(local_path, buf.getvalue())
     # the file pointer must be back where it started
     eq_(saved_pos, buf.tell())
Example #20
0
 def test_put_should_accept_file_like_objects(self):
     """
     put()'s local_path arg should take file-like objects too
     """
     destination = self.path('whatever')
     in_memory = StringIO()
     in_memory.write("testing file-like objects in put()")
     original_pos = in_memory.tell()
     remote = '/new_file.txt'
     with hide('everything'):
         put(in_memory, remote)
         get(remote, destination)
     eq_contents(destination, in_memory.getvalue())
     # put() must leave the stream position untouched
     eq_(original_pos, in_memory.tell())
Example #21
0
class StreamReceiver(Protocol):
    """
    A Twisted Protocol that receives data from the runtime, and recombines it
    into a data stream.

    Whenever the stream is updated, the streamReceived() method is called on
    the implementation, which can choose to consume some of the data.
    """

    stream = None

    def __init__(self):
        pass

    def connectionMade(self):
        """Set up an empty in-memory buffer for this connection."""
        self.stream = StringIO()

    def dataReceived(self, data):
        """Queue incoming bytes, notify the implementation, then compact.

        If the implementation has consumed everything, the buffer is
        truncated so it does not grow without bound.
        """
        self.__enqueue(data)
        self.streamReceived()
        if self.stream.tell() == self.stream.len:
            self.stream.truncate(0)

    def __enqueue(self, data):
        """Append `data` at the end of the stream, preserving the read cursor."""
        saved = self.stream.tell()
        self.stream.seek(self.stream.len)
        self.stream.write(data)
        self.stream.seek(saved)
Example #22
0
    def receive(self, cmd=None):
        """Read from the shell until a prompt appears; return sanitized output.

        Keeps a rolling window of roughly the last 200 characters received
        to scan for prompts / interactive questions.

        NOTE(review): while fewer than 200 chars have arrived,
        ``recv.seek(recv.tell() - 200)`` produces a negative target — this
        relies on the StringIO implementation clamping it to 0; confirm for
        the StringIO in use.
        """
        recv = StringIO()
        handled = False

        while True:
            data = self.shell.recv(200)

            recv.write(data)
            # Rewind so the next read() yields (at most) the last 200 chars.
            recv.seek(recv.tell() - 200)

            window = self.strip(recv.read())

            if hasattr(cmd, 'prompt') and not handled:
                # Answer an interactive question (e.g. confirm) at most once.
                if self.handle_prompt(window,
                                      prompt=cmd.prompt,
                                      response=cmd.response):
                    handled = True
                    time.sleep(cmd.delay)
                    if cmd.is_reboot:
                        return

            try:
                if self.find_prompt(window):
                    # Prompt seen: the full transcript is the command output.
                    resp = self.strip(recv.getvalue())
                    return self.sanitize(cmd, resp)
            except ShellError:
                exc = get_exception()
                exc.command = cmd
                raise
def parse_character_set(Txt_or_File, PatternStringF=False):
    """Parse a character-set expression from a string or an open stream.

    NOTE(review): `sh_ref` is the original stream (used for error
    reporting) when a stream was passed, else -1 as a "no file" marker.
    Python 2 syntax (`except ..., x`).
    """

    if Txt_or_File.__class__ in [file, StringIO]:
        sh       = Txt_or_File
        sh_ref   = sh
        position = sh.tell()
    else:
        sh     = StringIO(Txt_or_File)
        sh_ref = -1

    start_position = sh.tell()

    try:
        # -- parse regular expression, build state machine
        character_set = charset_expression.snap_set_expression(sh)

        if character_set == None:
            error_msg("No valid regular character set expression detected.", sh_ref)

        # -- character set is not supposed to contain buffer limit code
        if character_set.contains(Setup.buffer_limit_code):
            character_set.cut_interval(Interval(Setup.buffer_limit_code, Setup.buffer_limit_code))

    except RegularExpressionException, x:
        error_msg("Regular expression parsing:\n" + x.message, sh_ref)
Example #24
0
 def list_directory(self, path):
     """Render an HTML listing of `path`; return it as a rewound buffer."""
     try:
         entries = os.listdir(path)
     except os.error:
         self.send_error(404, "Error")
         return None
     entries.sort(key=lambda a: a.lower())
     page = StringIO()
     displaypath = cgi.escape(urllib.unquote(self.path))
     page.write("<ul>\n")
     for name in entries:
         fullname = os.path.join(path, name)
         displayname = linkname = name
         # Value unused, but getmtime can raise for vanished entries, so
         # dropping the call would change behavior.
         date_modified = time.ctime(os.path.getmtime(fullname))
         if os.path.isdir(fullname):
             displayname = name + "/"
             linkname = name + "/"
         if os.path.islink(fullname):
             displayname = name + "@"
         page.write('<li><a href="%s "> %s</a>\n' %
                    (urllib.quote(linkname), cgi.escape(displayname)))
     page.write("</ul>")
     length = page.tell()
     page.seek(0)
     self.send_response(200)
     encoding = sys.getfilesystemencoding()
     self.send_header("Content-type", "text/html; charset=%s" % encoding)
     self.send_header("Content-Length", str(length))
     self.end_headers()
     return page
Example #25
0
File: server.py Project: wdq/LANBox
 def list_directory(self, path):
     """Build an HTML directory index for `path` and return the buffer."""
     try:
         names = os.listdir(path)
     except os.error:
         self.send_error(404, "Error")
         return None
     names.sort(key=lambda a: a.lower())
     buf = StringIO()
     displaypath = cgi.escape(urllib.unquote(self.path))
     buf.write("<ul>\n")
     for name in names:
         fullname = os.path.join(path, name)
         displayname = linkname = name
         # Value unused, but getmtime can raise for vanished entries, so
         # dropping the call would change behavior.
         date_modified = time.ctime(os.path.getmtime(fullname))
         if os.path.isdir(fullname):
             displayname = name + "/"
             linkname = name + "/"
         if os.path.islink(fullname):
             displayname = name + "@"
         buf.write('<li><a href="%s "> %s</a>\n'
                   % (urllib.quote(linkname), cgi.escape(displayname)))
     buf.write("</ul>")
     length = buf.tell()
     buf.seek(0)
     self.send_response(200)
     encoding = sys.getfilesystemencoding()
     self.send_header("Content-type", "text/html; charset=%s" % encoding)
     self.send_header("Content-Length", str(length))
     self.end_headers()
     return buf
Example #26
0
    def testItShouldReadFrameFromAStream(self):
        """Frame.readFrom must return a frame and advance the stream."""
        stream = StringIO()
        stream.write("\x00\x00\x00\x02\x00\x00\x00\x0bthe payload")
        stream.seek(0)
        parsed = frame.Frame.readFrom(stream)
        assert parsed != None  # we have read a frame
        assert stream.tell() > 0  # we have advanced the stream
Example #27
0
    def set_content(self, path, content, commit_msg=None):
        """ Add new content in `path` """

        # Create the stream
        stream = StringIO(content.encode('utf-8'))
        # Measure the encoded length, then rewind so the odb reads from 0.
        stream.seek(0, 2)
        streamlen = stream.tell()
        stream.seek(0)

        istream = IStream("blob", streamlen, stream)

        # Add it to the repository object database
        self.repo.odb.store(istream)

        # Create the corresponding blob object
        # (0100644 is a Python 2 octal literal: regular-file mode)
        blob = Blob(self.repo, istream.binsha, 0100644, path.encode('utf-8'))

        # Commit
        self.repo.index.add([IndexEntry.from_blob(blob)])

        if not commit_msg:
            commit_msg = ugettext('Update Wiki: {0}').format(path.encode('utf-8')).encode('utf-8')

        self.repo.index.commit(commit_msg)

        # Update internal informations
        self._parse()
Example #28
0
    def _send_buffer(self, string_buffer, filename, permissions):
        """Send an in-memory buffer over the channel via the scp 'C' record.

        NOTE(review): uses the Python 2 StringIO ``.len`` attribute for the
        total size; the bare ``'\\x00'`` terminator is a str, matching the
        py2 channel API.
        """
        file_hdl = StringIO(string_buffer)
        basename = asbytes(filename)
        mode = permissions
        size = file_hdl.len

        # The protocol can't handle \n in the filename.
        # Quote them as the control sequence \^J for now,
        # which is how openssh handles it.
        self.channel.sendall(("C%s %d " % (mode, size)).encode('ascii') +
                             basename.replace(b'\n', b'\\^J') + b"\n")
        self._recv_confirm()
        file_pos = 0
        if self._progress:
            if size == 0:
                # avoid divide-by-zero
                self._progress(basename, 1, 1)
            else:
                self._progress(basename, size, 0)
        buff_size = self.buff_size
        chan = self.channel

        # Stream the payload in buff_size chunks, reporting progress.
        while file_pos < size:
            chan.sendall(file_hdl.read(buff_size))
            file_pos = file_hdl.tell()
            if self._progress:
                self._progress(basename, size, file_pos)
        chan.sendall('\x00')
        file_hdl.close()
        self._recv_confirm()
Example #29
0
    def decode(self, s):
        """Decode the wire string `s`, populating fields on this message."""
        stream = StringIO(s)
        total = len(s)
        while stream.tell() < total:
            field_number, wire_type, payload = self.read_tag(stream)
            field = self.lookup_id(field_number)
            # Unknown field numbers are skipped silently.
            if not field:
                continue

            field_multiplicity, field_type, field_name, _ = field
            if issubclass(field_type, PrimativeType):
                value = field_type.decode(payload)
            elif issubclass(field_type, Message):
                value = field_type()
                value.decode(payload)
            else:
                raise Exception("field type must be a subclass of PrimativeType or Message")

            if field_multiplicity == "repeated":
                # Lazily create the list the first time a repeated field appears.
                if getattr(self, field_name) is None:
                    setattr(self, field_name, [])
                getattr(self, field_name).append(value)
            else:
                setattr(self, field_name, value)
Example #30
0
    def _send_command(self, command, host, port, timeout):
        """Send `command` to host:port and return a buffer with the reply.

        Raises ZKConnectionFailure on socket errors or timeouts.
        """
        sock = socket.socket()
        sock.settimeout(timeout)
        buf = StringIO()
        recv_size = 1024
        max_reads = 10000
        try:
            try:
                sock.connect((host, port))
                sock.sendall(command)

                data = sock.recv(recv_size)
                buf.write(data)
                reads = 1
                # Keep reading until the peer closes, with a hard cap on
                # iterations so a chatty peer cannot loop us forever.
                while data:
                    if reads > max_reads:
                        raise Exception(
                            "Read %s bytes before exceeding max reads of %s. "
                            % (buf.tell(), max_reads))
                    data = sock.recv(recv_size)
                    buf.write(data)
                    reads += 1
            except (socket.timeout, socket.error):
                raise ZKConnectionFailure()
        finally:
            sock.close()
        return buf
Example #31
0
 def list_directory(self, path):
     """Produce the directory-listing page for `path`; return it rewound."""
     names = os.listdir(path)
     names.sort(key=lambda a: a.lower())
     page = StringIO()
     page.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
     page.write("<html>\n<title>Directory listing</title>\n")
     page.write("<body>\n<h2>Directory listing</h2>\n")
     page.write("<hr>\n<ul>\n")
     for name in names:
         fullname = os.path.join(path, name)
         displayname = linkname = name
         # Append / for directories or @ for symbolic links
         if os.path.isdir(fullname):
             displayname = name + "/"
             linkname = name + "/"
         if os.path.islink(fullname):
             # Note: a link to a directory displays with @ and links with /
             displayname = name + "@"
         page.write('<li><a href="%s">%s</a>\n' %
                    (urllib.quote(linkname), cgi.escape(displayname)))
     page.write("</ul>\n<hr>\n</body>\n</html>\n")
     length = page.tell()
     page.seek(0)
     self.send_response(200)
     encoding = sys.getfilesystemencoding()
     self.send_header("Content-type", "text/html; charset=%s" % encoding)
     self.send_header("Content-Length", str(length))
     self.end_headers()
     return page
Example #32
0
def wallpaper_thumbnail(sender, **kwargs):
    """Signal handler: (re)build the JPEG thumbnail for a wallpaper.

    Regenerates ``instance.thumbnail`` only when it is missing or older
    than the wallpaper image itself.
    """
    w = kwargs["instance"]
    from os import path
    # BUG FIX: fetch the wallpaper path *outside* the try block. In the
    # original, if `w.wallpaper.path` itself raised ValueError, the
    # except clause used `wallpath` before it was ever bound (NameError).
    wallpath = w.wallpaper.path
    try:
        thumbpath = w.thumbnail.path
    except ValueError:
        # No thumbnail file yet -- derive its path from the wallpaper.
        thumbpath = path.splitext(wallpath)[0] + ".thumb.jpg"
    try:
        # Skip regeneration when the existing thumbnail is up to date.
        if path.getmtime(wallpath) <= path.getmtime(thumbpath):
            return
    except EnvironmentError:
        pass  # thumbnail missing/unreadable: fall through and rebuild

    from PIL import Image
    image = Image.open(wallpath)
    if image.mode not in ('L', 'RGB'):
        # JPEG cannot store palette/RGBA modes; normalize first.
        image = image.convert('RGB')
    image.thumbnail((220, 220), Image.ANTIALIAS)

    from StringIO import StringIO
    data = StringIO()
    image.save(data, "JPEG")
    data.size = data.tell()
    data.seek(0, 0)
    w.thumbnail.save(path.basename(thumbpath), File(data))
Example #33
0
  def test_seek_set(self):
    """Absolute seeks on CompressedFile must match StringIO semantics."""
    for compression_type in [CompressionTypes.BZIP2, CompressionTypes.GZIP]:
      file_name = self._create_compressed_file(compression_type, self.content)
      with open(file_name, 'rb') as f:
        compressed_fd = CompressedFile(f, compression_type,
                                       read_size=self.read_block_size)
        reference_fd = StringIO(self.content)

        # Probe out-of-bound and boundary offsets on both sides.
        for target in (-1, 0, 1,
                       len(self.content)-1, len(self.content),
                       len(self.content) + 1):
          compressed_fd.seek(target, os.SEEK_SET)
          reference_fd.seek(target, os.SEEK_SET)

          # Content check must precede the position check: cStringIO's
          # tell() reports out-of-bound positions until a real read
          # occurs, while _CompressedFile.tell() always stays within the
          # bounds of the uncompressed content.
          self.assertEqual(compressed_fd.readline(), reference_fd.readline())
          self.assertEqual(compressed_fd.tell(), reference_fd.tell())
Example #34
0
    def _render_zip(self, req, filename, repos, diff):
        """ZIP archive with all the added and/or modified files.

        Builds the archive in memory and streams it back on `req`.
        """
        new_rev = diff.new_rev
        req.send_response(200)
        req.send_header('Content-Type', 'application/zip')
        req.send_header('Content-Disposition', 'attachment;'
                        'filename=%s.zip' % filename)

        from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

        buf = StringIO()
        archive = ZipFile(buf, 'w', ZIP_DEFLATED)
        for old_node, new_node, kind, change in repos.get_changes(**diff):
            # Only added/modified files go into the archive.
            if kind != Node.FILE or change == Changeset.DELETE:
                continue
            assert new_node
            info = ZipInfo()
            # Note: unicode filenames are not supported by zipfile.
            # UTF-8 is not supported by all Zip tools either,
            # but as some does, I think UTF-8 is the best option here.
            info.filename = new_node.path.encode('utf-8')
            info.date_time = time.gmtime(new_node.last_modified)[:6]
            info.compress_type = ZIP_DEFLATED
            archive.writestr(info, new_node.get_content().read())
        archive.close()

        buf.seek(0, 2)  # be sure to be at the end, so tell() is the size
        req.send_header("Content-Length", buf.tell())
        req.end_headers()

        req.write(buf.getvalue())
class RawHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
	"""Request handler driven by an external event loop instead of a
	blocking socket file.

	Bytes are pushed in via data_came_in(); a request is parsed out of
	the in-memory buffer once a full header block has arrived.
	"""

	def setup(self):
		self.socket = self.request
		self.socket.handler = self
		# rfile accumulates raw request bytes; rfilePos marks how far
		# handle_one_request() has consumed them so far.
		self.rfile = StringIO()
		self.rfilePos = 0
		self.wfile = SingleSocketWriter(self.socket)

	def handle(self):
		pass  # dispatch happens from data_came_in(), not the base loop

	def finish(self):
		pass  # the event loop owns the socket lifetime

	def data_came_in(self, socket, data):
		self.rfile.write(data)
		# BUG FIX: the original called self.rfile.seek(0, self.rfilePos),
		# passing the position as the *whence* argument, which StringIO
		# treats as an absolute seek to 0 (and cStringIO rejects) -- so
		# already-consumed requests were rescanned. Seek to the
		# unconsumed offset explicitly.
		self.rfile.seek(self.rfilePos)
		if self.rfile.read().find('\r\n\r\n') >= 0:
			self.rfile.seek(self.rfilePos)
			self.done = False
			self.handle_one_request()
			if self.done:
				socket.close()
				return
			self.rfilePos = self.rfile.tell()
		# TODO: Check close_connection (a la BaseHTTPRequestHandler.handle())

	def connection_flushed(self, unused_socket):
		self.wfile.flushed()
Example #36
0
class collector:
    """Gathers the request body for PUT requests.

    Buffers incoming data until content-length bytes have arrived, then
    hands the complete body back to the handler.
    """

    def __init__(self, handler, request, env):
        self.handler = handler
        self.env = env
        self.request = request
        self.data = StringIO()

        # A content-length header is mandatory: without it we cannot
        # know when the body is complete.
        self.cl = request.get_header('content-length')
        if self.cl:
            self.cl = int(self.cl)
        else:
            request.error(411)

    def collect_incoming_data(self, data):
        self.data.write(data)
        if self.data.tell() < self.cl:
            return  # body not complete yet

        self.data.seek(0)

        h = self.handler
        r = self.request

        # Restore the default terminator and drop our references before
        # handing control back to the handler.
        self.request.channel.set_terminator('\r\n\r\n')
        del self.handler
        del self.request

        h.continue_request(self.data, r, self.env)
Example #37
0
class collector:

    """gathers input for PUT requests"""

    def __init__(self, handler, request, env):
        """Stash the collaborators and start an empty body buffer."""
        self.handler = handler
        self.env = env
        self.request = request
        self.data = StringIO()

        # Refuse requests that do not announce their body size.
        self.cl = request.get_header('content-length')
        if not self.cl:
            request.error(411)
            return
        self.cl = int(self.cl)

    def collect_incoming_data(self, data):
        """Accumulate body bytes; dispatch once content-length is reached."""
        self.data.write(data)
        if self.data.tell() >= self.cl:
            self.data.seek(0)

            handler, request = self.handler, self.request

            # Reset the channel terminator to the default header sentinel
            # and break the reference cycle before continuing.
            self.request.channel.set_terminator('\r\n\r\n')
            del self.handler
            del self.request

            handler.continue_request(self.data, request, self.env)
Example #38
0
    def get_dialect(self):
        """Detect and cache the CSV dialect of the flat file.

        Sniffs the dialect (and, when no lines are skipped, whether a
        header row is present) from a sample of the data, falling back
        to ``csv.excel`` with an assumed header on sniffing failure.
        Results are stored on ``self.dialect`` / ``self.has_header``.
        """
        if self.dialect:
            return  # already detected

        # BUG FIX: open the source *before* the try block. In the
        # original, a failing open() left `flat_file` unbound and the
        # finally clause raised NameError, masking the real error.
        if self.buffer:
            flat_file = StringIO(self.buffer)
        else:
            flat_file = open(self.path, mode="rb")
        try:
            try:
                flat_file.seek(0)
                for line in range(self.skip_lines):
                    flat_file.readline()
                tell = flat_file.tell()

                sniffer = csv.Sniffer()
                self.dialect = sniffer.sniff(flat_file.read(20240))
                # Reject implausible delimiters the sniffer sometimes picks.
                if self.dialect.delimiter not in [
                        ' ', '\t', '|', ',', ';', ':'
                ]:
                    raise csv.Error
                flat_file.seek(tell)
                if not self.skip_lines:
                    self.has_header = sniffer.has_header(flat_file.read(20240))
            except csv.Error:
                # Sniffing failed: assume Excel-style CSV with a header.
                self.dialect = csv.excel
                self.has_header = True
            if self.dialect.delimiter == ' ':
                self.dialect.delimiter = ','
            if self.buffer:
                flat_file.seek(0)
        finally:
            flat_file.close()
Example #39
0
    def set_content(self, path, content, commit_msg=None):
        """Add new `content` at `path` and commit it to the wiki repo.

        Encodes the content as UTF-8, stores it as a blob in the git
        object database, stages it, and commits with `commit_msg` (or a
        default "Update Wiki: <path>" message).

        NOTE: Python 2 code (the `0100644` octal literal is py2-only).
        """

        # Create the stream and measure its encoded size by seeking to
        # the end (IStream needs the length up front).
        stream = StringIO(content.encode('utf-8'))
        stream.seek(0, 2)
        streamlen = stream.tell()
        stream.seek(0)

        istream = IStream("blob", streamlen, stream)

        # Add it to the repository object database
        self.repo.odb.store(istream)

        # Create the corresponding blob object
        # (0100644 = mode bits for a regular, non-executable file)
        blob = Blob(self.repo, istream.binsha, 0100644, path.encode('utf-8'))

        # Commit
        self.repo.index.add([IndexEntry.from_blob(blob)])

        if not commit_msg:
            commit_msg = ugettext('Update Wiki: {0}').format(
                path.encode('utf-8')).encode('utf-8')

        self.repo.index.commit(commit_msg)

        # Update internal informations
        self._parse()
Example #40
0
    def render(self):
        """Build an XLS export of root organizations and registers, set
        download headers on the response, and return the file body."""
        book = Workbook(encoding='utf-8')
        uids = {'next': 0}

        # One sheet per root organization, headed by a title row.
        root_organizations = self.get_root_organizations()
        for organization in root_organizations:
            sheet = book.add_sheet(organization.title)
            for column, title in enumerate(TITLES_ORGANIZATION):
                sheet.row(0).write(column, title)
            self.write_organization(sheet, organization, uids)

        # Likewise one sheet per root register.
        root_registers = self.get_root_registers()
        for register in root_registers:
            sheet = book.add_sheet(register.title)
            for column, title in enumerate(TITLES_REGISTER):
                sheet.row(0).write(column, title)

            self.write_register(sheet, register, uids)

        filehandle = StringIO()
        # Only save when something was exported -- presumably saving a
        # workbook with no sheets is invalid; verify against xlwt.
        if root_organizations or root_registers:
            book.save(filehandle)
        response = filehandle.getvalue()

        filehandle.seek(0, os.SEEK_END)
        filesize = filehandle.tell()
        filehandle.close()

        self.request.RESPONSE.setHeader('Content-disposition', 'export.xls')
        self.request.RESPONSE.setHeader('Content-Type', 'application/xls')
        self.request.RESPONSE.setHeader('Content-Length', filesize)

        return response
Example #41
0
    def response(self, pdu):
        """Reassemble pickled messages from a PDU byte stream.

        Appends the PDU payload to the internal buffer, unpickles as
        many complete messages as are available, forwards each one
        upstream, and keeps any trailing partial message for next time.
        """
        if _debug: PickleActorMixIn._debug("response %r", pdu)

        # add the data to our buffer
        self.pickleBuffer += pdu.pduData

        # build a file-like object around the buffer
        strm = StringIO(self.pickleBuffer)
        # Buffer length (the original used cStringIO-only `strm.len`,
        # which is the same value but non-portable).
        buflen = len(self.pickleBuffer)

        pos = 0
        while pos < buflen:
            try:
                # try to load something
                msg = pickle.load(strm)
            except Exception:
                # Incomplete or corrupt tail: stop and wait for more
                # data. (The original bare `except:` also swallowed
                # KeyboardInterrupt/SystemExit.)
                break

            # got a message
            rpdu = PDU(msg)
            rpdu.update(pdu)

            super(PickleActorMixIn, self).response(rpdu)

            # see where we are
            pos = strm.tell()

        # save anything left over, if there is any
        if pos < buflen:
            self.pickleBuffer = self.pickleBuffer[pos:]
        else:
            self.pickleBuffer = ''
Example #42
0
 def list_directory(self, path):
     """Render an HTML listing of *path* (including "." and "..") and
     return it as a file object rewound to the start, or None when the
     directory cannot be listed. Sends status and headers on self."""
     try:
         entries = [os.curdir, os.pardir] + os.listdir(path)
     except os.error:
         self.send_error(404, "No permission to list directory")
         return None
     entries.sort(key=lambda e: e.lower())
     f = StringIO()
     displaypath = cgi.escape(urllib.unquote(self.path))
     f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
     f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
     f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
     f.write("<hr>\n<ul>\n")
     for entry in entries:
         fullname = os.path.join(path, entry)
         displayname = linkname = entry
         if os.path.isdir(fullname):
             # Directories render and link with a trailing slash.
             displayname = entry + "/"
             linkname = entry + "/"
         if os.path.islink(fullname):
             # Symlinks display with "@"; a symlinked dir still links "/".
             displayname = entry + "@"
         f.write('<li><a href="%s">%s</a>\n'
                 % (urllib.quote(linkname), cgi.escape(displayname)))
     f.write("</ul>\n<hr>\n</body>\n</html>\n")
     length = f.tell()
     f.seek(0)
     self.send_response(200)
     encoding = sys.getfilesystemencoding()
     self.send_header("Content-type", "text/html; charset=%s" % encoding)
     self.send_header("Content-Length", str(length))
     self.end_headers()
     return f
Example #43
0
 def _save(self, request, *args, **kwargs):
     """Persist an uploaded base64 PNG from the POST body as the target
     object's picture and return metadata about the new file as JSON.

     NOTE(review): when POST['type'] == "pixel" nothing is saved, yet
     the final HttpResponse still references `filename`, `full_url` and
     `old_picture` -- that branch would raise NameError. Presumably the
     client only ever sends "data" here; confirm before relying on it.
     """
     if self.request.POST['type'] == "pixel":
         pass
     elif self.request.POST['type'] == u"data":
         target = self.target_model.objects.get(pk=self.kwargs['pk'])
         # Strip the data-URI prefix and decode the base64 payload (py2).
         raw_data = self.request.POST['image'][len('data:image/png;base64,'):].decode('base64')
         buf = StringIO(raw_data)
         # Seek to the end so buf.tell() below reports the payload size.
         buf.seek(0, 2)
         filename = "%s.png" % target.pk
         file = InMemoryUploadedFile(buf, "picture", filename, None, buf.tell(), None)
         old_picture = getattr(target, self.target_field_name)
         try:
             old_picture.delete()
         except OSError:
             pass  # best effort: the old file may already be gone
         setattr(target, 'picture', file)
         target.save()
         new_picture = getattr(target, self.target_field_name)
         full_url = reverse('camera', args=[new_picture.url])
     # `refresh` carries a timestamp query-string to defeat caching.
     return HttpResponse(json.dumps({
         'file': filename,
         'url': full_url,
         'old': str(old_picture),
         'refresh': "%s?%s" % (full_url, time.time())
     }))
Example #44
0
    def test_seek_cur(self):
        """Relative (SEEK_CUR) seeks on CompressedFile must track the
        plain StringIO reference."""
        for compression_type in [
                CompressionTypes.BZIP2, CompressionTypes.GZIP
        ]:
            file_name = self._create_compressed_file(compression_type,
                                                     self.content)
            with open(file_name, 'rb') as f:
                compressed_fd = CompressedFile(f,
                                               compression_type,
                                               read_size=self.read_block_size)
                reference_fd = StringIO(self.content)

                # Out-of-bound and in-bound relative seeks, both
                # directions. (Expressions kept verbatim: under py2
                # floor division, -1 * len(...) / 2 differs from
                # negating a precomputed half for odd lengths.)
                for delta in (-1, 0, 1, len(self.content) / 2,
                              len(self.content) / 2,
                              -1 * len(self.content) / 2):
                    compressed_fd.seek(delta, os.SEEK_CUR)
                    reference_fd.seek(delta, os.SEEK_CUR)

                    self.assertEqual(compressed_fd.readline(),
                                     reference_fd.readline())

                    expected_pos = reference_fd.tell()
                    self.assertEqual(compressed_fd.tell(), expected_pos)
Example #45
0
    def test_seek_set(self):
        """Absolute (SEEK_SET) seeks on CompressedFile must match the
        plain StringIO reference, for out-of-bound and boundary offsets
        on both sides of the content, for BZIP2 and GZIP."""
        for compression_type in [
                CompressionTypes.BZIP2, CompressionTypes.GZIP
        ]:
            file_name = self._create_compressed_file(compression_type,
                                                     self.content)
            with open(file_name, 'rb') as f:
                compressed_fd = CompressedFile(f,
                                               compression_type,
                                               read_size=self.read_block_size)
                reference_fd = StringIO(self.content)

                # Note: content (readline) check must come before position (tell) check
                # because cStringIO's tell() reports out of bound positions (if we seek
                # beyond the file) up until a real read occurs.
                # _CompressedFile.tell() always stays within the bounds of the
                # uncompressed content.
                for seek_position in (-1, 0, 1, len(self.content) - 1,
                                      len(self.content),
                                      len(self.content) + 1):
                    compressed_fd.seek(seek_position, os.SEEK_SET)
                    reference_fd.seek(seek_position, os.SEEK_SET)

                    uncompressed_line = compressed_fd.readline()
                    reference_line = reference_fd.readline()
                    self.assertEqual(uncompressed_line, reference_line)

                    uncompressed_position = compressed_fd.tell()
                    reference_position = reference_fd.tell()
                    self.assertEqual(uncompressed_position, reference_position)
Example #46
0
def crack_crc32(s, newcrc, init_crc=None, printstatus=False):
    """Compute the 4 replacement bytes for the tail of `s` (padded with
    four NUL bytes) so that the padded buffer's CRC-32 becomes `newcrc`.

    Returns the patched 4 bytes as a bytearray.
    """
    raf = StringIO(s + '\x00' * 4)
    # Bit-reverse the 32-bit target value (CRC-32 is a reflected CRC).
    newcrc = int(bin(newcrc)[2:].rjust(32, '0')[::-1], 2)
    raf.seek(0, os.SEEK_END)
    length = raf.tell()
    offset = length - 4
    # Guard kept from the original; with offset = length - 4 it can
    # never fire, but it documents the invariant.
    if offset + 4 > length:
        raise ValueError("Byte offset plus 4 exceeds file length")

    # CRC-32 of the padded buffer as it stands now.
    crc = get_crc32(raf, init_crc)
    if printstatus:
        print("Original CRC-32: {:08X}".format(reverse32(crc)))

    # Difference between the current and desired CRC, transported back
    # over the trailing bytes via the helpers' carry-less arithmetic.
    delta = crc ^ newcrc
    delta = multiply_mod(reciprocal_mod(pow_mod(2, (length - offset) * 8)), delta)

    # XOR the correction into the final 4 bytes.
    raf.seek(offset)
    patch = bytearray(raf.read(4))
    if len(patch) != 4:
        raise IOError("Cannot read 4 bytes at offset")
    for i in range(4):
        patch[i] ^= (reverse32(delta) >> (i * 8)) & 0xFF
    return patch
Example #47
0
def popcorn(request, id):
    """Edit the Popcorn URL of a suggested event.

    Only the suggesting user may edit; upcoming events are redirected
    back to the description step. On a valid POST the event is saved
    and, if the Popcorn page exposes an Open Graph image, that image is
    fetched and stored as the event's placeholder image.
    """
    event = get_object_or_404(SuggestedEvent, pk=id)
    if event.user != request.user:
        return http.HttpResponseBadRequest('Not your event')
    if event.upcoming:
        return redirect(reverse('suggest:description', args=(event.pk, )))

    if request.method == 'POST':
        form = forms.PopcornForm(request.POST, instance=event)
        if form.is_valid():
            event = form.save()
            image_url = utils.find_open_graph_image_url(event.popcorn_url)
            if image_url:
                from django.core.files.uploadedfile import InMemoryUploadedFile
                import os
                from StringIO import StringIO
                image_content = requests.get(image_url).content
                buf = StringIO(image_content)
                # Seek to the end of the stream, so we can get its
                # length with `buf.tell()`
                buf.seek(0, 2)
                file = InMemoryUploadedFile(buf, "image",
                                            os.path.basename(image_url), None,
                                            buf.tell(), None)
                event.placeholder_img = file
                event.save()
            # XXX use next_url() instead?
            url = reverse('suggest:description', args=(event.pk, ))
            return redirect(url)
    else:
        initial = {}
        form = forms.PopcornForm(instance=event, initial=initial)

    data = {'form': form, 'event': event}
    return render(request, 'suggest/popcorn.html', data)
Example #48
0
 def list_directory(self, path):
     """Render a JSON listing of *path* and return it as a file object
     rewound to the start, or None when the directory cannot be listed.
     Sends the HTTP status and headers on self."""
     try:
         entries = os.listdir(path)
     except os.error:
         self.send_error(404, "No permission to list directory")
         return None
     entries.sort(key=lambda e: e.lower())
     displaypath = cgi.escape(urllib.unquote(self.path))
     jsonFiles = []
     # Absolute URLs are built against this host's primary IP, port 9998.
     ip = socket.gethostbyname(socket.gethostname())
     for entry in entries:
         fullname = os.path.join(path, entry)
         displayname = linkname = entry
         date_modified = time.ctime(os.path.getmtime(fullname))
         if os.path.isdir(fullname):
             # Directories get a trailing slash in display and link.
             displayname = entry + "/"
             linkname = entry + "/"
         if os.path.islink(fullname):
             displayname = entry + "@"
         absurl = "http://" + ip + ":9998/" + urllib.quote(linkname)
         jsonFiles.append({'date_modified': date_modified,
                           'url': absurl,
                           'filename': cgi.escape(displayname)})
     json_data = json.dumps(jsonFiles, separators=(',', ':'))
     f = StringIO()
     f.write('%s' % json_data)
     length = f.tell()
     f.seek(0)
     self.send_response(200)
     encoding = sys.getfilesystemencoding()
     self.send_header("Content-type", "application/json; charset=%s" % encoding)
     self.send_header("Content-Length", str(length))
     self.end_headers()
     return f
Example #49
0
    def _read_java_obj(cls, data, pos, ignore_remaining_data=False):
        """Deserialize one Java object from data[pos:].

        Returns an (object, next_position) pair, where next_position is
        `pos` advanced by the number of bytes the deserializer consumed.
        """
        stream = BytesIO(data[pos:])
        parsed = javaobj.load(stream,
                              ignore_remaining_data=ignore_remaining_data)
        # tell() gives exactly how many bytes javaobj consumed.
        return parsed, pos + stream.tell()
Example #50
0
    def _render_zip(self, req, filename, repos, diff):
        """ZIP archive with all the added and/or modified files.

        Builds the archive in memory from the changeset's added/modified
        files, then streams headers and the zip payload back on `req`.
        """
        new_rev = diff.new_rev  # NOTE(review): unused here; confirm intent
        req.send_response(200)
        req.send_header('Content-Type', 'application/zip')
        req.send_header('Content-Disposition', 'attachment;'
                        'filename=%s.zip' % filename)

        from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

        buf = StringIO()
        zipfile = ZipFile(buf, 'w', ZIP_DEFLATED)
        # Only files that were added or modified make it into the archive.
        for old_node, new_node, kind, change in repos.get_changes(**diff):
            if kind == Node.FILE and change != Changeset.DELETE:
                assert new_node
                zipinfo = ZipInfo()
                zipinfo.filename = new_node.path.encode('utf-8')
                # Note: unicode filenames are not supported by zipfile.
                # UTF-8 is not supported by all Zip tools either,
                # but as some does, I think UTF-8 is the best option here.
                zipinfo.date_time = time.gmtime(new_node.last_modified)[:6]
                zipinfo.compress_type = ZIP_DEFLATED
                zipfile.writestr(zipinfo, new_node.get_content().read())
        zipfile.close()

        buf.seek(0, 2) # be sure to be at the end
        req.send_header("Content-Length", buf.tell())
        req.end_headers()

        req.write(buf.getvalue())
Example #51
0
    def _send_command(self, command, host, port, timeout):
        """Send a raw (4-letter-word style) command to a ZooKeeper node
        and return the complete response as a StringIO buffer.

        Raises ZKConnectionFailure on socket timeout/error; raises a
        plain Exception if the response exceeds the read cap (guard
        against an endless stream).
        """
        sock = socket.socket()
        sock.settimeout(timeout)
        buf = StringIO()
        chunk_size = 1024
        # try-finally and try-except kept separate for python 2.4 compat
        try:
            try:
                sock.connect((host, port))
                sock.sendall(command)

                # Drain the response; an empty recv() marks EOF.
                data = sock.recv(chunk_size)
                buf.write(data)
                reads = 1
                read_limit = 10000
                while data:
                    if reads > read_limit:
                        # Safeguard against an infinite loop
                        raise Exception(
                            "Read %s bytes before exceeding max reads of %s. "
                            % (buf.tell(), read_limit))
                    data = sock.recv(chunk_size)
                    buf.write(data)
                    reads += 1
            except (socket.timeout, socket.error):
                raise ZKConnectionFailure()
        finally:
            sock.close()
        return buf
Example #52
0
	def importData(self, rom, folder):
		"""Encode self.image's pixel data into the ROM, honouring the
		element's read order, pixel formats, palette, padding and
		byte-reversal settings.

		NOTE(review): `folder` and the local `read` are unused here --
		presumably part of a shared importer interface; confirm.
		"""
		read = self["read"].get()
		width, height = tuple(self.list("dimensions", "number"))

		# Create padding function...
		prev = 0
		padfunc = self.padfuncDef(width, height)

		# Prepare palette
		self.definePalette(self.list("palette", "tuple"))

		# Prepare data
		# `leftover` carries sub-byte bits between adjacent pixel writes.
		leftover = 0
		data = self.image.load()
		pixels = self["pixel"].get()
		stream = StringIO()
		for idx, x, y in self["read"].rect(width, height):
			leftover = pixels[idx % len(pixels)].write(stream, self, leftover, data[x, y])
			skip, leftover = padfunc(idx, prev, leftover, stream)
			if skip is not None:
				# Padding boundary reached: emit `skip` NUL bytes and
				# remember where this row/unit ended.
				if skip: stream.write("\x00" * skip)
				prev = stream.tell()
			#endif
		#endfor

		# Commit to ROM, optionally byte-reversing in fixed-size groups.
		reverse = self["reverse"].get({
			"w": width, "width": width,
			"h": height, "height": height,
		})
		if reverse: rom.write(reverseEvery(stream.getvalue(), reverse))
		else: rom.write(stream.getvalue())
Example #53
0
    def _send_command(self, command, host, port, timeout):
        """Send a raw command to a ZooKeeper node and return the full
        response as a StringIO buffer.

        Raises ZKConnectionFailure on socket timeout/error; raises a
        plain Exception if the response exceeds max_reads chunks.
        """
        sock = socket.socket()
        sock.settimeout(timeout)
        buf = StringIO()
        chunk_size = 1024
        # try-finally and try-except to stay compatible with python 2.4
        try:
            try:
                # Connect to the zk client port and send the stat command
                sock.connect((host, port))
                sock.sendall(command)

                # Read the response into a StringIO buffer
                # (an empty recv() marks end of stream).
                chunk = sock.recv(chunk_size)
                buf.write(chunk)
                num_reads = 1
                max_reads = 10000
                while chunk:
                    if num_reads > max_reads:
                        # Safeguard against an infinite loop
                        raise Exception("Read %s bytes before exceeding max reads of %s. "
                                        % (buf.tell(), max_reads))
                    chunk = sock.recv(chunk_size)
                    buf.write(chunk)
                    num_reads += 1
            except (socket.timeout, socket.error):
                raise ZKConnectionFailure()
        finally:
            sock.close()
        return buf
Example #54
0
	def prepareImage(self):
		"""Decode pixel data from the ROM into self.image.

		Reads width*height pixels starting at `base`, honouring the
		pixel formats, palette, padding and byte-reversal settings,
		then reorients the image to match the configured read order.
		"""
		Graphic.prepareImage(self)
		width, height = tuple(self.list("dimensions", "number"))
		numPixels = width * height

		# Create padding function...
		prev = 0
		padfunc = self.padfuncDef(width, height)

		# Collect size of data to read
		pixels = self["pixel"].get()
		bytes = self.len(numPixels, padfunc, pixels)

		# Prepare palette
		palette = self.list("palette", "tuple")

		# Read pixels
		self.rpl.rom.seek(self.number("base"))
		reverse = self.resolve("reverse").get({
			"w": width, "width": width,
			"h": height, "height": height,
		})
		if reverse: stream = StringIO(reverseEvery(self.rpl.rom.read(bytes), reverse))
		else: stream = StringIO(self.rpl.rom.read(bytes))

		# `leftovers` carries partial-byte state between pixel reads.
		leftovers, data, prev = ["", 0], [], 0
		for i in helper.range(numPixels):
			data.append(pixels[i % len(pixels)].read(stream, palette, leftovers))
			tmp = padfunc(i, prev, leftovers, stream=stream)
			if tmp is not None:
				#print "Skipping %i bytes." % tmp
				stream.seek(tmp, 1)
				prev = stream.tell()
			#endif
		#endfor

		# Paste to image
		self.image.putdata(data)

		# Transform resultant image by read.
		# Since this is confusing, here's a full readout:
		# LRUD: Nothing
		# LRDU: FLIP_TOP_BOTTOM
		# RLUD: FLIP_LEFT_RIGHT
		# RLDU: FLIP_TOP_BOTTOM, FLIP_LEFT_RIGHT
		# UDLR: ROTATE_270, FLIP_TOP_BOTTOM
		# UDRL: ROTATE_90
		# DULR: ROTATE_270
		# DURL: ROTATE_90, FLIP_TOP_BOTTOM
		primary, secondary = self.resolve("read").ids()
		# if xxDU
		if secondary == 3: self.image = self.image.transpose(Image.FLIP_TOP_BOTTOM)
		# if xxLR
		elif secondary == 0: self.image = self.image.transpose(Image.ROTATE_270)
		# if xxRL
		elif secondary == 1: self.image = self.image.transpose(Image.ROTATE_90)
		# if RLxx
		if primary == 1: self.image = self.image.transpose(Image.FLIP_LEFT_RIGHT)
		# if UDLR or DURL
		elif primary - secondary == 2: self.image = self.image.transpose(Image.FLIP_TOP_BOTTOM)
Example #55
0
 def list_directory(self, realpath, path_parts, page, content, query):
     """Dispatch a listing request: directory tree, file list / search
     results, or the main list page (rendered here and returned as a
     file object rewound to the start)."""
     if content == 'dir':
         return self.list_dirs(realpath, path_parts)
     if content == 'file':
         if query:
             return self.list_search(path_parts, page, query)
         return self.list_files(realpath, path_parts, page)
     # The list page itself: server head + a few JS globals + tail.
     f = StringIO()
     f.write(self.server.page_head)
     if path_parts:
         f.write('js_dir = "%s";\n' % cgi.escape('/'.join(path_parts)))
     else:
         f.write('js_dir = "";\n')
     f.write('js_query = "%s";\n' % cgi.escape(query))
     f.write('js_page_index = %d;\n' % page)
     f.write(self.server.page_tail)
     length = f.tell()
     f.seek(0)
     self.send_response(200)
     self.send_header('Content-type', 'text/html')
     self.send_header("Content-Length", str(length))
     self.end_headers()
     return f
Example #56
0
    def __init__(self, file=u'', filename=None, mimeType=None, disablePreview=None, disablePrint=None):
        """Wrap `file` (a string or a seekable file object) as a stored
        document, deriving filename, size and MIME type when not given.

        Raises ValueError for non-string objects without seek().
        NOTE: Python 2 code (`basestring`).
        """
        if file is None:
            file = u''

        # Strings are wrapped in a StringIO; anything else must at least
        # be seekable (needed for the size probe below).
        if isinstance(file, basestring):
            file = StringIO(file)
        elif not hasattr(file, 'seek'):
            raise ValueError("File object is required.")

        self.file = file

        if filename is None:
            # Last path component of the object's own filename, if any.
            filename = os.path.split(getattr(file, 'filename', u''))[-1]

        self.filename = filename

        # Measure the size by seeking to the end.
        # NOTE(review): the stream is left positioned at EOF here --
        # presumably guessMimetype / later readers rewind it; confirm.
        file.seek(0, 2)
        self.size = int(file.tell())

        if mimeType is None:
            mimeType = api.guessMimetype(file, self.filename)[0]

        self.mimeType = mimeType

        if disablePreview is None:
            disablePreview = False

        self.disablePreview = disablePreview

        if disablePrint is None:
            disablePrint = False

        self.disablePrint = disablePrint