Example #1
1
    def deserialize_tx_payload(self, data):
        """Parse a Bitcoin-style ``tx`` message payload into a dict.

        data: a raw payload string or a file-like object; strings are
        wrapped in StringIO so the parser can .read() from them.
        Returns a dict with version, tx_in/tx_out lists and counts,
        lock_time, and the hex-encoded transaction hash.
        """
        msg = {}
        # Accept a raw string by promoting it to a file-like object.
        if type(data) is str:
            data = StringIO(data)

        # 4-byte little-endian unsigned int.
        msg["version"] = struct.unpack("<I", data.read(4))[0]

        # Variable-length count followed by that many inputs.
        msg["tx_in_count"] = self.deserialize_int(data)
        msg["tx_in"] = []
        for _ in xrange(msg["tx_in_count"]):
            tx_in = self.deserialize_tx_in(data)
            msg["tx_in"].append(tx_in)

        # Variable-length count followed by that many outputs.
        msg["tx_out_count"] = self.deserialize_int(data)
        msg["tx_out"] = []
        for _ in xrange(msg["tx_out_count"]):
            tx_out = self.deserialize_tx_out(data)
            msg["tx_out"].append(tx_out)

        # 4-byte little-endian lock time.
        msg["lock_time"] = struct.unpack("<I", data.read(4))[0]

        # Calculate hash from the entire payload
        # Double-SHA256 of the re-serialized payload; [::-1] reverses the
        # bytes to the conventional displayed order before hex-encoding.
        # NOTE(review): assumes the sha256() helper returns raw digest
        # bytes (not a hashlib object) -- confirm its definition.
        payload = self.serialize_tx_payload(msg)
        msg["tx_hash"] = hexlify(sha256(sha256(payload))[::-1])

        return msg
Example #2
1
def cbk_write(buf):
    """Streaming write callback: feed *buf* to the transcoder process in
    4 KiB chunks, read transcoded audio back, and push it to icecast.

    Uses the module globals `proc` (transcoder Popen) and `icecast`
    (shout connection); both are rebound in place when they fail.
    """
    global proc, icecast
    data = StringIO(buf)
    data_l = len(buf)
    # Drain the whole buffer; tell() tracks how much has been consumed.
    while data_l > data.tell():
        try:
            proc.stdin.write(data.read(4096))
            # logger.debug("Data: %d", data.tell())
        except IOError:
            # Transcoder died mid-write: replace it and keep looping.
            logger.error("Restarting transcoder")
            proc.terminate()
            proc = Popen(settings.transcoder.split(" "), stdout=PIPE, stdin=PIPE)
        except Exception as e:
            logger.error(e)
        try:
            # Wait up to 0.5s for transcoded output before looping again.
            ready = select([proc.stdout], [], [], 0.5)
            if not len(ready[0]):
                continue
            tok = proc.stdout.read(4096)
            # logger.debug("Transcoded data: %d ", len(tok))
        except IOError:
            logger.error("Restarting transcoder")
            proc.terminate()
            proc = Popen(settings.transcoder.split(" "), stdout=PIPE, stdin=PIPE)
        except Exception as e:
            logger.error(e)
        try:
            # NOTE(review): if the read above failed before `tok` was ever
            # assigned, this raises NameError (caught below as Exception)
            # and triggers an icecast reconnect -- confirm that is intended.
            icecast.send(tok)
        except Exception as e:
            logger.error(e)
            icecast.close()
            sleep(1)
            icecast = create_shout()
            icecast.open()
            logger.error("Failed connection with icecast server")
    def next(self, cr, uid, ids, context=None):
        """Wizard step: inspect an uploaded Aeroo report zip archive.

        Decodes the base64 file stored on the wizard record, validates
        that it is a zip containing data.xml, extracts the report
        definition, builds a human-readable summary, writes it back to
        the wizard (state 'info'), and returns the action window that
        redisplays the wizard for these ids.

        Raises osv.except_osv when the upload is not a valid Aeroo
        report archive.
        """
        this = self.browse(cr, uid, ids[0], context=context)
        file_data = base64.decodestring(this.file)
        zip_stream = StringIO()
        zip_stream.write(file_data)
        zip_obj = zipfile.ZipFile(zip_stream, mode="r", compression=zipfile.ZIP_DEFLATED)
        if zipfile.is_zipfile(zip_stream):
            report_obj = self.pool.get("ir.actions.report.xml")
            context["allformats"] = True
            mimetypes = dict(report_obj._get_in_mimetypes(cr, uid, context=context))
            styles_select = dict(report_obj._columns["styles_mode"].selection)
            if "data.xml" in zip_obj.namelist():
                data = zip_obj.read("data.xml")
            else:
                raise osv.except_osv(_("Error!"), _("Aeroo report file is invalid!"))
            tree = lxml.etree.parse(StringIO(data))
            root = tree.getroot()
            info = ""
            # First report / stylesheet records in the archive describe
            # the report being imported.
            report = root.xpath("//data/record[@model='ir.actions.report.xml']")[0]
            style = root.xpath("//data/record[@model='report.stylesheets']")[0]
            rep_name = report.find("field[@name='name']").text
            rep_service = report.find("field[@name='report_name']").text
            rep_model = report.find("field[@name='model']").text
            # NOTE(review): eval() of an attribute taken from the uploaded
            # XML executes arbitrary expressions -- confirm uploads are
            # restricted to trusted (admin) users.
            rep_format = eval(report.find("field[@name='out_format']").attrib["search"], {})[0][2]
            rep_charset = report.find("field[@name='charset']").text
            parser_state = report.find("field[@name='parser_state']").text
            styles_mode = report.find("field[@name='styles_mode']").text
            tml_source = report.find("field[@name='tml_source']").text

            info += "Name: %s\n" % rep_name
            info += "Object: %s\n" % rep_model
            info += "Service Name: %s\n" % rep_service
            info += "Format: %s\n" % mimetypes.get(rep_format, "oo-odt")
            info += "Template: %s\n" % (tml_source == "parser" and "defined by parser" or "static")
            if rep_format == "genshi-raw":
                info += "Charset: %s\n" % rep_charset
            info += "Parser: %s\n" % (parser_state in ("def", "loc") and "customized" or "default")
            # NOTE(review): the xpath(...)[0] above raises IndexError when
            # no stylesheet record exists, so `style is not None` is always
            # true here -- verify the intended fallback behaviour.
            info += "Stylesheet: %s%s\n" % (
                styles_select[styles_mode].lower(),
                style is not None and " (%s)" % style.find("field[@name='name']").text,
            )
            self.write(
                cr,
                uid,
                ids,
                {"name": rep_service, "info": info, "state": "info", "file": base64.encodestring(data)},
                context=context,
            )
        else:
            raise osv.except_osv(_("Error!"), _("Is not Aeroo report file."))

        mod_obj = self.pool.get("ir.model.data")
        act_obj = self.pool.get("ir.actions.act_window")

        # Re-open this wizard's action window restricted to these records.
        mod_id = mod_obj.search(cr, uid, [("name", "=", "action_aeroo_report_import_wizard")])[0]
        res_id = mod_obj.read(cr, uid, mod_id, ["res_id"])["res_id"]
        act_win = act_obj.read(cr, uid, res_id, [])
        act_win["domain"] = [("id", "in", ids)]
        act_win["context"] = {"default_ids": ids}
        return act_win
Example #4
0
    def __init__(self, cr, name, table, rml=False, parser=False, header=True, store=False):
        """Register an Aeroo report service; when the matching
        ir.actions.report.xml record is configured with
        preload_mode == 'preload', eagerly load its template (and
        stylesheet) into an OOSerializer.
        """
        super(Aeroo_report, self).__init__(name, table, rml, parser, header, store)
        self.logger("registering %s (%s)" % (name, table), netsvc.LOG_INFO)
        # Tracks currently running print jobs (populated elsewhere).
        self.active_prints = {}

        pool = pooler.get_pool(cr.dbname)
        ir_obj = pool.get("ir.actions.report.xml")
        # Service names are registered as "report.<name>": strip the prefix.
        name = name.startswith("report.") and name[7:] or name
        try:
            report_xml_ids = ir_obj.search(cr, 1, [("report_name", "=", name)])
            if report_xml_ids:
                report_xml = ir_obj.browse(cr, 1, report_xml_ids[0])
            else:
                report_xml = False

            if report_xml and report_xml.preload_mode == "preload":
                # Template is stored base64-encoded on the report record.
                file_data = report_xml.report_sxw_content
                if not file_data:
                    self.logger("template is not defined in %s (%s) !" % (name, table), netsvc.LOG_WARNING)
                    template_io = None
                else:
                    template_io = StringIO()
                    template_io.write(base64.decodestring(file_data))
                    style_io = self.get_styles_file(cr, 1, report_xml)
                if template_io:
                    self.serializer = OOSerializer(template_io, oo_styles=style_io)
        # Python 2 except syntax; preload is best-effort -- any failure is
        # printed and registration continues without a preloaded template.
        except Exception, e:
            print e
Example #5
0
def get_ini(queryset):
    """Serialize the given service codes, their providers, and any
    provider extras into INI-formatted text and return it as a string."""
    buf = StringIO()
    parser = ConfigParser()
    providers = set()

    # One [servicecode <name>] section per entry in the queryset.
    for code in queryset:
        sec = "servicecode {0}".format(code.name)
        parser.add_section(sec)
        parser.set(sec, "provider", code.provider)
        providers.add(code.provider)
        parser.set(sec, "country", code.provider.country.config_name)
        parser.set(sec, "tariff", code.tariff)
        parser.set(sec, "currency", code.currency)
        for option in code.owns.all():
            parser.set(sec, option.key.name, option.value)

    # One [provider <name>] section per distinct provider seen above.
    for prov in providers:
        sec = "provider {0}".format(prov.name)
        parser.add_section(sec)
        parser.set(sec, "country", prov.country.config_name)
        parser.set(sec, "timeout", prov.timeout)
        parser.set(sec, "adaptor", prov.adaptor)
        parser.set(sec, "cdr_string", prov.cdr_string)
        services = prov.specialization.all()
        for option in prov.owns.all():
            parser.set(sec, option.key.name, option.value)
        if services:
            # Extras get their own [extra <provider> <name>] sections.
            parser.set(sec, "service", ",".join([extra.name for extra in services]))
            for extra in services:
                extra_sec = "extra {0} {1}".format(prov.name, extra.name)
                parser.add_section(extra_sec)
                for option in extra.owns.all():
                    parser.set(extra_sec, option.key.name, option.value)

    parser.write(buf)
    return buf.getvalue()
Example #6
0
    def __iter__(self):
        """Yield the stream's contents line by line.

        Repeatedly reads everything available via self.read(-1), splits
        it into lines with StringIO, and yields complete (newline-
        terminated) lines; a trailing partial line is carried over into
        the next read and yielded unterminated only at EOF.
        """
        # TODO: speedup
        line = ""
        while True:
            data = self.read(-1)
            if not data:
                break
            generator = StringIO(data)
            # Carried-over text is always a partial line (no newline yet).
            assert "\n" not in line, line
            line += generator.next()
            if line.endswith("\n"):
                yield line
                line = ""

                ll = list(generator)
                if not ll:
                    continue

                # All but the last chunk are guaranteed complete lines.
                for line in ll[:-1]:
                    yield line
                # The final chunk may or may not be newline-terminated;
                # keep it as carry-over unless it is complete.
                line = ll[-1]
                if line.endswith("\n"):
                    yield line
                    line = ""

        # EOF: flush any remaining partial line.
        if line:
            yield line
def _update_key_value(infile, key, value):
    """
    Update hostname on system
    """
    outfile = StringIO()

    found = False
    for line in infile:
        line = line.strip()
        if "=" in line:
            k, v = line.split("=", 1)
            k = k.strip()
            if k == key:
                print >> outfile, "%s=%s" % (key, value)
                found = True
            else:
                print >> outfile, line
        else:
            print >> outfile, line

    if not found:
        print >> outfile, "%s=%s" % (key, value)

    outfile.seek(0)
    return outfile.read()
Example #8
0
    def test_dumpdata_uses_default_manager(self):
        """
        Regression for #11286
        Ensure that dumpdata honors the default manager
        Dump the current contents of the database as a JSON fixture
        """
        management.call_command("loaddata", "animal.xml", verbosity=0, commit=False)
        management.call_command("loaddata", "sequence.json", verbosity=0, commit=False)
        animal = Animal(name="Platypus", latin_name="Ornithorhynchus anatinus", count=2, weight=2.2)
        animal.save()

        stdout = StringIO()
        management.call_command("dumpdata", "fixtures_regress.animal", format="json", stdout=stdout)

        # Output order isn't guaranteed, so check for parts
        data = stdout.getvalue()

        # Get rid of artifacts like '000000002' to eliminate the differences
        # between different Python versions.
        # Raw string: "\d" in a plain literal is an invalid escape sequence
        # (DeprecationWarning, and a SyntaxWarning/error on newer Pythons).
        data = re.sub(r"0{6,}\d", "", data)

        lion_json = '{"pk": 1, "model": "fixtures_regress.animal", "fields": {"count": 3, "weight": 1.2, "name": "Lion", "latin_name": "Panthera leo"}}'
        emu_json = '{"pk": 10, "model": "fixtures_regress.animal", "fields": {"count": 42, "weight": 1.2, "name": "Emu", "latin_name": "Dromaius novaehollandiae"}}'
        platypus_json = '{"pk": %d, "model": "fixtures_regress.animal", "fields": {"count": 2, "weight": 2.2, "name": "Platypus", "latin_name": "Ornithorhynchus anatinus"}}'
        platypus_json = platypus_json % animal.pk

        # Length check plus membership checks tolerate arbitrary ordering.
        self.assertEqual(len(data), len("[%s]" % ", ".join([lion_json, emu_json, platypus_json])))
        self.assertTrue(lion_json in data)
        self.assertTrue(emu_json in data)
        self.assertTrue(platypus_json in data)
Example #9
0
def scale_image(img_upload, img_max_size):
    """Center-crop an uploaded image to the target aspect ratio, resize
    it to img_max_size, and return the JPEG bytes wrapped in a
    ContentFile. Returns None when the upload cannot be opened."""
    try:
        source = Image.open(img_upload)
    except IOError:
        return None

    in_w, in_h = source.size
    out_w, out_h = img_max_size
    in_aspect = float(in_w) / float(in_h)
    out_aspect = float(out_w) / float(out_h)

    if out_aspect < in_aspect:
        # Source is wider than the target: keep full height, trim sides.
        box_h = in_h
        box_w = box_h * out_aspect
        left = int(float(in_w - box_w) / 2)
        top = 0
    else:
        # Source is taller than the target: keep full width, trim
        # top/bottom with an upward bias (a third of the slack, not half).
        box_w = in_w
        box_h = box_w / out_aspect
        left = 0
        top = int(float(in_h - box_h) / 3)

    cropped = source.crop((left, top, left + int(box_w), top + int(box_h)))
    scaled = cropped.resize((out_w, out_h), Image.ANTIALIAS)

    # JPEG output requires an RGB image.
    if scaled.mode != "RGB":
        scaled = scaled.convert("RGB")

    buf = StringIO()
    scaled.save(buf, "JPEG")
    return ContentFile(buf.getvalue())
Example #10
0
File: dill.py Project: mindw/dill
def _create_stringi(value, position, closed):
    f = StringIO(value)
    if closed:
        f.close()
    else:
        f.seek(position)
    return f
Example #11
0
    def profile(self, request):
        """Start/stop the python profiler, returns profile results.

        request.properties selects the action: 'start' enables a lazily
        created Profile stored on the instance; 'stop' produces stats,
        where the stop value picks the output format: 'kgrind'
        (kcachegrind data via pyprof2calltree), 'visualize' (launch
        kcachegrind), or plain pstats text. Raises BadRequestStatus for
        stop-before-start or an unrecognized request.
        """
        # The profiler is kept in the instance dict so it survives
        # between the start and stop requests.
        profile = self.__dict__.get("_profile")
        if "start" in request.properties:
            if not profile:
                profile = self.__dict__["_profile"] = Profile()
            profile.enable()
            self._log(LOG_INFO, "Started python profiler")
            return (OK, None)
        if not profile:
            raise BadRequestStatus("Profiler not started")
        if "stop" in request.properties:
            profile.create_stats()
            self._log(LOG_INFO, "Stopped python profiler")
            out = StringIO()
            stats = pstats.Stats(profile, stream=out)
            try:
                stop = request.properties["stop"]
                if stop == "kgrind":  # Generate kcachegrind output using pyprof2calltree
                    from pyprof2calltree import convert

                    convert(stats, out)
                elif stop == "visualize":  # Start kcachegrind using pyprof2calltree
                    from pyprof2calltree import visualize

                    visualize(stats)
                else:
                    stats.print_stats()  # Plain python profile stats
                return (OK, out.getvalue())
            finally:
                # Always release the in-memory buffer.
                out.close()
        raise BadRequestStatus("Bad profile request %s" % (request))
Example #12
0
class TBufferedTransport(TTransportBase, CReadableTransport):

    """Class that wraps another transport and buffers its I/O.

  The implementation uses a (configurable) fixed-size read buffer
  but buffers all writes until a flush is performed.
  """

    DEFAULT_BUFFER = 4096  # default read-buffer size in bytes

    def __init__(self, trans, rbuf_size=DEFAULT_BUFFER):
        # trans: the underlying transport all real I/O is delegated to.
        self.__trans = trans
        self.__wbuf = StringIO()
        self.__rbuf = StringIO("")
        self.__rbuf_size = rbuf_size

    def isOpen(self):
        return self.__trans.isOpen()

    def open(self):
        return self.__trans.open()

    def close(self):
        return self.__trans.close()

    def read(self, sz):
        # Serve from the local read buffer while it still has data.
        ret = self.__rbuf.read(sz)
        if len(ret) != 0:
            return ret

        # Buffer exhausted: refill with one large read from the
        # underlying transport, then retry (may return < sz bytes).
        self.__rbuf = StringIO(self.__trans.read(max(sz, self.__rbuf_size)))
        return self.__rbuf.read(sz)

    def write(self, buf):
        # Writes only accumulate locally until flush().
        self.__wbuf.write(buf)

    def flush(self):
        out = self.__wbuf.getvalue()
        # reset wbuf before write/flush to preserve state on underlying failure
        self.__wbuf = StringIO()
        self.__trans.write(out)
        self.__trans.flush()

    # Implement the CReadableTransport interface.
    @property
    def cstringio_buf(self):
        # The C-accelerated protocol reads directly from this buffer.
        return self.__rbuf

    def cstringio_refill(self, partialread, reqlen):
        # Called when the buffer ran dry mid-value: must return a buffer
        # containing at least reqlen bytes (starting with partialread).
        retstring = partialread
        if reqlen < self.__rbuf_size:
            # try to make a read of as much as we can.
            retstring += self.__trans.read(self.__rbuf_size)

        # but make sure we do read reqlen bytes.
        if len(retstring) < reqlen:
            retstring += self.__trans.readAll(reqlen - len(retstring))

        self.__rbuf = StringIO(retstring)
        return self.__rbuf
Example #13
0
    def save(self, *args, **kwargs):
        """Resize the attached image to the carousel's dimensions before
        delegating to the parent model save.

        When only one of carousel.width/height is positive the other is
        derived from the source aspect ratio; when both are positive
        carousel.size() is used verbatim. The resized image replaces the
        stored file (save=False prevents a recursive model save).
        """
        if self.image:
            img = Image.open(self.image.file)
            # Normalize exotic modes (palette, CMYK, ...) to RGB.
            if img.mode not in ("L", "RGB"):
                img = img.convert("RGB")

            if self.carousel.width > 0 or self.carousel.height > 0:
                if self.carousel.width > 0 and self.carousel.height <= 0:
                    # Fixed width: scale height proportionally.
                    x = self.carousel.width
                    ratio = self.carousel.width / float(img.size[0])
                    y = int(ratio * img.size[1])
                elif self.carousel.height > 0 and self.carousel.width <= 0:
                    # Fixed height: scale width proportionally.
                    y = self.carousel.height
                    ratio = self.carousel.height / float(img.size[1])
                    x = int(ratio * img.size[0])
                else:
                    # Both dimensions set: use the carousel's exact size.
                    x, y = self.carousel.size()
                img = img.resize((x, y), Image.ANTIALIAS)

                ext = os.path.splitext(self.image.name)[-1][1:]
                img_format = ext
                if img_format.lower() == "jpg":
                    # PIL expects the format name "JPEG", not "jpg".
                    img_format = "JPEG"

                temp_handle = StringIO()
                img.save(temp_handle, img_format)
                temp_handle.seek(0)

                con_type = "image/%s" % ext

                suf = SimpleUploadedFile(ext, temp_handle.read(), content_type=con_type)
                fname = "%s.%s" % (os.path.splitext(self.image.name)[0], ext)
                self.image.save(fname, suf, save=False)

        # NOTE(review): *args/**kwargs are accepted but not forwarded to
        # the parent save -- confirm whether that is intentional.
        super(CarouselItem, self).save()
Example #14
0
 def _read(self, name):
     """Fetch object *name* from SAE storage into an in-memory buffer.

     Returns a StringIO rewound to the start of the object's data; the
     buffer is empty when the object does not exist (the
     ObjectNotExistsError is deliberately swallowed).
     """
     memory_file = StringIO()
     try:
         o = self.client.get(self.prefix, name)
         memory_file.write(o.data)
     except sae.storage.ObjectNotExistsError:
         # Missing objects are treated as empty, not as an error.
         pass
     # BUG FIX: the buffer was previously built and then discarded
     # (the function had no return); rewind it and hand it back.
     memory_file.seek(0)
     return memory_file
Example #15
0
    def spellit(self, irc, msg, args, text):
        """<text>

        Returns <text>, phonetically spelled out.
        """
        # Build the replacement table from whichever categories are
        # enabled in the registry.
        d = {}
        if self.registryValue("spellit.replaceLetters"):
            d.update(self._spellLetters)
        if self.registryValue("spellit.replaceNumbers"):
            d.update(self._spellNumbers)
        if self.registryValue("spellit.replacePunctuation"):
            d.update(self._spellPunctuation)
        # A bug in unicode on OSX prevents me from testing this.
        ##         dd = {}
        ##         for (c, v) in d.iteritems():
        ##             dd[ord(c)] = unicode(v + ' ')
        ##         irc.reply(unicode(text).translate(dd))
        out = StringIO()
        write = out.write
        for c in text:
            try:
                # Replaced characters are preceded by a separator space.
                c = d[c]
                write(" ")
            except KeyError:
                pass
            write(c)
        result = out.getvalue()
        # BUG FIX: only strip the separator space added before a leading
        # replacement. The old out.getvalue()[1:] unconditionally dropped
        # the first character, eating it whenever the first character of
        # the text had no phonetic replacement.
        if result.startswith(" "):
            result = result[1:]
        irc.reply(result)
Example #16
0
 def create_thumbnail(self, size, quality=None):
     """Create and store a size x size thumbnail of the user's avatar.

     The source is center-cropped to a square, converted to RGB when
     needed, resized, and saved through the avatar's storage backend.
     The cached thumbnail for this size is invalidated up front, and
     IOErrors during processing are swallowed.
     """
     # invalidate the cache of the thumbnail with the given size first
     invalidate_cache(self.user, size)
     try:
         orig = self.avatar.storage.open(self.avatar.name, "rb").read()
         image = Image.open(StringIO(orig))
         quality = quality or AVATAR_THUMB_QUALITY
         (w, h) = image.size
         if w != size or h != size:
             if w > h:
                 # Landscape: crop equal margins off left and right.
                 diff = (w - h) / 2
                 image = image.crop((diff, 0, w - diff, h))
             else:
                 # Portrait/square: crop equal margins off top and bottom.
                 diff = (h - w) / 2
                 image = image.crop((0, diff, w, h - diff))
             if image.mode != "RGB":
                 image = image.convert("RGB")
             image = image.resize((size, size), AVATAR_RESIZE_METHOD)
             thumb = StringIO()
             image.save(thumb, AVATAR_THUMB_FORMAT, quality=quality)
             thumb_file = ContentFile(thumb.getvalue())
         else:
             # Already the right size: store the original bytes untouched.
             thumb_file = ContentFile(orig)
         thumb = self.avatar.storage.save(self.avatar_name(size), thumb_file)
     except IOError:
         return  # What should we do here?  Render a "sorry, didn't work" img?
Example #17
0
 def save_processed(self, processed_crash):
     """Persist a processed crash as gzip-compressed JSON (.jsonz),
     keyed by the crash's uuid; the caller's dict is not mutated."""
     crash_id = processed_crash["uuid"]
     # Precautionary copy so serialization never touches the original.
     crash_copy = processed_crash.copy()
     buf = StringIO()
     gz = gzip.GzipFile(mode="wb", fileobj=buf)
     try:
         json.dump(crash_copy, gz, default=dates_to_strings_for_json)
     finally:
         # Close the gzip stream so the compressed tail is flushed to buf.
         gz.close()
     filename = crash_id + self.config.jsonz_file_suffix
     self._save_files(crash_id, {filename: buf.getvalue()})
Example #18
0
def test(msg, results):
    """Parse RFC 822 message *msg* and check its recipients and date.

    results: a sequence of (name, address) pairs expected from the To
    and Cc headers, in order. Mismatches and extras are printed; with
    the module-level `verbose` flag set, every comparison is logged.
    The Date header is checked against one fixed expected tuple.
    Python 2 only (print statements, rfc822 module).
    """
    fp = StringIO()
    fp.write(msg)
    fp.seek(0)
    m = rfc822.Message(fp)
    i = 0

    for n, a in m.getaddrlist("to") + m.getaddrlist("cc"):
        if verbose:
            print "name:", repr(n), "addr:", repr(a)
        try:
            mn, ma = results[i][0], results[i][1]
        except IndexError:
            # More addresses parsed than expected results supplied.
            print "extra parsed address:", repr(n), repr(a)
            continue
        i = i + 1
        if mn == n and ma == a:
            if verbose:
                print "    [matched]"
        else:
            if verbose:
                print "    [no match]"
            print "not found:", repr(n), repr(a)

    out = m.getdate("date")
    if out:
        if verbose:
            print "Date:", m.getheader("date")
        # Hard-coded date tuple from the reference test message.
        if out == (1999, 1, 13, 23, 57, 35, 0, 0, 0):
            if verbose:
                print "    [matched]"
        else:
            if verbose:
                print "    [no match]"
            print "Date conversion failed:", out
Example #19
0
 def test_error_message(self):
     """
     (Regression for #9011 - error message is correct)
     """
     # Capture stderr from loaddata and verify the exact message emitted
     # for a fixture whose contents cannot be parsed.
     stderr = StringIO()
     management.call_command("loaddata", "bad_fixture2", "animal", verbosity=0, commit=False, stderr=stderr)
     self.assertEqual(stderr.getvalue(), "No fixture data found for 'bad_fixture2'. (File format may be invalid.)\n")
Example #20
0
 def testReadLines(self):
     # "Test BZ2File.readlines()"
     self.createTempFile()
     with BZ2File(self.filename) as bz2f:
         # readlines() rejects a None size argument.
         self.assertRaises(TypeError, bz2f.readlines, None)
         sio = StringIO(self.TEXT)
         # Decompressed content must match the source text line-for-line.
         self.assertEqual(bz2f.readlines(), sio.readlines())
Example #21
0
    def from_python(self, data):
        """Serialize *data* using the format configured on this instance
        ('ini', 'json' or 'pickle'); any other format raises
        ImproperlyConfigured."""
        data = self._batch_method("to_python", data)

        if self.format == "ini":
            buf = StringIO()

            # Dict values become INI sections; scalar values are routed
            # to the parser's DEFAULT section.
            sections, defaults = {}, {}
            for key, value in data.items():
                if isinstance(value, dict):
                    sections[key] = value
                else:
                    defaults[key] = value

            parser = ConfigParser(defaults=defaults)

            for section, contents in sections.items():
                if not parser.has_section(section):
                    parser.add_section(section)
                for option, value in contents.items():
                    parser.set(section, option, str(value))

            parser.write(buf)
            return buf.getvalue()

        if self.format == "json":
            kwargs = self._prepare_json_kwargs(dumps=True)
            return json.dumps(data, **kwargs)

        if self.format == "pickle":
            return pickle.dumps(data)

        raise ImproperlyConfigured("File format %r is not supported." % self.format)
Example #22
0
 def testXReadLines(self):
     # "Test BZ2File.xreadlines()"
     self.createTempFile()
     # Use a context manager (as testReadLines does) so the file is
     # closed even when the assertion fails; the old explicit close()
     # was skipped on failure, leaking the handle.
     with BZ2File(self.filename) as bz2f:
         sio = StringIO(self.TEXT)
         self.assertEqual(list(bz2f.xreadlines()), sio.readlines())
Example #23
0
    def load_resource(self, resource, options=None, **new_options):
        """Resolve *resource* to a seekable (multi-read) file-like object.

        resource may be an http(s) URL (fetched with urllib2), a path
        relative to options['directory'] (read from disk), or an
        already-open file-like object (wrapped in StringIO when it
        cannot seek). Returns (resource, resource_name).
        """
        options = options or utils.merged_dict(self.options, new_options)

        if isinstance(resource, basestring):
            if re.match(r"https?://", resource):
                self.logger.info("Fetching remote resource: " + resource)
                # NOTE(review): the urlopen handle is never closed here --
                # the connection lives until garbage collection.
                contents = urllib2.urlopen(resource).read()

            else:
                directory = options.get("directory", "")
                location = os.path.join(directory, resource)

                # Extract the contents so we can close the file
                with open(location, "rb") as resource_file:
                    contents = resource_file.read()

            # StringIO implements a fuller file-like object
            resource_name = resource
            resource = StringIO(contents)

        else:
            # Totally not designed for large files!!
            # We need a multiread resource, so wrap it in StringIO
            if not hasattr(resource, "seek"):
                resource = StringIO(resource.read())

            resource_name = getattr(resource, "name", "Unknown")

        return (resource, resource_name)
Example #24
0
def test_latex_units():
    """
    Check to make sure that Latex and AASTex writers attempt to fall
    back on the **unit** attribute of **Column** if the supplied
    **latexdict** does not specify units.
    """
    t = table.Table([table.Column(name="date", data=["a", "b"]), table.Column(name="NUV exp.time", data=[1, 2])])
    latexdict = copy.deepcopy(ascii.latexdicts["AA"])
    latexdict["units"] = {"NUV exp.time": "s"}
    out = StringIO()
    expected = """\
\\begin{table}{cc}
\\tablehead{\\colhead{date} & \\colhead{NUV exp.time}\\\\ \\colhead{ } & \\colhead{s}}
\\startdata
a & 1 \\\\
b & 2 \\\\
\\enddata
\\end{table}
"""
    ascii.write(t, out, format="aastex", latexdict=latexdict)
    assert out.getvalue() == expected
    # use unit attribute instead
    t["NUV exp.time"].unit = units.s
    t["date"].unit = units.yr
    out = StringIO()
    ascii.write(t, out, format="aastex", latexdict=ascii.latexdicts["AA"])
    # Raw strings: "\m" in a plain literal is an invalid escape sequence
    # (only kept literal by CPython leniency; warns/errors on newer
    # versions). The rendered text is unchanged.
    assert out.getvalue() == expected.replace("colhead{s}", r"colhead{$\mathrm{s}$}").replace(
        "colhead{ }", r"colhead{$\mathrm{yr}$}"
    )
Example #25
0
 def _get_store_info(self, value, min_compress_len):
     """Serialize *value* for memcached-style storage.

     Returns (flags, value): flags records the original type
     (integer/long/pickle bits) plus a compression bit. Unicode is
     UTF-8 encoded, ints/longs are stringified, and anything else is
     pickled. Values longer than min_compress_len are zlib-compressed
     when that actually shrinks them (0 disables compression).
     """
     flags = 0
     if isinstance(value, unicode):
         value = value.encode("utf-8")
         # Compression is disabled for encoded text.
         min_compress_len = 0
     elif isinstance(value, str):
         pass
     elif isinstance(value, int):
         flags |= _FLAG_INTEGER
         value = "%d" % value
         # Small stringified ints are never worth compressing.
         min_compress_len = 0
     elif isinstance(value, long):
         flags |= _FLAG_LONG
         value = "%d" % value
     else:
         # Arbitrary object: fall back to pickle.
         flags |= _FLAG_PICKLE
         f = StringIO()
         pickler = pickle.Pickler(f)
         pickler.dump(value)
         value = f.getvalue()
     lv = len(value)
     if min_compress_len and lv > min_compress_len:
         comp_val = zlib.compress(value)
         # Only keep the compressed form if it is actually smaller.
         if len(comp_val) < lv:
             flags |= _FLAG_COMPRESSED
             value = comp_val
     return flags, value
Example #26
0
File: pil.py Project: kkung/thumbor
    def read(self, extension=None, quality=None):
        """Encode self.image and return the resulting bytes.

        extension: output format extension (defaults to the engine's
        current one); quality: save quality (defaults to the request's).
        JPEG output enables optimize+progressive; an ICC profile is
        attached when one was captured. An extension missing from
        FORMATS falls back to PNG (for alpha/palette modes) or JPEG.
        """
        if quality is None:
            quality = self.context.request.quality
        # returns image buffer in byte format.
        img_buffer = StringIO()

        ext = extension or self.extension
        options = {"quality": quality}
        if ext == ".jpg" or ext == ".jpeg":
            options["optimize"] = True
            options["progressive"] = True

        if self.icc_profile is not None:
            options["icc_profile"] = self.icc_profile

        try:
            self.image.save(img_buffer, FORMATS[ext], **options)
        except IOError:
            # Save can fail with the extra options; retry with defaults.
            logger.warning("Could not save as improved image, consider to increase ImageFile.MAXBLOCK")
            self.image.save(img_buffer, FORMATS[ext])
        except KeyError:
            # extension is not present or could not help determine format => force JPEG
            # TODO : guess format by image headers maybe
            if self.image.mode in ["P", "RGBA", "LA"]:
                self.image.format = FORMATS[".png"]
                self.image.save(img_buffer, FORMATS[".png"])
            else:
                self.image.format = FORMATS[".jpg"]
                self.image.save(img_buffer, FORMATS[".jpg"])

        results = img_buffer.getvalue()
        img_buffer.close()
        return results
Example #27
0
 def dumps(self, arg, proto=0):
     """Pickle *arg* with cPickle at protocol *proto* and return the
     bytes; fast mode disables the memo (no cycle support)."""
     buf = StringIO()
     pickler = cPickle.Pickler(buf, proto)
     pickler.fast = 1
     pickler.dump(arg)
     return buf.getvalue()
 def getfile(self, file, rev):
     # TODO: ra.get_file transmits the whole file instead of diffs.
     if file in self.removed:
         raise IOError
     mode = ""
     try:
         new_module, revnum = revsplit(rev)[1:]
         if self.module != new_module:
             self.module = new_module
             self.reparent(self.module)
         io = StringIO()
         info = svn.ra.get_file(self.ra, file, revnum, io)
         data = io.getvalue()
         # ra.get_file() seems to keep a reference on the input buffer
         # preventing collection. Release it explicitly.
         io.close()
         if isinstance(info, list):
             info = info[-1]
         mode = ("svn:executable" in info) and "x" or ""
         mode = ("svn:special" in info) and "l" or mode
     except SubversionException, e:
         notfound = (svn.core.SVN_ERR_FS_NOT_FOUND, svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
         if e.apr_err in notfound:  # File not found
             raise IOError
         raise
Example #29
0
def pickle_unpickle(result):
    """Assert that *result* renders identically before and after a
    pickle round-trip, by comparing the .show() output of both."""
    clone = loads(dumps(result))
    before = StringIO()
    after = StringIO()
    result.show(out=before)
    clone.show(out=after)
    assert before.getvalue() == after.getvalue()
Example #30
0
def unparse(input_dict, output=None, encoding="utf-8", full_document=True, **kwargs):
    """Emit an XML document for the given `input_dict` (reverse of `parse`).

    The document is written to `output` when given (a file-like object);
    otherwise it is built in memory and returned as a string.

    Keys prefixed with `attr_prefix` (default `'@'`) become node
    attributes, and the `cdata_key` key (default `'#text'`) becomes
    character data. Pass `pretty=True` for pretty-printing; `newl` and
    `indent` customize the line terminator and indentation.

    Raises ValueError when `full_document` is true and `input_dict`
    does not have exactly one root key.
    """
    if full_document and len(input_dict) != 1:
        raise ValueError("Document must have exactly one root.")
    return_string = output is None
    if return_string:
        output = StringIO()
    handler = XMLGenerator(output, encoding)
    if full_document:
        handler.startDocument()
    for key, value in input_dict.items():
        _emit(key, value, handler, full_document=full_document, **kwargs)
    if full_document:
        handler.endDocument()
    if not return_string:
        return None
    text = output.getvalue()
    try:  # pragma no cover
        # Python 2 may hand back bytes; normalize to text.
        text = text.decode(encoding)
    except AttributeError:  # pragma no cover
        pass
    return text