Beispiel #1
0
def test_read_numeric():
    """Round-trip simple numeric MAT5 tags through ``VarReader5.read_numeric``.

    For each sample dtype/value/mdtype triple and both byte orders, checks
    the reader's endianness flags and that single and back-to-back tags
    (both normal and small-data-element form) read back the written value.
    """
    stream = cStringIO()
    reader = _make_readerlike(stream)
    cases = (('u2', 30, mio5p.miUINT16),
             ('i4', 1, mio5p.miINT32),
             ('i2', -1, mio5p.miINT16))
    for base_dt, value, mdtype in cases:
        for order in ('<', '>'):
            reader.byte_order = order
            var_reader = m5u.VarReader5(reader)
            yield assert_equal, var_reader.little_endian, order == '<'
            yield assert_equal, var_reader.is_swapped, order != boc.native_code
            for small_element in (False, True):
                dtype = np.dtype(base_dt).newbyteorder(order)
                tag = _make_tag(dtype, value, mdtype, small_element)
                tag_bytes = tag.tostring()
                _write_stream(stream, tag_bytes)
                yield assert_equal, var_reader.read_numeric(), value
                # two sequential reads
                _write_stream(stream, tag_bytes, tag_bytes)
                yield assert_equal, var_reader.read_numeric(), value
                yield assert_equal, var_reader.read_numeric(), value
Beispiel #2
0
def test_read_stream():
    """``streams._read_into`` must return exactly the bytes in the stream."""
    tag = _make_tag('i4', 1, mio5p.miINT32, sde=True)
    payload = tag.tostring()
    stream = streams.make_stream(cStringIO(payload))
    got = streams._read_into(stream, tag.itemsize)
    yield assert_equal, got, payload
Beispiel #3
0
 def convertToJPGandResize(self, ratio, infile, outfile, cropme):
     """Convert ``infile`` to ``outfile``, optionally cropping and padding.

     Picks CMYK output for PDF/X-A targets (unless figure placeholders are
     enabled), otherwise RGB.  If the image is narrower than ``ratio``
     (width/height), it is centered on a white canvas widened to match.
     Returns True on completion.
     """
     # Choose the target color mode up front.
     if self.ispdfxa != "None" and not self.printer.get(
             "c_figplaceholders"):
         target_mode = "CMYK"
     else:
         target_mode = "RGB"
     with open(infile, "rb") as source:
         image = Image.open(cStringIO(source.read()))
     if cropme:
         image = self.cropBorder(image)
     image.load()  # force pixel data to be read in now
     width = image.size[0]
     height = image.size[1]
     if ratio is not None and width / height < ratio:
         # Too narrow: alpha-composite onto a white canvas of the target ratio.
         rgba_image = image.convert("RGBA")
         canvas_width = int(height * ratio)
         canvas = Image.new("RGBA", (canvas_width, height),
                            color=(255, 255, 255, 255))
         canvas.alpha_composite(rgba_image,
                                (int((canvas_width - width) / 2), 0))
         width = canvas.size[0]
         height = canvas.size[1]
         result = canvas.convert(target_mode)
     elif image.mode != target_mode:
         result = image.convert(target_mode)
     else:
         result = image
     if target_mode == "CMYK":
         self.cmytocmyk(result)
     result.save(outfile)
     return True
Beispiel #4
0
def test_zero_byte_string():
    """read_char must return spaces for char data stored with zero bytes."""
    # Tests hack to allow chars of non-zero length, but 0 bytes
    # make reader-like thing
    str_io = cStringIO()
    r = _make_readerlike(str_io, boc.native_code)
    c_reader = m5u.VarReader5(r)
    # Minimal MAT5 tag layout: uint32 mdtype followed by uint32 byte_count.
    tag_dt = np.dtype([('mdtype', 'u4'), ('byte_count', 'u4')])
    tag = np.zeros((1,), dtype=tag_dt)
    tag['mdtype'] = mio5p.miINT8
    tag['byte_count'] = 1
    hdr = m5u.VarHeader5()
    # Try when string is 1 length
    hdr.set_dims([1,])
    # Tag plus 8 space bytes — presumably one char of payload plus alignment
    # padding; TODO confirm against the tag layout used by the reader.
    _write_stream(str_io, tag.tostring() + asbytes('        '))
    str_io.seek(0)
    val = c_reader.read_char(hdr)
    assert_equal(val, ' ')
    # Now when string has 0 bytes 1 length
    tag['byte_count'] = 0
    _write_stream(str_io, tag.tostring())
    str_io.seek(0)
    val = c_reader.read_char(hdr)
    # With no stored bytes the reader is expected to fabricate a space.
    assert_equal(val, ' ')
    # Now when string has 0 bytes 4 length
    # (reuses the zero-byte tag already in the stream, only the dims change)
    str_io.seek(0)
    hdr.set_dims([4,])
    val = c_reader.read_char(hdr)
    assert_array_equal(val, [' '] * 4)
Beispiel #5
0
def string_io(data=None):  # cStringIO can't handle unicode
    """Wrap *data* in an in-memory stream.

    Tries the byte-oriented ``cStringIO`` first; if *data* cannot be
    coerced to bytes (unicode text, None, ...) falls back to ``StringIO``.
    """
    try:
        stream = cStringIO(bytes(data))
    except (UnicodeEncodeError, TypeError):
        stream = StringIO(data)
    return stream
Beispiel #6
0
def unzip(contents: bytes) -> bytes:
    """Return the decompressed payload of the first entry in a zip archive."""
    mod_logging.debug('Unzipping %s bytes' % len(contents))
    archive = mod_zipfile.ZipFile(cStringIO(contents))
    first_entry = archive.infolist()[0]
    payload = archive.open(first_entry).read()
    mod_logging.debug('Unzipped')
    return payload
Beispiel #7
0
def unzip(contents: bytes) -> bytes:
    """Return the payload of the first zip entry whose name is not dot-prefixed.

    Raises Exception when every entry starts with ``.``.
    """
    mod_logging.debug('Unzipping %s bytes' % len(contents))
    archive = mod_zipfile.ZipFile(cStringIO(contents))
    entries = archive.infolist()
    for entry in entries:
        if entry.filename[0] != ".":
            payload = archive.open(entry).read()
            mod_logging.debug('Unzipped')
            return payload
    raise Exception(f"No valid file found in {entries}")
Beispiel #8
0
def zip(contents: bytes, file_name: str) -> bytes:
    """Return a zip archive (as bytes) containing *contents* under *file_name*.

    NOTE: shadows the builtin ``zip`` — kept for caller compatibility.
    """
    mod_logging.debug('Zipping %s bytes' % len(contents))
    buffer = cStringIO()
    archive = mod_zipfile.ZipFile(buffer, 'w', mod_zipfile.ZIP_DEFLATED,
                                  False)
    archive.writestr(file_name, contents)
    archive.close()
    buffer.seek(0)
    mod_logging.debug('Zipped')
    return buffer.read()
Beispiel #9
0
def setup():
    """Create the module-level stream fixtures: a real file handle (fs),
    a BytesIO (gs) and a cStringIO (cs), all holding the same payload."""
    global fs, gs, cs, fname
    payload = asbytes('a\x00string')
    fd, fname = mkstemp()
    writer = os.fdopen(fd, 'wb')
    writer.write(payload)
    writer.close()
    fs = open(fname, 'rb')
    gs = BytesIO(payload)
    cs = cStringIO(payload)
Beispiel #10
0
def test_read_numeric_writeable():
    """Arrays returned by ``read_numeric`` must be writeable."""
    stream = cStringIO()
    reader = _make_readerlike(stream, '<')
    var_reader = m5u.VarReader5(reader)
    tag = _make_tag(np.dtype('<u2'), 30, mio5p.miUINT16, 0)
    _write_stream(stream, tag.tostring())
    result = var_reader.read_numeric()
    yield assert_true, result.flags.writeable
Beispiel #11
0
    def test_stringio_output(self):
        """``_out`` must accept both text and byte in-memory buffers."""
        from sh import echo
        if IS_PY3:
            from io import StringIO
            from io import BytesIO as cStringIO
        else:
            from StringIO import StringIO
            from cStringIO import StringIO as cStringIO

        # Text buffer receives str output directly.
        text_buf = StringIO()
        echo("-n", "testing 123", _out=text_buf)
        self.assertEqual(text_buf.getvalue(), "testing 123")

        # Byte buffer receives raw bytes; decode before comparing.
        byte_buf = cStringIO()
        echo("-n", "testing 123", _out=byte_buf)
        self.assertEqual(byte_buf.getvalue().decode(), "testing 123")
Beispiel #12
0
    def test_stringio_output(self):
        """Redirecting command output into StringIO/cStringIO buffers works."""
        from sh import echo
        if IS_PY3:
            from io import StringIO
            from io import BytesIO as cStringIO
        else:
            from StringIO import StringIO
            from cStringIO import StringIO as cStringIO

        captured = StringIO()
        echo("-n", "testing 123", _out=captured)
        self.assertEqual(captured.getvalue(), "testing 123")

        captured = cStringIO()
        echo("-n", "testing 123", _out=captured)
        self.assertEqual(captured.getvalue().decode(), "testing 123")
Beispiel #13
0
    def parse_header(self):
        """Parse the ASCII header section of the data source.

        Reads up to 1 MiB from ``self.header`` via ``self.HTTPArray``,
        feeds each line to ``self.parse_line`` until a line containing
        ``self._eof`` is seen, then stores the byte offset of the end of
        the header in ``self.parameters['header_offset']`` (0 when the
        header lives in a separate file from the data).
        """
        # Pre-process
        ascfile = self.HTTPArray(self.header)
        max_header_size = 1024 * 1024  # cap the header read at 1 MiB
        lines = cStringIO(ascfile[:max_header_size].data[:])
        while True:
            l = lines.readline()
            # NOTE(review): if the EOF marker never appears, readline()
            # returns '' forever at end-of-buffer and this loops infinitely
            # (unless self._eof is '') — confirm the marker is guaranteed.
            if self._eof in l: break

            self.parse_line(l, lines)

        hoff = lines.tell()
        # A header held in a separate file contributes no offset into the
        # data file itself.
        if self.header != self.filename:
            hoff = 0
        self.parameters['header_offset'] = hoff
Beispiel #14
0
    def load_streamed_chunk(self, s3_key, pipe, suffix='.gz'):
        """Gzip-compress data read from ``pipe`` and multipart-upload to S3.

        Compressed bytes are buffered in memory and flushed to S3 as a new
        multipart part whenever the buffer exceeds 10 MiB.  On EOF the
        upload is completed and empty results (``pipe.cnt == 0``) are
        deleted from the bucket.  Relies on module-level ``total_size`` /
        ``total_comp`` counters — assumed to be defined elsewhere; TODO
        confirm.

        :param s3_key: target key; ``suffix`` is appended to form the key
        :param pipe: source object exposing ``read(n)``, ``cnt`` and
            ``file_id`` (project type — exact contract not visible here)
        :param suffix: extension appended to ``s3_key`` (default ``'.gz'``)
        :return: the S3 key the data was written to
        """
        #conn, bucket = s3conf
        cli = self.cli
        bucket = self.bucket
        assert pipe
        key = s3_key + suffix
        use_rr = False  # reduced-redundancy storage disabled

        mpu = bucket.initiate_multipart_upload(key,
                                               reduced_redundancy=use_rr,
                                               metadata={'header': 'test'})

        # In-memory buffer holding compressed bytes for the current part.
        stream = cStringIO()

        compressor = gzip.GzipFile(fileobj=stream, mode='wb')

        uploaded = 0  # NOTE(review): never used — candidate for removal

        @timeit
        def uploadPart(partCount=[0]):
            # Mutable default intentionally keeps the running part number
            # across calls.
            global total_comp
            partCount[0] += 1
            stream.seek(0)
            mpu.upload_part_from_file(stream, partCount[0])
            total_comp += stream.tell()

            # Reset the buffer for the next part.
            stream.seek(0)
            stream.truncate()

        @timeit
        def upload_to_s3(dump_file=None):
            global total_size, total_comp
            i = 0

            while True:  # until EOF
                #print(stream.tell() )
                i += 1
                start_time = time.time()
                chunk = pipe.read(self.write_row_cnt)

                if not chunk:  # EOF?
                    # Flush the gzip trailer, push the final part, finish.
                    compressor.close()
                    uploadPart()
                    mpu.complete_upload()
                    #break
                    if 1:
                        if pipe.cnt in [0]:  #cleanup/ delete empty file
                            bucket.delete_key(key)
                        else:
                            log.debug(
                                '%d: %s/%s [%s sec]' %
                                (pipe.file_id, self.convertSize(total_size),
                                 self.convertSize(total_comp),
                                 round((time.time() - start_time), 2)))

                        break
                # Python 2 unicode must be encoded before gzip accepts it.
                if sys.version_info[0] < 3 and isinstance(chunk, unicode):
                    compressor.write(chunk.encode('utf-8'))
                else:
                    compressor.write(chunk)
                total_size += len(chunk)
                if dump_file:
                    #pp(chunk)
                    dump_file.write(chunk)
                if stream.tell(
                ) > 10 << 20:  # min size for multipart upload is 5242880

                    uploadPart()
                #log.info ('S3: Uploaded: File_%d: Chunk_%d: %s [%s sec]' % (pipe.file_id,i, self.convertSize(len(chunk)),round((time.time() - start_time),2)))
                #print('S3 key: %s' % key)

        # The dump-to-disk branch below is disabled by the `0 and` guard.
        if 0 and cli.dump:

            dn = os.path.dirname(s3_key)
            bn = os.path.basename(s3_key)
            dump_dir = os.path.join('dump', dn)

            if not os.path.isdir(dump_dir):
                os.makedirs(dump_dir)
            dump_fn = os.path.join(dump_dir, '%s.gz' % (bn))

            log.debug('Dump: %s' % os.path.abspath(dump_fn))
            with gzip.GzipFile(dump_fn, mode='w') as c:
                upload_to_s3(c)
        else:
            upload_to_s3(None)

        return key
Beispiel #15
0
    def upload_data(self, data, target, out):
        """Gzip-compress the rows in ``data`` and multipart-upload to S3.

        Rows are consumed in batches of ``s3_rows`` (module-level, assumed
        defined elsewhere — TODO confirm), joined with ``os.linesep``,
        compressed, and flushed to S3 as a new part whenever the in-memory
        buffer exceeds 10 MiB.  Appends the generated file name and key to
        ``out.file_names`` / ``out.keys`` and returns ``out``.  Uses the
        module-level ``rid`` / ``total_size`` / ``total_comp`` globals.

        :param data: project object with ``data`` (row list), ``chunk_id``,
            ``current_ts`` and ``actor`` attributes
        :param target: dict with a ``'targetDir'`` entry
        :param out: accumulator object; gains ``file_names`` and ``keys``
        """
        global rid
        print(444, data)  # NOTE(review): debug print — consider removing
        fname = 'file_%d_%d.%s.%s.csv' % (data.chunk_id, len(
            data.data), data.current_ts, data.actor)
        s3_key = '%s/%s/%s' % (target['targetDir'],
                               self.cli.tcfg['targetTable'], fname)

        rid = 0  # next row index to consume from data.data
        if not hasattr(out, 'file_names'):
            out.file_names = []
            out.keys = []
        print(444, data)  # NOTE(review): duplicate debug print
        assert data
        suffix = '.gz'
        key = s3_key + suffix
        use_rr = False  # reduced-redundancy storage disabled

        mpu = self.bucket.initiate_multipart_upload(
            key, reduced_redundancy=use_rr, metadata={'header': 'test'})

        # In-memory buffer holding compressed bytes for the current part.
        stream = cStringIO()

        compressor = gzip.GzipFile(fileobj=stream, mode='wb')

        uploaded = 0  # NOTE(review): never used — candidate for removal

        @timeit
        def uploadPart(partCount=[0]):
            # Mutable default intentionally keeps the part number across calls.
            global total_comp
            partCount[0] += 1
            stream.seek(0)
            mpu.upload_part_from_file(stream, partCount[0])
            total_comp += stream.tell()

            # Reset the buffer for the next part.
            stream.seek(0)
            stream.truncate()

        @timeit
        def upload_to_s3():
            global total_size, total_comp, rid
            i = 0

            while True:  # until EOF
                i += 1
                start_time = time.time()
                chunk = ''
                #pp(data[0])
                tmp = []

                # Take the next batch of up to s3_rows rows.
                if rid < len(data.data):
                    tmp = data.data[rid:][:s3_rows]

                    chunk = os.linesep.join(tmp) + os.linesep

                #print rid, len(chunk), len(data)
                rid += len(tmp)
                if not chunk:  # EOF?
                    # Flush the gzip trailer, push the final part, finish.
                    compressor.close()
                    uploadPart()
                    mpu.complete_upload()
                    log.info('Uploaded: s3://%s/%s' % (self.bname, key))
                    #e()
                    break
                else:
                    # Python 2 unicode must be encoded before gzip accepts it.
                    if sys.version_info[0] < 3 and isinstance(chunk, unicode):
                        compressor.write(chunk.encode('utf-8'))
                    else:
                        compressor.write(chunk)
                    total_size += len(chunk)
                    if stream.tell(
                    ) > 10 << 20:  # min size for multipart upload is 5242880

                        uploadPart()

        upload_to_s3()
        out.file_names.append(fname + suffix)
        out.keys.append(key)
        return out
Beispiel #16
0
def s3_upload_rows(bucket, s3_key, data, suffix='.gz'):
    """Gzip-compress the row strings in ``data`` and multipart-upload to S3.

    Rows are consumed in batches of ``s3_rows`` (module-level, assumed
    defined elsewhere), joined with ``os.linesep``, compressed into an
    in-memory buffer and flushed to S3 as a new part whenever the buffer
    exceeds 10 MiB.  Uses the module-level ``total_size`` / ``total_comp``
    byte counters — assumed to exist; TODO confirm.

    :param bucket: boto S3 bucket supporting ``initiate_multipart_upload``
    :param s3_key: target key; ``suffix`` is appended to form the final key
    :param data: non-empty sequence of row strings
    :param suffix: extension appended to ``s3_key`` (default ``'.gz'``)
    :return: the S3 key the data was written to
    """
    rid = 0  # next row index to consume from `data`

    assert data
    key = s3_key + suffix
    use_rr = False  # reduced-redundancy storage disabled

    mpu = bucket.initiate_multipart_upload(key,
                                           reduced_redundancy=use_rr,
                                           metadata={'header': 'test'})

    # In-memory buffer holding compressed bytes for the current part.
    stream = cStringIO()

    compressor = gzip.GzipFile(fileobj=stream, mode='wb')

    def uploadPart(partCount=[0]):
        # Mutable default intentionally keeps the part number across calls.
        global total_comp
        partCount[0] += 1
        stream.seek(0)
        mpu.upload_part_from_file(stream, partCount[0])
        total_comp += stream.tell()

        # Reset the buffer for the next part.
        stream.seek(0)
        stream.truncate()

    def upload_to_s3():
        global total_size
        # BUGFIX: was `global rid`, which ignored the local `rid = 0` above
        # and read a module-level `rid` this function never initializes
        # (NameError on first use, or stale state from another call).
        nonlocal rid

        while True:  # until EOF
            chunk = ''
            tmp = []
            if rid < len(data):
                tmp = data[rid:][:s3_rows]
                chunk = os.linesep.join(tmp) + os.linesep

            rid += len(tmp)
            if not chunk:  # EOF?
                # Flush the gzip trailer, push the final part, finish.
                compressor.close()
                uploadPart()
                mpu.complete_upload()
                break
            else:
                # Python 2 unicode must be encoded before gzip accepts it.
                if sys.version_info[0] < 3 and isinstance(chunk, unicode):
                    compressor.write(chunk.encode('utf-8'))
                else:
                    compressor.write(chunk)
                total_size += len(chunk)
                if stream.tell() > 10 << 20:
                    # min size for a multipart upload part is 5242880 bytes
                    uploadPart()

    upload_to_s3()

    return key
    def generate_xls_report(self):
        """Build the daily sales journal as an XLS attachment on this wizard.

        Collects ``pos.order`` and ``sale.order`` records matching the
        wizard's session/location/date filters, writes them to an xlwt
        workbook with a grand-total footer row, stores the base64-encoded
        file in ``self.data`` and returns the action that re-opens the
        wizard in download state.

        :raises Warning: when neither session nor location is selected, or
            when no matching records exist.
        """
        domain = []
        if not self.session_id and not self.location_id:
            # NOTE(review): message has typos ('atleast', 'Sesssion') — a
            # behavior change to fix, so only flagged here.
            raise Warning(_('You have to select atleast one option from Sesssion Or Location '))
        if self.session_id:
            domain.append(('session_id', '=', self.session_id.id))
        if self.start_date:
            domain.append(('date_order', '>=', self.get_datetime_timezone(self.start_date + " 00:00:00")))
        if self.end_date:
            domain.append(('date_order', '<=', self.get_datetime_timezone(self.end_date + " 23:59:59")))

        if self.location_id:
            domain.append(('location_id', '=', self.location_id.id))

        pos_order_ids = self.env['pos.order'].search(domain)
        sale_order_ids = self.env['sale.order'].search(domain)

        if not pos_order_ids and not sale_order_ids:
            raise Warning(_('No Record found.'))
        # Plain centered style (styleP) and gray centered header style (stylePC).
        styleP = xlwt.XFStyle()
        stylePC = xlwt.XFStyle()
        pattern = xlwt.Pattern()
        pattern.pattern = xlwt.Pattern.SOLID_PATTERN
        pattern.pattern_fore_colour = xlwt.Style.colour_map['gray25']
        stylePC.pattern = pattern
        alignment = xlwt.Alignment()
        alignment.horz = xlwt.Alignment.HORZ_CENTER
        styleP.alignment = alignment
        stylePC.alignment = alignment
        workbook = xlwt.Workbook(encoding="utf-8")
        worksheet = workbook.add_sheet("Report Sales Journal")
        # Header row: columns 0..14.
        worksheet.write(0, 0, 'ORDER REFERENCE', style=stylePC)
        worksheet.write(0, 1, 'RECEIPT REFERENCE', style=stylePC)
        worksheet.write(0, 2, 'DOCUMENT TYPE', style=stylePC)
        worksheet.write(0, 3, 'NAME OF CUSTOMER', style=stylePC)
        worksheet.write(0, 4, 'OUT / IN', style=stylePC)
        worksheet.write(0, 5, 'EXCHANGE OLD BATTERY', style=stylePC)
        worksheet.write(0, 6, 'MODELO', style=stylePC)
        worksheet.write(0, 7, 'CODIGO BATTERY', style=stylePC)
        worksheet.write(0, 8, 'ORDER DATE', style=stylePC)
        worksheet.write(0, 9, 'SELLER OR CASHIER', style=stylePC)
        worksheet.write(0, 10, 'TOTAL', style=stylePC)
        worksheet.write(0, 11, 'STATE', style=stylePC)
        worksheet.write(0, 12, 'SESSION', style=stylePC)
        worksheet.write(0, 13, 'NÚMERO DE COMPROBANTE', style=stylePC)
        worksheet.write(0, 14, 'ESTADO EMISIÓN', style=stylePC)
        state_dict = {'draft': 'New', 'cancel': 'Cancelled', 'paid': 'Paid', 'done': 'Posted', 'invoiced': 'Invoiced', 'sale': 'Confirm'}
        # NOTE(review): range(0, 14) sizes columns 0..13 only, leaving
        # column 14 ('ESTADO EMISIÓN') at the default width.
        for col_number in range(0, 14):
            worksheet.col(col_number).width = 5200
        rows = 1
        grand_total = 0
        # pos_order
        for order in pos_order_ids:
            for line in order.lines:
                if line.back_order:
                    document_type = "PRODUCT RETURN"
                elif order.state == "invoiced":
                    document_type = "INVOICE"
                else:
                    document_type = "TICKET"
                worksheet.write(rows, 0, order.name)
                worksheet.write(rows, 1, order.pos_reference)
                worksheet.write(rows, 2, document_type)
                worksheet.write(rows, 3, order.partner_id and order.partner_id.name or "")
                # +1 for stock coming in, -1 for stock going out.
                worksheet.write(rows, 4, 1 if line.stock_income else -1, style=styleP)
                worksheet.write(rows, 5, "OK" if line.exchange_product else "", style=styleP)
                worksheet.write(rows, 6, line.product_id.name, style=styleP)
                worksheet.write(rows, 7, line.prodlot_id.name if line.prodlot_id else "", style=styleP)
                worksheet.write(rows, 8, self.get_datetime_timezone(order.date_order, fetch_argu=True))
                worksheet.write(rows, 9, order.user_id.name or "")
                worksheet.write(rows, 10, line.price_subtotal_incl, style=styleP)
                worksheet.write(rows, 11, state_dict[order.state])
                worksheet.write(rows, 12, order.session_id.name)
                worksheet.write(rows, 13, order.invoice_id.move_name)
                worksheet.write(rows, 14, order.invoice_id.estado_emision)
                grand_total += line.price_subtotal_incl
                rows += 1
            rows += 1
        # sale_order
        for order in sale_order_ids:
            for line in order.order_line:
                worksheet.write(rows, 0, order.name)
                worksheet.write(rows, 1, '-')
                worksheet.write(rows, 2, "SALE ORDER")
                worksheet.write(rows, 3, order.partner_id and order.partner_id.name or "")
                worksheet.write(rows, 4, -1, style=styleP)
                worksheet.write(rows, 5, "", style=styleP)
                worksheet.write(rows, 6, line.product_id.name, style=styleP)
                worksheet.write(rows, 7, line.lot_id.name if line.lot_id else "", style=styleP)
                worksheet.write(rows, 8, self.get_datetime_timezone(order.date_order, fetch_argu=True))
                worksheet.write(rows, 9, order.user_id.name or "")
                worksheet.write(rows, 10, line.price_subtotal, style=styleP)
                worksheet.write(rows, 11, state_dict[order.state])
                worksheet.write(rows, 12, order.session_id.name)
                grand_total += line.price_subtotal
                rows += 1
            rows += 1
        # Footer: "Total" label in col 9, grand total in col 10.
        for col_number in range(0, 13):
            if col_number == 9:
                worksheet.write(rows, 9, "Total", style=stylePC)
            elif col_number == 10:
                worksheet.write(rows, 10, grand_total, style=stylePC)
            else:
                worksheet.write(rows, col_number, "", style=stylePC)
        file_data = cStringIO()
        workbook.save(file_data)
        session_name = '_'+ self.session_id.name if self.session_id else ""
        stock_name = ''
        if self.location_id:
            stock_name = '_'
            if self.location_id.location_id:
                stock_name +=  self.location_id.location_id.name + '/'
            stock_name += self.location_id.name
        # stock_name = '_'+ self.location_id.name if self.location_id else ""
        # NOTE(review): base64.encodestring was removed in Python 3.9 —
        # base64.encodebytes is the modern spelling (behavior change, so
        # only flagged here).
        self.write({
            'state': 'get',
            'data': base64.encodestring(file_data.getvalue()),
            'name': 'reporte_diario_ventas'+session_name+''+stock_name+'.xls'
        })
        return {
            'name': 'Report Sales Journal',
            'type': 'ir.actions.act_window',
            'res_model': 'wizard.report.sale.journal',
            'view_mode': 'form',
            'view_type': 'form',
            'res_id': self.id,
            'target': 'new'
        }
def nostdout():
    """Generator that silences stdout for the duration of the managed block.

    Presumably wrapped with ``contextlib.contextmanager`` at the decoration
    site (not visible here) — TODO confirm.  Swaps ``sys.stdout`` for an
    in-memory buffer, yields once, and restores the original stream.
    """
    save_stdout = sys.stdout
    sys.stdout = cStringIO()
    try:
        yield
    finally:
        # BUGFIX: restore stdout even when the managed block raises;
        # previously an exception left sys.stdout pointing at the buffer.
        sys.stdout = save_stdout