def __readNdxFile(self, file, dic):
    fileName = file.filename
    sizeFile = file.file_size
    fileData = self.__zipFile.read(fileName)
    bt = BytesIO(fileData)
    bt.seek(0)
    lenBytes = bt.read(2)
    # lenSum = self.BytesToInt(lenBytes)
    # print "lenSum = ", lenSum
    while bt.tell() < sizeFile:
        lenBytes12 = bt.read(12)
        # NULL bytes
        bytesNull = lenBytes12[0:2]
        bytesNullInt = self.BytesToInt(bytesNull)
        # FILETIME bytes
        bytesFileTime = lenBytes12[2:10]
        bytesFileTimeInt = self.BytesToInt(bytesFileTime)
        # offset in PDT bytes
        bytesOffsetPdt = lenBytes12[10:12]
        bytesOffsetPdtInt = self.BytesToInt(bytesOffsetPdt)
        startTimeInt = self.FiletimeToUnixtimestamp(bytesFileTimeInt)
        if bytesOffsetPdtInt not in dic:
            dic[bytesOffsetPdtInt] = [None, None]
        dic[bytesOffsetPdtInt][1] = startTimeInt
    bt.close()
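# A minimal sketch of the two helpers __readNdxFile assumes; the real
# BytesToInt and FiletimeToUnixtimestamp live elsewhere in this class, so
# these bodies are assumptions. FILETIME counts 100-nanosecond ticks since
# 1601-01-01 UTC, and the gap to the Unix epoch is 11644473600 seconds.
import struct

def BytesToInt(self, data):
    # NDX fields are little-endian; pad to 8 bytes and unpack as unsigned
    return struct.unpack('<Q', data.ljust(8, b'\x00'))[0]

def FiletimeToUnixtimestamp(self, filetime):
    return filetime // 10**7 - 11644473600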
def parse_data(src):
    # Parse the HTML
    et = html.fromstring(src)
    # Collect the product data
    product_items_list = et.xpath(
        "//div[@class='list-product']//div[@class='plp-slide']")
    final_list = []
    for i in product_items_list:
        data = {}
        data["img_box_src"] = i.xpath(".//div[@class='img-box']//img/@lazysrc")
        data["img_box_src"] = data["img_box_src"][0] if data[
            "img_box_src"] else ""
        data["goods_tit"] = i.xpath(".//p[@class='goods-tit']/a/text()")
        data["goods_tit"] = data["goods_tit"][0] if data["goods_tit"] else ""
        data["goods_introudce"] = i.xpath(
            ".//p[@class='goods-introudce']/a/text()")
        data["goods_introudce"] = data["goods_introudce"][0] if data[
            "goods_introudce"] else ""
        goods_classify = i.xpath(".//div[@class='goods-classify']//span")
        gc_list = data["goods_classify"] = []
        for gc in goods_classify:
            dgc = {}
            dgc["title"] = gc.xpath("./img/@title")
            dgc["title"] = dgc["title"][0] if dgc["title"] else ""
            dgc["title"] = dgc["title"].replace('\xa0', ' ')
            dgc["code"] = gc.xpath("./@data-code")
            dgc["code"] = dgc["code"][0] if dgc["code"] else ""
            dgc["saleprice"] = gc.xpath("./@data-saleprice")
            dgc["saleprice"] = dgc["saleprice"][0] if dgc["saleprice"] else ""
            dgc["img_src"] = gc.xpath("./img/@src")
            dgc["img_src"] = dgc["img_src"][0] if dgc["img_src"] else ""
            # Extract the SKU color from the swatch image
            if dgc["img_src"]:
                req_img = requests.get(dgc["img_src"], verify=False)
                img_data = req_img.content
                bio = BytesIO()
                bio.write(img_data)
                bio.seek(0)
                pimg = Image.open(bio)  # load as a PIL image
                pimg.thumbnail((1, 1))  # shrink to a single 1x1 pixel
                r, g, b = pimg.getcolors(
                    pimg.size[0] * pimg.size[1])[0][1]  # shape: [(1, (223, 218, 212))]
                dgc["img_color"] = '#%02x%02x%02x' % (r, g, b)
                pimg.close()
                bio.close()
            else:
                dgc["img_color"] = ""
            gc_list.append(dgc)
        final_list.append(data)
    return final_list
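# The SKU color extraction above uses a common PIL trick: thumbnailing an
# image down to one pixel averages all of its pixels. A self-contained
# sketch of just that step (the function name is mine, not the source's):
from io import BytesIO
from PIL import Image

def dominant_color_hex(image_bytes):
    pimg = Image.open(BytesIO(image_bytes)).convert('RGB')
    pimg.thumbnail((1, 1))            # resampling averages every pixel
    r, g, b = pimg.getpixel((0, 0))
    pimg.close()
    return '#%02x%02x%02x' % (r, g, b)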
def _create_dataset():
    examples_file, label_file = BytesIO(), BytesIO()
    examples_file.name = 'examples.csv'
    label_file.name = 'labels.csv'
    iris = load_iris()
    numpy.savetxt(examples_file, iris.data, delimiter=',')
    numpy.savetxt(label_file, iris.target, delimiter=',')
    examples_file.seek(0)
    label_file.seek(0)
    return examples_file, label_file
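# _create_dataset returns in-memory files that can stand in for uploads in
# tests. A quick round-trip check (assuming numpy and _create_dataset are
# in scope; iris has 150 samples with 4 features):
examples_file, label_file = _create_dataset()
examples = numpy.loadtxt(examples_file, delimiter=',')
labels = numpy.loadtxt(label_file, delimiter=',')
assert examples.shape == (150, 4) and labels.shape == (150,)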
def __init__(self, database, language="english",
             filename="english_lang_cds.txt"):
    self.database = database
    self.language = language
    # load decompression DLL
    decompDLL = ctypes.CDLL("riftdecomp.dll")
    stream = BytesIO()
    self.database.extractByNameToMemory("lang_" + language + ".cds", stream)
    # seek to start
    stream.seek(0)
    dis = LittleEndianDataInputStream(stream)
    entryCount = dis.read_int()
    # read the frequency table
    frequencyTable = stream.read(1024)
    print("entryCount:" + str(entryCount))
    # not sure what these are
    for i in range(0, entryCount):
        key = stream.read(4)
        value = readUnsignedLeb128(stream)
    f = open(filename, "w", encoding='UTF-8')
    for i in range(0, entryCount):
        compressedSize = readUnsignedLeb128(stream)
        uncompressedSize = readUnsignedLeb128(stream)
        entryData = stream.read(compressedSize)
        # create a buffer to decompress into
        outputData = ctypes.create_string_buffer(uncompressedSize)
        # call a DLL to do the actual decompression. The ASM code to
        # decompress was too complicated to reverse engineer, so I just
        # took the code and put it into a DLL.
        decompDLL.decompressData(frequencyTable, entryData, compressedSize,
                                 outputData, uncompressedSize)
        # And the results are in!
        # - The first 10 bytes we don't know; they seem to be the same
        #   between files though?
        buffer = BytesIO(outputData.raw)
        buffer.read(10)
        # - Then a LEB128 with the length of the string
        strLength = readUnsignedLeb128(buffer)
        # - Then the string itself
        finalStringBytes = buffer.read(strLength)
        finalString = finalStringBytes.decode("utf-8")
        # print("doing entry: " + str(i) + ", length[" + str(strLength) + "]:"
        #       + finalString.encode(sys.stdout.encoding, errors="replace")
        #         .decode(sys.stdout.encoding))
        print(finalString, file=f)
    f.close()
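# A sketch of the readUnsignedLeb128 helper this loader assumes: LEB128
# packs an unsigned integer 7 bits per byte, least-significant bits first,
# with the high bit of each byte flagging continuation.
def readUnsignedLeb128(stream):
    result = shift = 0
    while True:
        byte = stream.read(1)[0]          # one byte as an int (Python 3)
        result |= (byte & 0x7F) << shift
        if not byte & 0x80:               # high bit clear: last byte
            return result
        shift += 7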
class BaseBitcoinClient(object):
    def __init__(self, socket):
        self.socket = socket
        self.buffer = BytesIO()
        self.stop_client = False

    def close_stream(self):
        self.socket.close()

    def send_message(self, message):
        self.socket.sendall(message.to_bytes())

    def handshake(self):
        # Send a "version" message to start the handshake
        msg = msg_version()
        # See BIP 111 (https://github.com/bitcoin/bips/blob/master/bip-0111.mediawiki)
        msg.nVersion = 70011
        # If False, broadcast transactions will not be announced until a
        # filter{load,add,clear} command is received
        msg.fRelay = False
        self.send_message(msg)

    def handle_version(self, _):
        # Respond with a "verack" message to a "version" message
        msg = msg_verack()
        self.send_message(msg)

    def handle_ping(self, ping_message):
        # Respond with a "pong" message to a "ping" message
        msg = msg_pong()
        msg.nonce = ping_message.nonce
        self.send_message(msg)

    def run(self):
        while not self.stop_client:
            # Read and store the data from the socket
            data = self.socket.recv(64)
            self.buffer.write(data)
            try:
                # Go back to the beginning of the buffer
                self.buffer.seek(0)
                # Deserialize the message
                message = MsgSerializable().stream_deserialize(self.buffer)
                # Reset the buffer, keeping any unread bytes
                remaining = self.buffer.read()
                self.buffer = BytesIO()
                self.buffer.write(remaining)
                # Dispatch to the matching handle_* method
                if message is not None:
                    handle_func_name = "handle_" + message.command.decode("utf-8")
                    handle_func = getattr(self, handle_func_name, None)
                    if handle_func:
                        handle_func(message)
            except SerializationTruncationError:
                # Not a full message yet; read more data from the socket
                pass
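# A minimal way to drive the client above; the host is an assumption, and
# the msg_* / MsgSerializable names appear to come from python-bitcoinlib's
# bitcoin.messages module.
import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('127.0.0.1', 8333))   # 8333 is Bitcoin mainnet's default port
client = BaseBitcoinClient(sock)
client.handshake()
client.run()   # blocks, dispatching handle_* callbacks, until stop_client is set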
def history_tick_content(contract, date):
    response = requests.get(hist_tick_url(contract, date), stream=True)
    if response.status_code == 200:
        disposition = response.headers['Content-Disposition']
        bio = BytesIO(b"")
        chunk_size = 2**16
        with click.progressbar(response.iter_content(chunk_size),
                               label=disposition) as bar:
            for content in bar:
                bio.write(content)
        bio.seek(0)
        return bio.read()
    else:
        raise IOError(response.status_code)
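# Hypothetical call; the contract/date formats depend on hist_tick_url,
# which is defined elsewhere, so these literals are placeholders.
data = history_tick_content('IF1706', '20170601')
with open('IF1706-20170601.csv', 'wb') as f:
    f.write(data)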
def __readPdtFile(self, file, dic):
    fileName = file.filename
    sizeFile = file.file_size
    fileData = self.__zipFile.read(fileName)
    bt = BytesIO(fileData)
    # programme names start at offset 0x1A
    bt.seek(0x1A)
    while bt.tell() < sizeFile:
        pos = bt.tell()
        # little-endian 16-bit length of the name that follows
        lenBytes = bt.read(2)
        lenBytesHex = "0x" + ''.join(
            ["%02X" % ord(x) for x in reversed(lenBytes)])
        lenSum = int(lenBytesHex, 0)
        bytesProName = unicode(bt.read(lenSum), self.__jtvEncodeProgrammName)
        if pos not in dic:
            dic[pos] = [None, None]
        dic[pos][0] = bytesProName
    bt.close()
def decompress(self, input):
    header = lz4Header().read(input)
    table = lz4Table().read(input, header.chunkCount)
    input.seek(header.headerSize, 0)
    # pre-allocate a zero-filled stream big enough for the header plus
    # every decompressed chunk
    data = bytes(header.headerSize +
                 sum(chunk.decompressedChunkSize for chunk in table))
    memoryStream = BytesIO(data)
    memoryStream.seek(header.headerSize, 0)
    for chunk in table:
        saveChunk = chunk.read(input)
        # print(saveChunk)
        memoryStream.write(saveChunk)
    memoryStream.seek(header.headerSize, 0)
    return memoryStream
def test_dataset_post_dataset_length_mismatch(self):
    examples_file, label_file = BytesIO(), BytesIO()
    examples_file.name = 'examples.csv'
    label_file.name = 'labels.csv'
    iris = load_iris()
    breast_cancer = load_breast_cancer()
    numpy.savetxt(examples_file, iris.data, delimiter=',')
    numpy.savetxt(label_file, breast_cancer.target, delimiter=',')
    examples_file.seek(0)
    label_file.seek(0)
    client = DjangoClient()
    response = client.post(reverse('datasets'), data={
        'dataset': 'TEST',
        'file[0]': examples_file,
        'file[1]': label_file
    })
    self.assertEqual(400, response.status_code)
    self.assertEqual(b'"Examples and labels are not the same length"',
                     response.content)
def file_downloaded(self, response, request, info):
    filenames = re.findall(r'filename="(.+)\.pgn"',
                           response.headers['Content-Disposition'])
    hash = filenames[0]
    body = response.body.decode('gbk')
    Event = re.findall(r'Event\s+"(.+)"', body)[0]
    Date = re.findall(r'Date\s+"(.+)"', body)[0]
    RedTeam = re.findall(r'RedTeam\s+"(.+)"', body)[0]
    Red = re.findall(r'Red\s+"(.+)"', body)[0]
    BlackTeam = re.findall(r'BlackTeam\s+"(.+)"', body)[0]
    Black = re.findall(r'Black\s+"(.+)"', body)[0]
    Result = re.findall(r'Result\s+"(.+)"', body)[0]
    ECCO = re.findall(r'ECCO\s+"(.+)"', body)[0]
    Opening = re.findall(r'Opening\s+"(.+)"', body)[0]
    # parameterized query: let the driver do the quoting
    sql = ("INSERT INTO qipu (`date`, ecco, result, event, hash, redTeam, "
           "red, blackTeam, black, opening) "
           "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
    params = (Date, ECCO, Result, Event, hash, RedTeam, Red, BlackTeam,
              Black, Opening)
    print(sql)
    con = pymysql.Connect(
        host='rm-bp149hof32gt0cewt7o.mysql.rds.aliyuncs.com',
        port=3306,
        db='aichess',
        user='******',
        passwd='******',
        charset='utf8')
    cursor = con.cursor()
    try:
        cursor.execute(sql, params)
        con.commit()
    except Exception:
        con.rollback()
    finally:
        cursor.close()
        con.close()
    buf = BytesIO(response.body)
    checksum = md5sum(buf)
    buf.seek(0)
    self.store.persist_file(hash + '.pgn', buf, info)
    return checksum
def writepkg(self, pkg_filename, include_media):
    "Code copied from gramps/plugins/export/exportpkg.py"
    try:
        archive = tarfile.open(pkg_filename, 'w:gz')
    except EnvironmentError as msg:
        log.warning(str(msg))
        self.user.notify_error(
            _('Failure writing %s') % pkg_filename, str(msg))
        return 0
    # Write media files first, since the database may be modified
    # during the process (i.e. when removing object)
    if include_media:
        for m_id in self.db.get_media_handles(sort_handles=True):
            mobject = self.db.get_media_from_handle(m_id)
            filename = media_path_full(self.db, mobject.get_path())
            archname = str(mobject.get_path())
            if os.path.isfile(filename) and os.access(filename, os.R_OK):
                archive.add(filename, archname, filter=fix_mtime)
    # Write XML now
    g = BytesIO()
    gfile = XmlWriter(self.db, self.user, 2, compress=1,
                      material_type=self.material_type,
                      description=self.description)
    gfile.write_handle(g)
    tarinfo = tarfile.TarInfo('data.gramps')
    tarinfo.size = len(g.getvalue())
    tarinfo.mtime = time.time()
    if not win():
        tarinfo.uid = os.getuid()
        tarinfo.gid = os.getgid()
    g.seek(0)
    archive.addfile(tarinfo, g)
    archive.close()
    g.close()
    return True
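# The TarInfo-plus-BytesIO pattern above is the standard way to add
# generated content to a tar archive without touching disk. A standalone
# sketch (the file names here are mine):
import tarfile
import time
from io import BytesIO

payload = BytesIO(b'<xml/>')
info = tarfile.TarInfo('data.xml')
info.size = len(payload.getvalue())   # addfile reads exactly info.size bytes
info.mtime = time.time()
with tarfile.open('pkg.tar.gz', 'w:gz') as tar:
    tar.addfile(info, payload)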
def generate(width, height, image_format='PNG'):
    image = Image.new('RGB', (width, height))
    draw = ImageDraw.Draw(image)
    text = "{}x{}".format(width, height)
    isexst = cache.get(text)
    if isexst is None:
        textwidth, textheight = draw.textsize(text)
        if textwidth < width and textheight < height:
            texttop = (height - textheight) // 2
            textleft = (width - textwidth) // 2
            draw.text((textleft, texttop), text, fill=(255, 0, 0))
        content = BytesIO()
        image.save(content, image_format)
        content.seek(0)
        cache.set(text, content, 60 * 60)
        # image.show()
        return content
    return isexst

# generate(100, 100)
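# One way to serve the generated placeholder, assuming a Django view (the
# URL wiring is not shown in the source):
from django.http import HttpResponse

def placeholder(request, width, height):
    content = generate(int(width), int(height))
    return HttpResponse(content.read(), content_type='image/png')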
def test__BytesIO_seek(self):
    x = BytesIO()
    # these should all succeed
    x.seek(0)
    x.seek(long(0))
    x.seek(0, 0)
    x.seek(long(0), 0)
    x.seek(0, long(0))
    x.seek(long(0), long(0))
    # these should all fail
    self.assertRaises(TypeError, x.seek, 0, 0.0)
    self.assertRaises(TypeError, x.seek, long(0), 0.0)
    self.assertRaises(ValueError, x.seek, 0, 1000)
    self.assertRaises(ValueError, x.seek, long(0), 1000)
    self.assertRaises(OverflowError, x.seek, 0, sys.maxsize + 1)
    self.assertRaises(OverflowError, x.seek, long(0), sys.maxsize + 1)
    self.assertRaises(TypeError, x.seek, 0.0)
    self.assertRaises(TypeError, x.seek, 0.0, 0)
    self.assertRaises(OverflowError, x.seek, sys.maxsize + 1)
    self.assertRaises(OverflowError, x.seek, sys.maxsize + 1, 0)
def test__BytesIO_seek(self):
    # TODO: add cases that seek
    x = BytesIO()
    # these should all succeed
    x.seek(0)
    x.seek(0L)
    x.seek(0, 0)
    x.seek(0L, 0)
    x.seek(0, 0L)
    x.seek(0L, 0L)
    # these should all fail
    self.assertRaises(TypeError, x.seek, 0, 0.0)
    self.assertRaises(TypeError, x.seek, 0L, 0.0)
    self.assertRaises(ValueError, x.seek, 0, 1000)
    self.assertRaises(ValueError, x.seek, 0L, 1000)
    self.assertRaises(OverflowError, x.seek, 0, sys.maxsize + 1)
    self.assertRaises(OverflowError, x.seek, 0L, sys.maxsize + 1)
    self.assertRaises(TypeError, x.seek, 0.0)
    self.assertRaises(TypeError, x.seek, 0.0, 0)
    self.assertRaises(OverflowError, x.seek, sys.maxsize + 1)
    self.assertRaises(OverflowError, x.seek, sys.maxsize + 1, 0)
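# The integer whence values exercised by the tests above have named
# constants in io; a quick demonstration of the valid ones:
import io

assert (io.SEEK_SET, io.SEEK_CUR, io.SEEK_END) == (0, 1, 2)
b = io.BytesIO(b'abcdef')
b.seek(-2, io.SEEK_END)   # negative offsets are allowed relative to the end
assert b.read() == b'ef'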
# testing.save(localuu[1:6] + "_asPNG.png")
qr.add_data(CurQuery)
# qr.add_data("http://This is experiment.com")
qr.make(fit=True)
# myyfactory = qrcode.image.svg.SvgImage
img = qr.make_image()
# print(img)
# ssuy = open("Blu_QR" + localuu[1:6] + ".txt", "w")
# img.save("Blu_QR" + localuu[1:6] + ".png", format='PNG')
# img.save("Blu_QR" + localuu[1:6] + "_RLE.bmp", format='BMP', compression="bmp_rle")
# img.save("Blu_QR" + localuu[1:6] + ".bmp", format='BMP')
bbuu = BytesIO()
uiee = BytesIO()
img.save(bbuu, format='PNG')  # render the QR image into the in-memory buffer
bbuu.seek(0)
# bytesobj = img.tobytes('raw')
# bytesobj = img.tobytes('PNG')  # fails
bytesobj = bbuu.getvalue()
# khyz = img.get_image()
img_str = binascii.b2a_base64(bytesobj)  # base64-encode the PNG bytes
# img_str = binascii.b2a_base64(ravdec.net_compression(bytesobj))
# img_str = ravdec.net_compression(binascii.b2a_base64(bytesobj))
# compresst = ravdec.net_compression(img_str)
# img_str = bytesobj.format("02X")
# img = qr.make(CurQuery, mfactory=myyfactory)
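# Note: binascii.b2a_base64 appends a trailing newline; base64.b64encode
# yields the same encoding without it, which is usually what a data URI
# expects. A sketch using the buffer built above:
import base64

img_b64 = base64.b64encode(bbuu.getvalue()).decode('ascii')
data_uri = 'data:image/png;base64,' + img_b64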
mapper.write(t2, out)
print(out.getvalue())
out.seek(0)
parser = JsonParser()
print("???????????????2")
mapper = JsonObjectMapper(backend(JSONDictionary()))
t2out = mapper.read_obj(parser.parse(out), cls=Test2)
print(t2out)

print("&&&&&&&&&&&&&&&&&&&&&&&&&")
out = BytesIO()
parser = JsonParser()
mapper = JsonObjectMapper(backend(JSONDictionary()))
mapper.write([{"x": 1}, {"x": 1}], out)
out.seek(0)
events = parser.parse(out)
res = mapper.read_array(events, cls=dict)
print(res)

print("&&&&&&&&&&&&&&&&&&&&&&&&&2")
pi = PersonInfo("test", "Test", [Job("J1", "L1")])
out = BytesIO()
parser = JsonParser()
mapper = JsonObjectMapper(backend(JSONDictionary()))
m = Module()
m.add_deserializer(PersonInfo, PersonInfoDeserializer())
mapper.register_module(m)
mapper.write(pi, out)
out.seek(0)
def create_instream(self, bdata):
    buff = BytesIO()
    buff.write(bdata)
    buff.seek(0)
    return buff
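# Usage sketch (the instance name is a placeholder). Note that
# BytesIO(bdata) builds the same rewound stream in one call; the
# write-then-seek form only matters when the data arrives in pieces.
stream = obj.create_instream(b'\x00\x01\x02')
assert stream.read(2) == b'\x00\x01'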