Example #1
 def write(self, worker, probe, dest):
     """
     Serialize probe data, compress it and write it exclusively
     to output file.
     """
     data = StringIO()
     data.write(compress(dumps(probe, HIGHEST_PROTOCOL)))
     data.seek(0)
     info = tarfile.TarInfo()
     info.name = 'Probe_%s.lzo' % dest
     info.uid = 0
     info.gid = 0
     info.size = len(data.buf)
     info.mode = S_IMODE(0o0444)
     info.mtime = mktime(probe.circs[0].created.timetuple())
     with self._lock:
         # Maximum file size is about 1 GB
         if self._bytes_written >= 1 * 1000 * 1000 * 1000:
             self._tar.close()
             self._tar = self._create_tar_file()
             self._bytes_written = 0
         self._tar.addfile(tarinfo=info, fileobj=data)
         self._bytes_written += info.size
         self._threads_finished.append(worker)
         self._worker_finished.set()
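A minimal read-back sketch for the archive format above, assuming `compress` and `dumps` in the writer are `lzo.compress` and `pickle.dumps`; `read_probe` and its arguments are illustrative names:

import tarfile
from pickle import loads
from lzo import decompress

def read_probe(tar_path, member_name):
    # Pull one 'Probe_<dest>.lzo' member out of the archive and
    # reverse the write path: decompress, then unpickle.
    with tarfile.open(tar_path) as tar:
        member = tar.extractfile(member_name)
        return loads(decompress(member.read()))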
Example #2
 def compressLZO(data):
     try:
         import lzo
     except ImportError:
         raise LoadException("can't save LZO saves, no LZO")
     else:
         return lzo.compress(data)
Example #3
 def write(self, webpage):
     key = uri_to_s3key(webpage['url'])
     self.s3.Object('samuel-html', key).put(Body=lzo.compress(json.dumps(webpage).encode('utf-8')))
     self.sqs.send_message(
         QueueUrl=self.queue_url,
         MessageBody=key
     )
     logger.info("s3: %s", key)
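A read-path sketch mirroring the writer above (same bucket name and boto3 resource; decompress, then parse the JSON):

 def read(self, url):
     key = uri_to_s3key(url)
     body = self.s3.Object('samuel-html', key).get()['Body'].read()
     return json.loads(lzo.decompress(body))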
Example #4
	def add_data(self,data,label):
		pickled_data = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
		lz_data = lzo.compress(pickled_data)
		conn = psycopg2.connect(self.conn_info)
		with conn:
			cursor = conn.cursor()
	#		cursor.execute("CREATE TABLE IF NOT EXISTS main_table("\
	#				+"T INT, "\
	#				+"Population_object BLOB)")
			cursor.execute("DELETE FROM "+self.db_id+" WHERE T="+str(label))
			cursor.execute("INSERT INTO "+self.db_id+" VALUES (%s,%s)",(label,psycopg2.Binary(lz_data)))
Example #5
def compressor_worker():
    while not q.empty():
        w_base, w_rel_base, w_f = q.get()

        w_rel_base = '' if w_rel_base == '.' else w_rel_base

        abs_path = os.path.join(w_base, w_f)
        rel_path = os.path.join(w_rel_base, w_f)

        extension = os.path.splitext(rel_path)[1][1:]

        raw_filestring = open(abs_path, 'rb').read()
        compressed_filestring = lzo.compress(raw_filestring, options.compression)

        len_raw = len(raw_filestring)
        len_compressed = len(compressed_filestring)

        compression_factor = (float(len_compressed) / len_raw) if len_raw else 0
        compression_used = False

        if compression_factor < options.cutoff:
            compression_used = True

        string_final = compressed_filestring if compression_used else raw_filestring
        len_final = len(string_final)
        adler32_final = lzo.adler32(string_final)

        compressed_data_chunks.append({
            'path': rel_path,
            'path_mmh3': mmh3.hash64(rel_path)[0],
            'adler32': adler32_final,
            'size_before': len_raw,
            'size_after': len_final,
            'factor': compression_factor,
            'compression': 1 if compression_used else 0,
            'extension_str': extension,
            'extension': extensions.get(extension, 0),
            'data': string_final
        })

        if options.verbose:
            print('\t'.join((
                'Y' if compression_used else 'N',
                extension,
                '%.02f' % (compression_factor * 100.0),
                str(len_raw // 1024),
                str(len_final // 1024),
                str(adler32_final),
                rel_path
            )))

        q.task_done()
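A minimal driver sketch for the worker above, assuming the globals its body relies on (`q`, `options`, `compressed_data_chunks`, `extensions`) live at module level; the directory name and thread count here are illustrative:

import os
import queue
import threading

q = queue.Queue()
compressed_data_chunks = []

def enqueue_tree(base):
    # Queue (base_dir, relative_dir, filename) triples for every file in a tree.
    for dirpath, _dirs, files in os.walk(base):
        rel = os.path.relpath(dirpath, base)
        for name in files:
            q.put((dirpath, rel, name))

enqueue_tree('assets')  # hypothetical input directory
workers = [threading.Thread(target=compressor_worker) for _ in range(4)]
for w in workers:
    w.start()
q.join()  # task_done() in the worker balances each get()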
Example #6
File: test.py  Project: AlexSnet/oneline
def test(src, level=1):
    a0 = lzo.adler32(src)
    c =  lzo.compress(src, level)
    u1 = lzo.decompress(c)
    a1 = lzo.adler32(u1)
    o =  lzo.optimize(c)
    u2 = lzo.decompress(o)
    a2 = lzo.adler32(u2)
    if src != u1 or src != u2:
        raise lzo.error("internal error 1")
    if a0 != a1 or a0 != a2:
        raise lzo.error("internal error 2")
    print("compressed %6d -> %6d" % (len(src), len(c)))
Example #7
def test(src, level = 1):
    a0 = lzo.adler32(src)
    c =  lzo.compress(src,level)
    u1 = lzo.decompress(c)
    a1 = lzo.adler32(u1)
    o =  lzo.optimize(c)
    u2 = lzo.decompress(o)
    a2 = lzo.adler32(u2)
    if src != u1 or src != u2:
        raise lzo.error("internal error 1")
    if a0 != a1 or a0 != a2:
        raise lzo.error("internal error 2")
    print("compressed %6d -> %6d" % (len(src), len(c)))
Example #8
def storeInDatabase(cur, compressionLevel, filePathAndName):

    i = open(filePathAndName, 'rb')
    data = i.read()
    i.close()
    
    if compressionLevel > 0:
        ldata = lzo.compress(data, compressionLevel)
        data = ldata

    cur.execute("INSERT INTO map (name, compression, data_file) values (?, ?, ?) ",
                (filePathAndName, compressionLevel, sqlite3.Binary(data))
               )
Example #9
def _mdx_compress(data, compression_type=2):
	header = (struct.pack(b"<L", compression_type) + 
	         struct.pack(b">L", zlib.adler32(data) & 0xffffffff)) #depending on python version, zlib.adler32 may return a signed number. 
	if compression_type == 0: #no compression
		return header + data
	elif compression_type == 2:
		return header + zlib.compress(data)
	elif compression_type == 1:
		if HAVE_LZO:
			return header + lzo.compress(data)[5:] #python-lzo adds a 5-byte header.
		else:
			raise NotImplementedError()
	else:
		raise ParameterError("Unknown compression type")
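A sketch of the matching decoder for the LZO branch: python-lzo's default header is one magic byte (0xF0) followed by the uncompressed length as a big-endian 32-bit integer, so it can be rebuilt if the caller tracks the uncompressed size (an assumption about the surrounding container format):

import struct
import zlib
import lzo

def _mdx_decompress_lzo(block, uncompressed_size):
    # First 8 bytes of the record: compression type (little-endian) + adler32 (big-endian).
    checksum = struct.unpack(b">L", block[4:8])[0]
    # Re-attach the 5-byte python-lzo header that _mdx_compress stripped off.
    data = lzo.decompress(b"\xf0" + struct.pack(b">L", uncompressed_size) + block[8:])
    if zlib.adler32(data) & 0xffffffff != checksum:
        raise ParameterError("Checksum mismatch")
    return data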
Example #10
def gen_raw(src, level=1):
    a0 = lzo.adler32(src)
    c =  lzo.compress(src, level, False)
    u1 = lzo.decompress(c, False, len(src))
    a1 = lzo.adler32(u1)
    o =  lzo.optimize(c, False, len(src))
    u2 = lzo.decompress(o, False, len(src))
    a2 = lzo.adler32(u2)
    # make sure it still works when you overstate the output buffer length
    u3 = lzo.decompress(c, False, len(src) + 100)
    if src != u1 or src != u2 or src != u3:
        raise lzo.error("internal error 1")
    if a0 != a1 or a0 != a2:
        raise lzo.error("internal error 2")
    print("compressed %6d -> %6d" % (len(src), len(c)))
Example #11
def gen(src, level=1):
    a0 = lzo.adler32(src)
    c =  lzo.compress(src, level)
    u1 = lzo.decompress(c)
    a1 = lzo.adler32(u1)
    o =  lzo.optimize(c)
    u2 = lzo.decompress(o)
    a2 = lzo.adler32(u2)
    if src != u1:
        raise lzo.error("internal error 1: %r %r", src, u1)
    if src != u2:
        raise lzo.error("internal error 1: %r %r",  src, u2)
    if a0 != a1 or a0 != a2:
        raise lzo.error("internal error 2")
    print("compressed %6d -> %6d" % (len(src), len(c)))
Example #13
def Compress(Input, Level):
    Output = Input + '.lzo'
    file_in = open(Input, "rb")
    data = file_in.read()

    file_out = open(Output, "wb")
    if not Level:
        level = 5
    else:
        level = int(Level)
    c_data = lzo.compress(data, level)
    file_out.write(c_data)
    file_out.close()

    file_in.close()
Example #14
	def commit_data(self,exp,graph,method):
		self.cursor.execute("SELECT Modif_Time FROM computed_data_table WHERE Id=\'"+exp.uuid+"\' AND Function=\'"+method+"\'")
		tempmodiftup = self.cursor.fetchone()
		self.cursor.execute("SELECT Time_max FROM computed_data_table WHERE Id=\'"+exp.uuid+"\' AND Function=\'"+method+"\'")
		tempmodiftup2 = self.cursor.fetchone()
		if not tempmodiftup:
			#if not graph._X[0][0] == 0 and self.data_exists(xp_uuid=exp.uuid,method=method):
			#	graph.complete_with(self.get_graph(exp.uuid,graph,method))
			binary=self.sql.Binary(lzo.compress(pickle.dumps(graph,pickle.HIGHEST_PROTOCOL)))
			self.cursor.execute("INSERT INTO computed_data_table VALUES(" + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + ")", (\
				exp.uuid, \
				graph.init_time, \
				graph.modif_time, \
				json.dumps({'pop_cfg':exp._pop_cfg, 'step':exp._time_step}, sort_keys=True), \
				method, \
				graph._X[0][-1], \
				binary,))
		elif graph._X[0][-1]>tempmodiftup2[0]:#tempmodiftup[0]!=graph.modif_time and
			binary=self.sql.Binary(lzo.compress(pickle.dumps(graph,pickle.HIGHEST_PROTOCOL)))
			self.cursor.execute("UPDATE computed_data_table SET "\
				+"Modif_Time=\'"+str(graph.modif_time)+"\', "\
				+"Time_max=\'"+str(graph._X[0][-1])+"\', "\
				+"Custom_Graph=" + self.var + " WHERE Id=\'"+str(exp.uuid)+"\' AND Function=\'"+method+"\'",(binary,))
		self.connection.commit()
Example #16
def add_data_conn(cursor, filepath, data, label):
    pickled_data = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
    lz_data = lzo.compress(pickled_data)
    try:
        os.makedirs(os.path.dirname(filepath))
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(
                os.path.dirname(filepath)):
            pass
        else:
            raise
    cursor.execute("CREATE TABLE IF NOT EXISTS main_table("\
       +"T INT, "\
       +"Population_object BLOB)")
    cursor.execute("INSERT INTO main_table VALUES (?,?)",
                   (label, sql.Binary(lz_data)))
Example #17
def test_chunk_read_existing(tmpdir):
    store = Store(str(tmpdir / 'store'))

    chunk_hash = hash('asdf')
    p = store.chunk_path(chunk_hash)
    with open(p, 'wb') as existing:
        existing.write(lzo.compress(b'asdf'))

    f = File(str(tmpdir / 'asdf'), store)

    chunk = Chunk(f, 1, store, chunk_hash)
    assert chunk.read(0) == (b'asdf', -1)
    assert chunk.read(0, 10) == (b'asdf', 6)

    # Check that flushing a file that hasn't been written to does not fail.
    chunk.flush()
Example #18
def test_underrun_gets_noticed(tmpdir):
    # This is a somewhat weird case: it means we're referring to a block
    # that is correctly compressed but has too little data in it.
    store = Store(str(tmpdir))
    with File(str(tmpdir / 'asdf'), store) as f:
        f.write(b'asdfasdfasdf')
        f._flush_chunks(0)
        hash = list(f._mapping.values())[0]
        hash_file = store.chunk_path(hash)
        os.chmod(hash_file, 0o660)
        with open(hash_file, 'wb') as cf:
            cf.write(lzo.compress(b'asdf'))

        f.seek(0)
        with pytest.raises(ValueError):
            f.read()
Example #19
	def add_data(self,data,label):
		assert data is not None
		if not os.path.isfile(self.filepath):
			if not os.path.isfile(self.filepath+'.xz'):
				raise IOError('No file for poplist: '+self.filepath+' . You should call init_db before adding elements')
			else:
				xz_decompress(self.filepath+'.xz')
		pickled_data = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
		lz_data = lzo.compress(pickled_data)
		conn = sql.connect(self.filepath)
		with conn:
			cursor = conn.cursor()
	#		cursor.execute("CREATE TABLE IF NOT EXISTS main_table("\
	#				+"T INT, "\
	#				+"Population_object BLOB)")
			cursor.execute("DELETE FROM main_table WHERE T="+str(label))
			cursor.execute("INSERT INTO main_table VALUES (?,?)",(label,sql.Binary(lz_data)))
Example #20
    def commit(self, exp):
        binary = self.sql.Binary(
            lzo.compress(pickle.dumps(exp, pickle.HIGHEST_PROTOCOL)))
        if not exp._exec_time:
            exec_time = -1
        else:
            exec_time = exp._exec_time[-1]
        if not exp._T:
            T = -1
        else:
            T = exp._T[-1]
        try:
            self.cursor.execute("SELECT Tmax FROM main_table WHERE Id=\'" +
                                exp.uuid + "\'")
        except:
            print(exp.uuid)
            print(self.dbpath)
            print(os.getcwd())
            raise
        tempmodiftup = self.cursor.fetchone()
        if not tempmodiftup:

            self.cursor.execute("INSERT INTO main_table VALUES(" + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + ")", (\
             exp.uuid, \
             exp.init_time, \
             exp.modif_time, \
             exec_time, \
             json.dumps({'pop_cfg':exp._pop_cfg, 'step':exp._time_step}, sort_keys=True), \
		#		exp._voctype, \
		#		exp._strat["strattype"], \
		#		exp._M, \
		#		exp._W, \
		#		exp._nbagent, \
             T, \
             exp._time_step, \
             binary,))
        #elif tempmodiftup[0]<exp.modif_time:
        elif tempmodiftup[0] < T:
            self.cursor.execute("UPDATE main_table SET "\
             +"Modif_Time=\'"+str(exp.modif_time)+"\', "\
             +"Exec_Time=\'"+str(exec_time)+"\', "\
             +"Tmax=\'"+str(T)+"\', "\
             +"step=\'"+str(exp._time_step)+"\', "\
             +"Experiment_object=" + self.var + " WHERE Id=\'"+str(exp.uuid)+"\'",(binary,))
        self.connection.commit()
Example #21
def wrap_player_data(player, endian=1):
    crc = lzo.crc32(player) & 0xffffffff

    bitstream = WriteBitstream()
    tree = make_huffman_tree(player)
    write_huffman_tree(tree, bitstream)
    huffman_compress(invert_tree(tree), player, bitstream)
    data = bitstream.getvalue() + b"\x00\x00\x00\x00"

    header = struct.pack(">I3s", len(data) + 15, b"WSG")
    if endian == 1:
        header = header + struct.pack(">III", 2, crc, len(player))
    else:
        header = header + struct.pack("<III", 2, crc, len(player))

    # lzo.compress prepends a 5-byte header (magic byte + big-endian length);
    # dropping the magic byte leaves a 4-byte size prefix before the payload.
    data = lzo.compress(header + data)[1:]

    return hashlib.sha1(data).digest() + data
Example #22
 def execute_parallel_lzo(self, frame: int = 0, pipe: bool = False):
     """ Compress frames using LZO method. """
     Util().message('debug', f'execute_parallel_lzo({frame}) called.')
     try:
         out_frame = self.frame_queue_buffer.pop(frame).result()
     except KeyError:
         out_frame = self.clip.get_frame_async(frame).result()
         self.last_queued_frame = frame
     frame_data = []
     for plane in out_frame.planes():
         frame_data.append(np.asarray(plane))
     frame_data = lzo.compress(pickle.dumps(frame_data),
                               self.compression_level)
     if pipe:
         return frame_data
     else:
         frame_props = dict(out_frame.props)
         return frame_data, frame_props
Example #24
def _mdx_compress(data, compression_type=2):
    header = (
        struct.pack(b"<L", compression_type) +
        struct.pack(b">L",
                    zlib.adler32(data) & 0xffffffff)
    )  #depending on python version, zlib.adler32 may return a signed number.
    if compression_type == 0:  #no compression
        return header + data
    elif compression_type == 2:
        return header + zlib.compress(data)
    elif compression_type == 1:
        if HAVE_LZO:
            return header + lzo.compress(data)[
                5:]  #python-lzo adds a 5-byte header.
        else:
            raise NotImplementedError()
    else:
        raise ParameterError("Unknown compression type")
Example #25
	def commit(self,exp):
		binary=self.sql.Binary(lzo.compress(pickle.dumps(exp,pickle.HIGHEST_PROTOCOL)))
		if not exp._exec_time:
			exec_time = -1
		else:
			exec_time = exp._exec_time[-1]
		if not exp._T:
			T = -1
		else:
			T = exp._T[-1]
		try:
			self.cursor.execute("SELECT Tmax FROM main_table WHERE Id=\'"+exp.uuid+"\'")
		except:
			print(exp.uuid)
			print(self.dbpath)
			print(os.getcwd())
			raise
		tempmodiftup = self.cursor.fetchone()
		if not tempmodiftup:

			self.cursor.execute("INSERT INTO main_table VALUES(" + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + "," + self.var + ")", (\
				exp.uuid, \
				exp.init_time, \
				exp.modif_time, \
				exec_time, \
				json.dumps({'pop_cfg':exp._pop_cfg, 'step':exp._time_step}, sort_keys=True), \
		#		exp._voctype, \
		#		exp._strat["strattype"], \
		#		exp._M, \
		#		exp._W, \
		#		exp._nbagent, \
				T, \
				exp._time_step, \
				binary,))
		#elif tempmodiftup[0]<exp.modif_time:
		elif tempmodiftup[0]<T:
			self.cursor.execute("UPDATE main_table SET "\
				+"Modif_Time=\'"+str(exp.modif_time)+"\', "\
				+"Exec_Time=\'"+str(exec_time)+"\', "\
				+"Tmax=\'"+str(T)+"\', "\
				+"step=\'"+str(exp._time_step)+"\', "\
				+"Experiment_object=" + self.var + " WHERE Id=\'"+str(exp.uuid)+"\'",(binary,))
		self.connection.commit()
Example #26
    def save_episode(self, name):
        states = []
        actions = []
        rewards = []
        entropies = []

        e = self._first_experience

        while e:
            states.append(e.state())
            actions.append(e.action)
            rewards.append(e.reward)
            entropies.append(e.entropy)

            e = e.next_experience

        s = pickle.dumps((states, actions, rewards, entropies))
        s = lzo.compress(s)
        f = open(name + '.episode', 'wb')
        f.write(s)
        f.close()
Example #27
def add_data(filepath, data, label, priority='decompressed'):
    if priority == 'compressed' and os.path.isfile(filepath + '.xz'):
        xz_decompress(filepath + '.xz')
    pickled_data = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
    lz_data = lzo.compress(pickled_data)
    try:
        os.makedirs(os.path.dirname(filepath))
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(
                os.path.dirname(filepath)):
            pass
        else:
            raise
    conn = sql.connect(filepath)
    with conn:
        cursor = conn.cursor()
        cursor.execute("CREATE TABLE IF NOT EXISTS main_table("\
          +"T INT, "\
          +"Population_object BLOB)")
        cursor.execute("INSERT INTO main_table VALUES (?,?)",
                       (label, sql.Binary(lz_data)))
Example #28
 def flush(self):
     if self.clean:
         return
     assert self.data is not None
     self._update_hash()
     target = self.store.chunk_path(self.hash)
     needs_forced_write = (self.store.force_writes
                           and self.hash not in self.store.seen_forced)
     if self.hash not in self.store.known or needs_forced_write:
         # Create the tempfile in the right directory to increase locality
         # of our change - avoid renaming between multiple directories to
         # reduce traffic on the directory nodes.
         fd, tmpfile_name = tempfile.mkstemp(dir=os.path.dirname(target))
         posix_fadvise(fd, 0, 0, os.POSIX_FADV_DONTNEED)
         with os.fdopen(fd, mode='wb') as f:
             data = lzo.compress(self.data.getvalue())
             f.write(data)
         # Micro-optimization: chmod before rename to help against
         # metadata flushes and then changing metadata again.
         os.chmod(tmpfile_name, 0o440)
         os.rename(tmpfile_name, target)
         self.store.seen_forced.add(self.hash)
         self.store.known.add(self.hash)
     self.clean = True
Example #29
def iproto_save(ipin, ipout, m_version, m_count, m_struct):
    def nullpadding(mlen, msize=8):
        # Pad to an msize-byte boundary with NUL bytes.
        retlen = b''
        mpad = mlen % msize
        if mpad:
            retlen = (msize - mpad) * b'\x00'
        return retlen

    pack, unpack = struct.pack, struct.unpack
    global MT2_MAGIC1, MT2_MAGIC3, MT2_XTEAKEY_IPX, LZO_COMPRESSION_LEVEL

    ip_out2X = open(ipin, "rb")
    ip_out2 = ip_out2X.read()
    ip_out2X.close()
    del ip_out2X

    ipl_out2 = len(ip_out2)
    ip_out3 = lzo.compress(ip_out2, LZO_COMPRESSION_LEVEL)
    ipd_out3 = unpack("!L", ip_out3[1:5])[0]

    ipp_out3 = ip_out3[5:]
    ipl_out3 = len(ipp_out3)
    ip_out4 = ipp_out3 + nullpadding(ipl_out3)

    ip_out5 = _xtea.encrypt_all(MT2_MAGIC1 + ip_out4 + b'\x00' * 4,
                                MT2_XTEAKEY_IPX)
    ipl_out5 = len(ip_out5)

    ipp_h1 = MT2_MAGIC3 + pack("I", m_version) + pack("I", m_struct) + pack(
        "I", m_count) + pack("I", ipl_out5 + 16)
    ipp_hl2 = MT2_MAGIC1 + pack("I", ipl_out5) + pack("I", ipl_out3) + pack(
        "I", ipd_out3)

    ip_outX = open(ipout, "wb")
    ip_outX.write(ipp_h1 + ipp_hl2 + ip_out5)
    ip_outX.close()
Example #30
def mproto_save(mpin, mpout, m_count):
    def nullpadding(mlen, msize=8):
        # Pad to an msize-byte boundary with NUL bytes.
        retlen = b''
        mpad = mlen % msize
        if mpad:
            retlen = (msize - mpad) * b'\x00'
        return retlen

    pack, unpack = struct.pack, struct.unpack
    global MT2_MAGIC1, MT2_MAGIC4, MT2_XTEAKEY_MPX, LZO_COMPRESSION_LEVEL

    mp_out2X = open(mpin, "rb")
    mp_out2 = mp_out2X.read()
    mp_out2X.close()
    del mp_out2X

    mpl_out2 = len(mp_out2)
    mp_out3 = lzo.compress(mp_out2, LZO_COMPRESSION_LEVEL)
    mpd_out3 = unpack("!L", mp_out3[1:5])[0]

    mpp_out3 = mp_out3[5:]
    mpl_out3 = len(mpp_out3)
    mp_out4 = mpp_out3 + nullpadding(mpl_out3)

    mp_out5 = _xtea.encrypt_all(MT2_MAGIC1 + mp_out4 + b'\x00' * 4,
                                MT2_XTEAKEY_MPX)
    mpl_out5 = len(mp_out5)

    mpp_h1 = MT2_MAGIC4 + pack("I", m_count) + pack("I", mpl_out5 + 16)
    mpp_hl2 = MT2_MAGIC1 + pack("I", mpl_out5) + pack("I", mpl_out3) + pack(
        "I", mpd_out3)

    mpp_outX = open(mpout, "wb")
    mpp_outX.write(mpp_h1 + mpp_hl2 + mp_out5)
    mpp_outX.close()
Example #31
 def lzo_mangler(s):
     return lzo.compress(s, 9)
Example #32
 def lzo_compress(packet, level):
     if _memoryview and isinstance(packet, _memoryview):
         packet = packet.tobytes()
     return level | LZO_FLAG, lzo.compress(packet)
Example #33
elif command == "compress-sz":
    with open(filename, "r") as f:
        cdata = snappy.compress(f.read().encode("utf-8"))
    sys.stdout.buffer.write(cdata)
elif command == "decompress-sz":
    with open(filename, "rb") as f:
        udata = snappy.decompress(f.read()).decode("utf-8")
    sys.stdout.write(udata)

# LZO
elif command == "read-lzo":
    with open(filename, "rb") as f:
        card = toml.loads(lzo.decompress(f.read()).decode("utf-8"))
elif command == "compress-lzo":
    with open(filename, "r") as f:
        cdata = lzo.compress(f.read().encode("utf-8"))
    sys.stdout.buffer.write(cdata)
elif command == "decompress-lzo":
    with open(filename, "rb") as f:
        udata = lzo.decompress(f.read()).decode("utf-8")
    sys.stdout.write(udata)

def warning(text):
    print(f"{Fore.YELLOW}{Style.BRIGHT}Warning: {text}{Style.RESET_ALL}")

Example #35
File: LzoCodec.py  Project: jephdo/Hadoop
 def compress(self, data):
     return lzo.compress(data)
Example #36
 def compress(self, data):
     return lzo.compress(data, 1)
Example #37
# Python module that is distributed under GPLv2.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.

import sys

try:
    import lzo
except ImportError:
    exit(-1)  # python-lzo is not installed

try:
    data = sys.stdin.buffer.read()
    data = lzo.compress(data)
    sys.stdout.buffer.write(data)
except Exception as e:
    print(e, file=sys.stderr)
    exit(1)
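A matching decompress filter is a one-line variation on the same stdin/stdout pattern (sketch, using the same default python-lzo header):

import sys

import lzo

sys.stdout.buffer.write(lzo.decompress(sys.stdin.buffer.read()))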
Example #38
import io
import lzo
import struct
import sys

HEADER = b"\xFF\xFF\xFF\xFF\x03\x00\x00\x00"

if len(sys.argv) < 2:
    print("Not enough arguments", file=sys.stderr)
    sys.exit(1)

file_name = sys.argv[1]

decomp = io.open(file_name, "rb")
data = decomp.read()
decomp.close()

size = len(data)

comp = lzo.compress(data, 1, False)

out = io.open(file_name[0:len(file_name) - 1] + "c", "wb")
out.write(comp)
out.close()

out = io.open(file_name[0:len(file_name) - 2], "wb")
out.write(HEADER)
out.write(struct.pack("<I", size))
out.write(comp)
out.close()
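A sketch of the inverse script: the 8-byte magic and the little-endian size field follow from the writer above, and raw decompression (header=False) needs that size as the output buffer length; the output file naming is illustrative:

import io
import lzo
import struct
import sys

file_name = sys.argv[1]

with io.open(file_name, "rb") as f:
    packed = f.read()

# Layout written above: 8-byte magic, uint32 size (little-endian), raw LZO data.
size = struct.unpack("<I", packed[8:12])[0]
data = lzo.decompress(packed[12:], False, size)

with io.open(file_name + ".out", "wb") as out:
    out.write(data)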
Example #39
 def lzo_compress(packet, level):
     return level | LZO_FLAG, lzo.compress(packet)
Example #40
def compression_factor(chunk):
    l1 = len(chunk)
    l2 = len(lzo.compress(chunk))
    # Guard against empty chunks; report "no gain" rather than dividing by zero.
    return l2 / l1 if l1 else 1.0
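One way to use the helper above: keep the compressed form only when it is meaningfully smaller (the 0.9 cutoff is an arbitrary illustration, and the chunk is compressed twice for brevity):

def pack_chunk(chunk, cutoff=0.9):
    # Returns (is_compressed, payload).
    if compression_factor(chunk) < cutoff:
        return True, lzo.compress(chunk)
    return False, chunk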
Example #41
block_data = bitcoin_pb2.Block()
block_data.hash = "abcd1234"
block_data.confirmations = 213123
block_data.size = 132321
block_data.height = 123141
block_data.version = 1
block_data.time = 12312312
block_data.nonce = 2132131
block_data.bits = "abcd1234"
block_data.difficulty = float(2421.232121)
block_data.chainwork = "abcd1234"
block_data.previousblockhash = "abcd1234"
block_data.nextblockhash = "abcd1234"
transaction = block_data.tx.add()
transaction.hash = "transaction"
data = lzo.compress(block_data.SerializeToString(), 1)
data = base64.b64encode(data)
fh.write(data + b"\n")
fh.write(data + b"\n")
fh.write(data + b"\n")
fh.write(data + b"\n")
fh.write(data + b"\n")

# newblock = bitcoin_pb2.Block()
# newblock.ParseFromString(lzo.decompress(data))
# print newblock.hash

# except Exception as e:
# 	print "****************Error*******"
# 	raise
Example #43
 def flush():
     if arrtracc is not None:
         outdb[arrtracc] = comp.compress(arr[:ARRSAVING].tostring())
Example #44
 def lzo_compress(packet, level):
     if isinstance(packet, memoryview):
         packet = packet.tobytes()
     return level | LZO_FLAG, python_lzo.compress(packet)
Example #45
def save_gbx(options: dict, template: str, output: str):
    context = GbxSaveContext(True)

    def data_replace(s, rep, offset, rlen=-1):
        if rlen == -1:
            rlen = len(rep)
        return s[:offset] + rep + s[offset + rlen:]

    temp_gbx = Gbx(template)
    challenge = temp_gbx.get_class_by_id(GbxType.CHALLENGE)
    common = temp_gbx.get_class_by_id(0x03043003)

    if 'rotation' in options:
        track = rotate_track_challenge(challenge, options['rotation'])
    elif 'input' in options:
        track = pickle.load(open(options['input'], 'rb'))
    elif 'track_data' in options:
        track = options['track_data']

    track = populate_flags(track)

    context.append_to_string_store(challenge.map_uid)
    context.append_to_string_store(challenge.environment)
    context.append_to_string_store(challenge.map_author)
    context.append_to_string_store(challenge.map_name)
    context.append_to_string_store(challenge.mood)
    context.append_to_string_store(challenge.env_bg)
    context.append_to_string_store(challenge.env_author)

    udata = bytes(temp_gbx.data)

    temp = open(template, 'rb')
    data = temp.read()

    # We have to be very careful about the order in which we save the data.
    # We write from the very end to the beginning of the file,
    # so that all of Gbx's class offsets stay valid.

    # Modifying body
    # Blocks
    context.write_uint32(len(track))
    for block in track:
        context.write_block(block)

    info = temp_gbx.positions['block_data']
    if info.valid:
        udata = data_replace(udata, context.data,
                             info.pos, info.size)

    # The mood
    # info = temp_gbx.positions['mood']
    # if info.valid:
    #     mood = random.choices(MOODS, MOOD_WEIGHTS)[0]
    #     print(mood)
    #     udata = data_replace(udata, write_lookback_string(
    #         stored_strings, seen_lookback, mood), info.pos, info.size)

    # Map name in editor
    context.reset()
    if 'map_name' in options:
        map_name = options['map_name']
    else:
        map_name = get_map_name(output)
    
    context.write_string(map_name)

    # The map name
    info = temp_gbx.positions['map_name']
    if info.valid:
        udata = data_replace(udata, context.data, info.pos, info.size)

    compressed = lzo.compress(bytes(udata), 1, False)

    fs = open(output, 'wb+')

    # New data and compressed data size
    data_size_offset = temp_gbx.positions['data_size'].pos

    comp_data_size_offset = data_size_offset + 4
    comp_data_offset = comp_data_size_offset + 4

    data = data_replace(data, struct.pack('I', len(udata)),
                        data_size_offset)
    data = data_replace(data, struct.pack(
        'I', len(compressed)), comp_data_size_offset)
    data = data_replace(data, compressed, comp_data_offset)

    # Modifying header
    # The track name in map chooser
    info = temp_gbx.positions['track_name']
    if info.valid:
        data = data_replace(data, context.data, info.pos, info.size)

    # New chunk size since track name length could change
    user_data_diff = len(common.track_name) - len(map_name)
    info = temp_gbx.positions['50606083']
    if info.valid:
        prev = temp_gbx.root_parser.pos
        temp_gbx.root_parser.pos = info.pos
        new_chunk_size = temp_gbx.root_parser.read_uint32() - user_data_diff
        temp_gbx.root_parser.pos = prev

        data = data_replace(data, struct.pack(
            'I', new_chunk_size), info.pos, info.size)

    # Finally, the user data size
    new_user_data_size = temp_gbx.user_data_size - user_data_diff
    info = temp_gbx.positions['user_data_size']
    if info.valid:
        data = data_replace(data, struct.pack(
            'I', new_user_data_size), info.pos, info.size)

    fs.write(data)
    fs.close()
Example #46
def parse_savefile(fn):
    with open(fn, "rb") as f:
        data = Consumable(f.read())
    # PC builds, presumably including Linux builds, should be
    # little-endian and LZO-compressed. Some retrievals are
    # forced big-endian, others vary by platform. Dunno why.
    endian = "little"
    hash = data.get(20)
    if hash != hashlib.sha1(data.peek()).digest():
        raise SaveFileFormatError("Hash fails to validate")
    uncompressed_size = int.from_bytes(data.get(4), "big")
    if uncompressed_size > 0x40000:
        raise SaveFileFormatError("TODO: handle chunked decompression")
    raw = lzo.decompress(data.peek(), False, uncompressed_size)
    if len(raw) != uncompressed_size:
        raise SaveFileFormatError("Got wrong amount of data back (%d != %d)" %
                                  (len(raw), uncompressed_size))
    if args.verify:
        # LZO compression isn't stable or consistent enough to compare the
        # compressed bytes to what we got from the file. But let's just
        # quickly make sure we can get something back, at least.
        comp = lzo.compress(raw, 1, False)
        if lzo.decompress(comp, False, uncompressed_size) != raw:
            print(
                "Recompression gives something that we didn't get first time!")
            return ""
    # Okay. Decompression complete. Now to parse the actual data.
    data = Consumable(raw)
    size = int.from_bytes(data.get(4), "big")
    if size != len(data):
        raise SaveFileFormatError(
            "Size doesn't match remaining bytes - corrupt file? chunked?")
    if data.get(3) != b"WSG":
        raise SaveFileFormatError("Invalid magic number - corrupt file?")
    if int.from_bytes(data.get(4), endian) != 2:
        raise SaveFileFormatError(
            "Unsupported version number (probable corrupt file)")
    crc = int.from_bytes(data.get(4), endian)
    uncomp_size = int.from_bytes(
        data.get(4), endian)  # Gibbed uses a *signed* 32-bit int here
    # For some bizarre reason, the data in here is Huffman-compressed.
    # The whole file has already been LZO-compressed. No point compressing twice!
    # Not sure what the last four bytes are. The end of the compressed sequence
    # finishes off the current byte, and then there are always four more bytes.
    if data.peek()[-4:] != b"\xd4\x93\x9f\x1a":
        raise SaveFileFormatError("Different last four bytes: %r" %
                                  data.peek()[-4:])
    data = huffman_decode(data.peek()[:-4], uncomp_size)
    if crc != binascii.crc32(data):
        raise SaveFileFormatError("CRC doesn't match (%d vs %d)" %
                                  (crc, binascii.crc32(data)))
    if args.verify:
        reconstructed = huffman_encode(data)
        reconstructed = b"".join([
            (3 + 4 + 4 + 4 + len(reconstructed) + 4).to_bytes(4, "big"),
            b"WSG",
            (2).to_bytes(4, endian),
            binascii.crc32(data).to_bytes(4, endian),
            len(data).to_bytes(4, endian),
            reconstructed,
            b"\xd4\x93\x9f\x1a",
        ])
        if reconstructed != raw:
            if len(reconstructed) != len(raw):
                print("Imperfect recompression:", len(raw), len(reconstructed))
                return ""
            print("Mismatched after recompression", len(raw))
            for ofs in range(0, len(raw), 64):
                old = raw[ofs:ofs + 64]
                new = reconstructed[ofs:ofs + 64]
                if old != new:
                    print(ofs, old)
                    print(ofs, new)
            return ""
    savefile = SaveFile.decode_protobuf(data)
    if args.verify:
        reconstructed = savefile.encode_protobuf()
        if reconstructed != data:
            print("Imperfect reconstruction:", len(data))
            for sz in range(64, max(len(data), len(reconstructed)) + 65, 64):
                if data[:sz] == reconstructed[:sz]: continue
                print(sz - 64)
                print(data[sz - 64:sz])
                print(reconstructed[sz - 64:sz])
                break
            return ""
    cls = get_asset("Player Classes")[savefile.playerclass]["class"]
    # The packed_weapon_data and packed_item_data arrays contain the correct
    # number of elements for the inventory items. (Equipped or backpack is
    # irrelevant, but anything that isn't a weapon ('nade mod, class mod, etc)
    # goes in the item data array.)
    items = []
    for item in (savefile.packed_weapon_data or []) + (
            savefile.packed_item_data or []) + (savefile.bank or []):
        if args.loot_filter is None: break
        it = Asset.decode_asset_library(item.serial)
        if not it: continue
        for filter, filterargs in args.loot_filter:
            if not filter(it, *filterargs): break
        else:
            items.append((item.order(), -it.grade, item.prefix() + repr(it)))
    ret = "Level %d (%dxp) %s: \x1b[1;31m%s\x1b[0m (%d+%d items)" % (
        savefile.level, savefile.exp, cls, savefile.preferences.name,
        len(savefile.packed_weapon_data), len(savefile.packed_item_data) - 2)
    items.sort()
    ret += "".join("\n" + desc for order, lvl, desc in items if order >= 0)
    if args.synth is not None:
        # Make changes to the save file before synthesizing
        savefile.preferences.name = "PATCHED"  # Easy way to see what's happening
        for synth, synthargs in args.synth:
            synth(savefile, *synthargs)

        data = savefile.encode_protobuf()
        reconstructed = huffman_encode(data)
        reconstructed = b"".join([
            (3 + 4 + 4 + 4 + len(reconstructed) + 4).to_bytes(4, "big"),
            b"WSG",
            (2).to_bytes(4, endian),
            binascii.crc32(data).to_bytes(4, endian),
            len(data).to_bytes(4, endian),
            reconstructed,
            b"\xd4\x93\x9f\x1a",
        ])
        comp = len(reconstructed).to_bytes(4, "big") + lzo.compress(
            reconstructed, 1, False)
        comp = hashlib.sha1(comp).digest() + comp
        with open("synthesized.sav", "wb") as f:
            f.write(comp)
    return ret
Example #47
 def compress(data, *args, **kwargs):
     return lzo.compress(data, *args, **kwargs)