Example #1
 def save_chunks(self, compression_type: int = 2) -> None:
     if not 1 <= compression_type <= 2:  # Compression type: 1 => GZip, 2 => Zlib Deflate
         raise ValueError(f"Invalid compression type: {compression_type}")
     index_stream: object = binary_stream()  # The chunk locations table
     timestamp_stream: object = binary_stream()  # The chunk timestamps table
     chunks_stream: object = binary_stream()  # The chunk data
     pos: int = 2  # Offset (in sectors) of the next chunk; the two header tables fill sectors 0 and 1
     for i in range(0, 1024):  # Write all chunks
         if not isinstance(self.chunks[i], empty_chunk):  # Skip empty chunks
             chunk_stream: object = binary_stream()
             chunk_data: bytes = self.chunks[i].write_data()  # Encode the chunk
             if compression_type == 1:  # GZip
                 compressed_chunk_data: bytes = gzip.compress(chunk_data)
             else:  # Zlib Deflate
                 compressed_chunk_data: bytes = zlib.compress(chunk_data)
             chunk_stream.write_int_be(len(compressed_chunk_data))  # Size on disk (in bytes) of the compressed chunk data
             chunk_stream.write_unsigned_byte(compression_type)  # Compression type
             chunk_stream.write(compressed_chunk_data)  # The compressed chunk data
             size: int = -(-len(chunk_stream.data) // 4096) * 4096  # Round up to the next 4096-byte sector boundary
             chunk_stream.write(b"\x00" * (size - len(chunk_stream.data)))  # Pad the chunk with trailing zeros
             chunks_stream.write(chunk_stream.data)  # Append the padded chunk to the chunks stream
             sector_count: int = size // 4096  # Chunk size (in sectors)
             index_stream.write_unsigned_triad_be(pos)  # Write the chunk offset to the locations table
             index_stream.write_unsigned_byte(sector_count)  # Write the chunk size (in sectors) to the locations table
             pos += sector_count  # Advance the offset past this chunk
             timestamp_stream.write_unsigned_int_be(int(time.time()))  # Write the current timestamp to the timestamps table
         else:  # Empty chunk: write empty location, size and timestamp entries
             index_stream.write_unsigned_triad_be(0)
             index_stream.write_unsigned_byte(0)
             timestamp_stream.write_unsigned_int_be(0)
     data: bytes = index_stream.data + timestamp_stream.data + chunks_stream.data  # Join the tables and the chunk data together
     with open(self.__path, "wb") as file:  # Save the region file
         file.write(data)
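
These examples all build on a binary_stream helper that is not shown here. For reference, this is a minimal sketch of the subset Example #1 (and its reader, Example #5) relies on, assuming an internal read offset and standard fixed-width encodings; the real class also carries var-ints, little-endian variants and byte arrays following the same pattern.

import struct

class binary_stream:
    def __init__(self, data: bytes = b"") -> None:
        self.data: bytes = data
        self.pos: int = 0  # Current read offset

    def read(self, size: int) -> bytes:
        result: bytes = self.data[self.pos:self.pos + size]
        self.pos += size
        return result

    def write(self, data: bytes) -> None:
        self.data += data

    def feos(self) -> bool:
        return self.pos >= len(self.data)  # True once every byte has been read

    def read_unsigned_byte(self) -> int:
        return self.read(1)[0]

    def write_unsigned_byte(self, value: int) -> None:
        self.write(struct.pack("B", value))

    def read_unsigned_triad_be(self) -> int:
        return int.from_bytes(self.read(3), "big")  # 3-byte big-endian integer

    def write_unsigned_triad_be(self, value: int) -> None:
        self.write(value.to_bytes(3, "big"))

    def read_int_be(self) -> int:
        return struct.unpack(">i", self.read(4))[0]

    def write_int_be(self, value: int) -> None:
        self.write(struct.pack(">i", value))

    def read_unsigned_int_be(self) -> int:
        return struct.unpack(">I", self.read(4))[0]

    def write_unsigned_int_be(self, value: int) -> None:
        self.write(struct.pack(">I", value))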
Example #2
 def write_packet_data(self, data: bytes) -> None:
     buffer: object = binary_stream()
     buffer.write_var_int(len(data))
     buffer.write(data)
     if hasattr(self, "body"):
         self.body += buffer.data
     else:
         self.body: bytes = buffer.data
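
write_packet_data frames each packet with its var-int length. The encoding itself is not shown in these examples; a standard LEB128-style var-int for non-negative values (7 payload bits per byte, high bit set while more bytes follow) would look like the sketch below, written as two extra methods for the binary_stream sketch above.

 def write_var_int(self, value: int) -> None:
     while True:
         byte: int = value & 0x7f  # Low 7 bits of the value
         value >>= 7
         if value != 0:
             self.write_unsigned_byte(byte | 0x80)  # High bit set: more bytes follow
         else:
             self.write_unsigned_byte(byte)  # High bit clear: last byte
             break

 def read_var_int(self) -> int:
     value: int = 0
     for shift in range(0, 35, 7):  # At most 5 bytes for a 32-bit value
         byte: int = self.read_unsigned_byte()
         value |= (byte & 0x7f) << shift
         if not byte & 0x80:  # High bit clear: last byte
             return value
     raise ValueError("var-int is too long")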
Example #3
 def decode_payload(self) -> None:
     self.protocol_version: int = self.read_unsigned_int_be()
     self.chain_data: list = []
     buffer: object = binary_stream(self.read_byte_array())
     raw_chain_data: dict = json.loads(buffer.read(buffer.read_unsigned_int_le()).decode())  # Length-prefixed JSON blob holding the chain
     for chain in raw_chain_data["chain"]:
         self.chain_data.append(jwt.decode(chain))  # Decode each JWT in the login chain
     self.skin_data: dict = jwt.decode(buffer.read(buffer.read_unsigned_int_le()).decode())  # Decode the length-prefixed JWT holding the skin data
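
jwt here is the codebase's own helper rather than the PyJWT package, and its decode presumably returns the token's payload as a dict. A minimal stand-in that parses the payload without verifying the signature (jwt_payload is an invented name):

import base64
import json

def jwt_payload(token: str) -> dict:
    payload: str = token.split(".")[1]  # A JWT is header.payload.signature, each base64url-encoded
    payload += "=" * (-len(payload) % 4)  # Restore the stripped base64 padding
    return json.loads(base64.urlsafe_b64decode(payload))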
Example #4
 def network_serialize(self) -> bytes:
     stream: object = binary_stream()
     for y in range(0, self.get_sub_chunk_send_count()):
         self.sub_chunks[y].network_serialize(stream)  # Serialize each sub chunk that has to be sent
     stream.write_var_int(len(self.biomes))  # Biome count
     for biome in self.biomes:
         stream.write_unsigned_byte(biome)  # One byte per biome ID
     stream.write_unsigned_byte(0)  # Trailing zero byte (border blocks count)
     return stream.data
Example #5
 def load_chunks(self) -> None:
     with open(self.__path, "rb") as file:  # Open the region file
         data: bytes = file.read()
     self.chunks: list = []  # Chunk storage
     index_stream: object = binary_stream(data[0:4096])  # The encoded chunk locations table
     timestamp_stream: object = binary_stream(data[4096:8192])  # The encoded chunk timestamps table
     for i in range(0, 1024):  # Read all chunks
         pos: int = index_stream.read_unsigned_triad_be()  # Chunk offset (in sectors) from the locations table
         size: int = index_stream.read_unsigned_byte()  # Chunk size (in sectors) from the locations table
         timestamp: int = timestamp_stream.read_unsigned_int_be()  # Chunk timestamp from the timestamps table (read to keep the stream position in step)
         if pos != 0 and size != 0:  # Check if the chunk exists in the region file
             chunk_stream: object = binary_stream(data[(pos * 4096):(pos * 4096) + (size * 4096)])  # Encoded chunk
             size_on_disk: int = chunk_stream.read_unsigned_int_be()  # Size (in bytes) of the compressed chunk
             compression_type: int = chunk_stream.read_unsigned_byte()  # Compression type: 1 => GZip, 2 => Zlib Deflate
             compressed_chunk_data: bytes = chunk_stream.read(size_on_disk)  # Compressed chunk
             if compression_type == 1:  # GZip
                 chunk_data: bytes = gzip.decompress(compressed_chunk_data)
             elif compression_type == 2:  # Zlib Deflate
                 chunk_data: bytes = zlib.decompress(compressed_chunk_data)
             else:
                 raise ValueError(f"Invalid compression type: {compression_type}")
             new_chunk: object = chunk()  # Create the chunk object
             new_chunk.read_data(chunk_data)  # Decode the fetched chunk data
             self.chunks.append(new_chunk)  # Append the chunk to the chunk storage
         else:  # The chunk does not exist, append an empty chunk instead
             self.chunks.append(empty_chunk())
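
Examples #1 and #5 together implement the classic region file layout: a 4096-byte locations table (1024 entries of a 3-byte sector offset plus a 1-byte sector count), a 4096-byte timestamps table, then the chunks themselves in 4096-byte sectors. As a standalone illustration, one header entry can be read with nothing but the standard library; the file name and chunk index below are placeholders.

import struct

def read_header_entry(path: str, index: int) -> tuple:
    with open(path, "rb") as file:
        file.seek(index * 4)  # Locations table: 4 bytes per entry
        entry: bytes = file.read(4)
        file.seek(4096 + index * 4)  # Timestamps table starts at the second sector
        timestamp: int = struct.unpack(">I", file.read(4))[0]
    offset: int = int.from_bytes(entry[0:3], "big")  # Chunk offset (in sectors)
    sector_count: int = entry[3]  # Chunk size (in sectors)
    return offset, sector_count, timestamp  # (0, 0, _) means the chunk is absent

offset, sector_count, timestamp = read_header_entry("region_file", 0)
print(f"chunk 0: sector {offset}, {sector_count} sector(s), saved at {timestamp}")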
Example #6
 def network_deserialize(self, data: bytes, sub_chunk_count: int = 16) -> None:
     stream: object = binary_stream(data)
     for y in range(0, sub_chunk_count):
         sc: object = sub_chunk()
         sc.network_deserialize(stream)  # Deserialize each sub chunk in place
         self.sub_chunks[y] = sc
     self.biomes: list = []
     for i in range(0, stream.read_var_int()):  # Biome count
         self.biomes.append(stream.read_unsigned_byte())
Example #7
def encode_packet(packet: dict) -> bytes:
    stream: object = binary_stream()
    packet_fields: dict = get_packet_fields(packet["id"])
    if packet_fields is not None:
        stream.write_unsigned_byte(packet["id"])  # Packet ID
        for field_name, field_type in packet_fields.items():
            if field_name in packet:
                encode_data_type(field_type, packet[field_name], stream)  # Encode each field in schema order
        return stream.data
    else:
        return b""  # Unknown packet ID
Example #8
 def read_uuid(self) -> str:
     stream: object = binary_stream()
     for i in range(0, 4):
         stream.write_int_be(self.read_int_le())  # Swap the byte order of each 32-bit group
     return b"-".join([
         binascii.hexlify(stream.read(4)),
         binascii.hexlify(stream.read(2)),
         binascii.hexlify(stream.read(2)),
         binascii.hexlify(stream.read(2)),
         binascii.hexlify(stream.read(6))
     ]).decode()  # Format as the canonical 8-4-4-4-12 UUID string
Example #9
def decode_packet(data: bytes) -> dict:
    stream: object = binary_stream(data)
    packet_id: int = stream.read_unsigned_byte()
    packet_fields: dict = get_packet_fields(packet_id)
    if packet_fields is not None:
        packet: dict = {"id": packet_id}
        for field_name, field_type in packet_fields.items():
            if not stream.feos():  # Stop once the end of the stream is reached
                packet[field_name] = decode_data_type(field_type, stream)
        return packet
    else:
        return {}
Example #10
 def encode_payload(self) -> None:
     self.write_unsigned_int_be(self.protocol_version)
     raw_chain_data: dict = {"chain": []}
     for chain in self.chain_data:
         jwt_data: str = jwt.encode({"alg": "HS256", "typ": "JWT"}, chain, misc.mojang_public_key)
         raw_chain_data["chain"].append(jwt_data)
     temp_stream: object = binary_stream()
     json_data: str = json.dumps(raw_chain_data)
     temp_stream.write_unsigned_int_le(len(json_data))
     temp_stream.write(json_data.encode())
     self.write_byte_array(temp_stream.data)
     jwt_data: str = jwt.encode({"alg": "HS256", "typ": "JWT"}, self.skin_data, misc.mojang_public_key)
     self.write_unsigned_int_le(len(jwt_data))
     self.write(jwt_data.encode())
Example #11
 def read_packets_data(self) -> list:
     buffer: object = binary_stream(self.body)
     packets_data: list = []
     while not buffer.feos():  # Read length-prefixed packets until the end of the body
         packets_data.append(buffer.read(buffer.read_var_int()))
     return packets_data
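
Putting the var-int sketch to work, the batch framing shared by Examples #2 and #11 round-trips like this (the payload bytes are arbitrary):

stream: object = binary_stream()
for payload in (b"\x01\x02\x03", b"\x04\x05"):  # Two length-prefixed packets
    stream.write_var_int(len(payload))
    stream.write(payload)

reader: object = binary_stream(stream.data)
while not reader.feos():
    print(reader.read(reader.read_var_int()))  # b'\x01\x02\x03' then b'\x04\x05'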
Example #12
 def write_uuid(self, uuid: str) -> None:
     stream: object = binary_stream(binascii.unhexlify(uuid.replace("-", "")))
     for i in range(0, 4):
         self.write_int_le(stream.read_int_be())  # Swap the byte order of each 32-bit group
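
Examples #8 and #12 are inverses: each of the UUID's four 32-bit words is byte-swapped on the wire. The same transform can be verified with plain byte slicing; uuid_to_le, uuid_from_le and the sample value are only for demonstration.

import binascii

def uuid_to_le(uuid: str) -> bytes:
    raw: bytes = binascii.unhexlify(uuid.replace("-", ""))
    return b"".join(raw[i:i + 4][::-1] for i in range(0, 16, 4))  # Mirror write_uuid

def uuid_from_le(data: bytes) -> str:
    raw: bytes = b"".join(data[i:i + 4][::-1] for i in range(0, 16, 4))  # Mirror read_uuid
    hexed: str = raw.hex()
    return "-".join([hexed[0:8], hexed[8:12], hexed[12:16], hexed[16:20], hexed[20:32]])

sample: str = "c0e3a868-561d-4a6d-b82e-d8ee64b0dc2e"  # Arbitrary example UUID
assert uuid_from_le(uuid_to_le(sample)) == sample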