Example #1
class DataBlock(Block):
    def __init__(self, length: int) -> None:
        super().__init__()

        self.buffer = BitStream(length=length * 8)
        self.pointers: List[Tuple[int, Optional[Block]]] = []

    def _add_pointer(self, offset: int, block: Optional[Block]) -> None:
        self.pointers.append((offset, block))

    def _at_offset(self, offset: int, *args: Any,
                   **kwargs: Any) -> bitstream_offset:
        return bitstream_offset(self.buffer, offset, *args, **kwargs)

    def prepare_bitstream(self) -> BitStream:
        for offset, obj in self.pointers:
            with bitstream_offset(self.buffer, offset):
                if obj is not None:
                    self.buffer.overwrite(pack('uintle:64', obj.offset))
                else:
                    self.buffer.overwrite(pack('uintle:64', 0))
        return self.buffer

    def get_all_pointers(self) -> List[int]:
        return sorted(
            [x + self.offset for x, p in self.pointers if p is not None])

    @abc.abstractmethod
    def alignment(self) -> int:
        pass

    def __len__(self) -> int:
        return len(self.buffer) // 8
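
The prepare_bitstream methods in Example #1 (above) and Example #13 (further down) rely on a bitstream_offset helper that is not shown in this listing. A minimal sketch of what such a context manager could look like, assuming offset is a byte offset and ignoring the extra arguments _at_offset forwards; the real helper may differ:

from contextlib import contextmanager
from bitstring import BitStream

@contextmanager
def bitstream_offset(stream: BitStream, offset: int):
    # Hypothetical helper: temporarily move to byte `offset`, restore the
    # previous bit position on exit.
    saved = stream.pos
    stream.pos = offset * 8
    try:
        yield stream
    finally:
        stream.pos = saved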
Example #2
def decrypt_com2us_png():
    com2us_decrypt_values = [
        0x2f, 0x7c, 0x47, 0x55, 0x32, 0x77, 0x9f, 0xfb, 0x5b, 0x86, 0xfe, 0xb6,
        0x3e, 0x06, 0xf4, 0xc4, 0x2e, 0x08, 0x49, 0x11, 0x0e, 0xce, 0x84, 0xd3,
        0x7b, 0x18, 0xa6, 0x5c, 0x71, 0x56, 0xe2, 0x3b, 0xfd, 0xb3, 0x2b, 0x97,
        0x9d, 0xfc, 0xca, 0xba, 0x8e, 0x7e, 0x6f, 0x0f, 0xe8, 0xbb, 0xc7, 0xc2,
        0xd9, 0xa4, 0xd2, 0xe0, 0xa5, 0x95, 0xee, 0xab, 0xf3, 0xe4, 0xcb, 0x63,
        0x25, 0x70, 0x4e, 0x8d, 0x21, 0x37, 0x9a, 0xb0, 0xbc, 0xc6, 0x48, 0x3f,
        0x23, 0x80, 0x20, 0x01, 0xd7, 0xf9, 0x5e, 0xec, 0x16, 0xd6, 0xd4, 0x1f,
        0x51, 0x42, 0x6c, 0x10, 0x14, 0xb7, 0xcc, 0x82, 0x7f, 0x13, 0x02, 0x00,
        0x72, 0xed, 0x90, 0x57, 0xc1, 0x2c, 0x5d, 0x28, 0x81, 0x1d, 0x38, 0x1a,
        0xac, 0xad, 0x35, 0x78, 0xdc, 0x68, 0xb9, 0x8b, 0x6a, 0xe1, 0xc3, 0xe3,
        0xdb, 0x6d, 0x04, 0x27, 0x9c, 0x64, 0x5a, 0x8f, 0x83, 0x0c, 0xd8, 0xa8,
        0x1c, 0x89, 0xd5, 0x43, 0x74, 0x73, 0x4d, 0xae, 0xea, 0x31, 0x6e, 0x1e,
        0x91, 0x1b, 0x59, 0xc9, 0xbd, 0xf7, 0x07, 0xe7, 0x8a, 0x05, 0x8c, 0x4c,
        0xbe, 0xc5, 0xdf, 0xe5, 0xf5, 0x2d, 0x4b, 0x76, 0x66, 0xf2, 0x50, 0xd0,
        0xb4, 0x85, 0xef, 0xb5, 0x3c, 0x7d, 0x3d, 0xe6, 0x9b, 0x03, 0x0d, 0x61,
        0x33, 0xf1, 0x92, 0x53, 0xff, 0x96, 0x09, 0x67, 0x69, 0x44, 0xa3, 0x4a,
        0xaf, 0x41, 0xda, 0x54, 0x46, 0xd1, 0xfa, 0xcd, 0x24, 0xaa, 0x88, 0xa7,
        0x19, 0xde, 0x40, 0xeb, 0x94, 0x5f, 0x45, 0x65, 0xf0, 0xb8, 0x34, 0xdd,
        0x0b, 0xb1, 0x29, 0xe9, 0x2a, 0x75, 0x87, 0x39, 0xcf, 0x79, 0x93, 0xa1,
        0xb2, 0x30, 0x15, 0x7a, 0x52, 0x12, 0x62, 0x36, 0xbf, 0x22, 0x4f, 0xc0,
        0xa2, 0x17, 0xc8, 0x99, 0x3a, 0x60, 0xa9, 0xa0, 0x58, 0xf6, 0x0a, 0x9e,
        0xf8, 0x6b, 0x26, 0x98
    ]

    for im_path in iglob('herders/static/herders/images/**/*.png',
                         recursive=True):
        encrypted = BitStream(filename=im_path)

        # Check if it is encrypted. 8th byte is 0x0B instead of the correct signature 0x0A
        encrypted.pos = 0x07 * 8
        signature = encrypted.peek('uint:8')
        if signature == 0x0B:
            print('Decrypting {}'.format(im_path))
            # Correct the PNG signature
            encrypted.overwrite('0x0A', encrypted.pos)

            # Replace bits with magic decrypted values
            try:
                while True:
                    pos = encrypted.pos
                    val = encrypted.peek('uint:8')
                    encrypted.overwrite(
                        Bits(uint=com2us_decrypt_values[val], length=8), pos)
            except ReadError:
                # EOF
                pass

            # Write it back to the file
            with open(im_path, 'wb') as f:
                encrypted.tofile(f)
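
The peek/overwrite loop above walks the stream one byte at a time, relying on overwrite() advancing .pos past each written byte and on peek() raising ReadError at the end of the data. For comparison, the same substitution can be expressed on a plain bytearray and wrapped back into a BitStream at the end; a small sketch, assuming the whole image fits in memory:

raw = bytearray(encrypted.tobytes())
raw[7] = 0x0A                                               # fix the PNG signature byte
raw[8:] = bytes(com2us_decrypt_values[b] for b in raw[8:])  # table substitution from byte 8 onward
decrypted = BitStream(bytes=raw)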
Example #3
def valid_checksum(source_address, dest_address, bytes_packet):
    bin_packet = BitStream(bytes=bytes_packet)
    length = len(bin_packet) // 8  # segment length in bytes, not bits
    checksum = bin_packet[128:144].bytes
    pseudo_header = TCPyPacket.create_pseudo_header(
        source_address, dest_address, length)
    bin_packet.overwrite(b'\x00\x00', 128)  # zero the checksum field before recomputing
    cs_calc = cs.Checksum16()
    cs_calc.process(bin_packet.tobytes())
    #cs_calc.process(pseudo_header)
    if checksum != cs_calc.finalbytes():
        return False
    return True
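
Checksum16 comes from an external module (imported as cs) that is not shown here. Assuming it computes the standard RFC 1071 16-bit ones'-complement checksum that TCP uses, an equivalent pure-Python sketch would be:

def ones_complement_checksum(data: bytes) -> int:
    # RFC 1071-style checksum: sum 16-bit words, fold carries, invert.
    if len(data) % 2:
        data += b'\x00'          # pad odd-length input with a zero byte
    total = 0
    for i in range(0, len(data), 2):
        total += (data[i] << 8) | data[i + 1]
        total = (total & 0xFFFF) + (total >> 16)
    return ~total & 0xFFFF

Note that a full TCP verification would also feed the pseudo-header into the sum; the call doing that is commented out in the example above.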
Example #4
class GenericHeaderPage:
    """a generic header page for the three type of indices"""
    def __init__(self, pageSize):
        self.pSize = pageSize
        self.memPage = BitStream(self.pSize * BYTESIZE)

    def write(self, byteContents, bytePos):
        """write byte contents into the mempage at offset bytePos"""
        self.memPage.overwrite(byteContents, bytePos * BYTESIZE)

    def tell(self):
        return self.memPage.bytepos

    def rawPage(self):
        """return only the contents of a page"""
        return self.memPage.bytes
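
A quick, hypothetical usage of this header page class, assuming BYTESIZE is the usual 8 bits per byte (the file name and field layout below are made up for illustration):

page = GenericHeaderPage(pageSize=512)            # 512-byte page of zero bits
page.write(b'\x00\x01', 0)                        # e.g. a 2-byte page-type field at byte 0
page.write((42).to_bytes(4, 'big'), 2)            # e.g. a 4-byte record count at byte 2
with open('header.bin', 'wb') as f:
    f.write(page.rawPage())                       # persist the raw 512-byte page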
Example #5
def decrypt_images(**kwargs):
    path = kwargs.pop('path', 'herders/static/herders/images')
    for im_path in iglob(f'{path}/**/*.png', recursive=True):
        encrypted = BitStream(filename=im_path)

        # Check if it is 'encrypted'. 8th byte is 0x0B instead of the correct signature 0x0A
        encrypted.pos = 0x07 * 8
        signature = encrypted.peek('uint:8')
        if signature == 0x0B:
            print(f'Decrypting {im_path}')
            # Correct the PNG signature
            encrypted.overwrite('0x0A', encrypted.pos)

            # Replace bits with magic decrypted values
            try:
                while True:
                    pos = encrypted.pos
                    val = encrypted.peek('uint:8')
                    encrypted.overwrite(
                        Bits(uint=com2us_decrypt_values[val], length=8), pos)
            except ReadError:
                # EOF
                pass

            # Write it back to the file
            with open(im_path, 'wb') as f:
                encrypted.tofile(f)

            continue

        # Check for weird jpeg format with extra header junk. Convert to png.
        encrypted.pos = 0
        if encrypted.peek('bytes:5') == b'Joker':
            print(f'Converting Joker container JPEG to PNG {im_path}')
            with open(im_path, 'rb') as f:
                img = JokerContainerFile(f)

            # Open it as a jpg and resave to disk
            try:
                new_imfile = Image.open(io.BytesIO(img.data.tobytes()))
                new_imfile.save(im_path)
            except IOError:
                print(f'Unable to open {im_path}')
def bit_rep(self, file_path):
    key = Fernet.generate_key()
    f = Fernet(key)
    offsite_bits = BitStream()
    with open(file_path, 'rb') as file:
        token = f.encrypt(file.read())
        data_stream = BitStream(bytes=token)
        # traverse all of the data stream in steps of size word_length
        # While data_stream.cut(word_length) is probably more efficient, value of n is useful here
        for n in range(0, len(data_stream), self.word_len):
            for m in range(0, self.word_len):
                if m in self.pos_list:
                    if data_stream[n + m] is True:
                        offsite_bits.append('0b1')
                    else:
                        offsite_bits.append('0b0')
                    # Using urandom because it is cryptographically secure and supported on major OSs.
                    # Replaces the bits at the pos_list positions (within each word) with randomly generated bits.
                    data_stream.overwrite(bin(ord(urandom(1)) % 2), m + n)
    return data_stream, offsite_bits, key
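
bit_rep strips the bits at the self.pos_list positions out of each word, stores them in offsite_bits, and overwrites them with random bits. The matching restore step is not part of this example (a module-level variant of it appears further down this page); a hypothetical inverse, assuming the same self.word_len and self.pos_list, could look like:

def bit_restore(self, data_stream, offsite_bits, key):
    # Hypothetical inverse of bit_rep: put the stored bits back in order,
    # then decrypt the reassembled Fernet token.
    offsite_bits.pos = 0
    for n in range(0, len(data_stream), self.word_len):
        for m in range(0, self.word_len):
            if m in self.pos_list:
                data_stream.overwrite(offsite_bits.read(1), n + m)
    return Fernet(key).decrypt(data_stream.bytes)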
def pack_pak(dir, file_list = None, align_toc = 16, align_files = 16, eof = False):
  
  if file_list == None:
    file_list = sorted(os.listdir(dir))
    
  num_files  = len(file_list)
  toc_length = (num_files + 1) * 4
  
  if eof:
    toc_length += 1
  
  if toc_length % align_toc > 0:
    toc_length += align_toc - (toc_length % align_toc)
  
  archive_data = BitStream(uintle = 0, length = toc_length * 8)
  archive_data.overwrite(bitstring.pack("uintle:32", num_files), 0)
  
  for file_num, item in enumerate(file_list):
    full_path = os.path.join(dir, item)
    
    if os.path.isfile(full_path):
      data = pack_file(full_path)
    else:
      data = pack_dir(full_path, align_toc, align_files, eof)
    
    file_size = data.len / 8
    padding = 0
    
    if file_size % align_files > 0:
      padding = align_files - (file_size % align_files)
      data.append(BitStream(uintle = 0, length = padding * 8))
    
    file_pos = archive_data.len / 8
    archive_data.overwrite(bitstring.pack("uintle:32", file_pos), (file_num + 1) * 32)
    archive_data.append(data)
    
    del data
  
  if eof:
    archive_data.overwrite(bitstring.pack("uintle:32", archive_data.len / 8), (num_files + 1) * 32)
  
  return archive_data
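
pack_pak lays the archive out as a little-endian 32-bit file count, one 32-bit byte offset per file (plus an optional trailing offset equal to the archive size when eof is set), padding up to align_toc, and then the aligned file data. The get_pak_files/parse_pak_toc helpers used elsewhere on this page are not shown; a minimal reader for this layout, sketched under the assumption that it matches pack_pak exactly:

def read_pak_toc(data):
    # Hypothetical reader for the TOC written by pack_pak above.
    # Returns (start, end) byte ranges; ends include any alignment padding.
    data = BitStream(data)
    data.pos = 0
    num_files = data.read("uintle:32")
    starts = [data.read("uintle:32") for _ in range(num_files)]
    ends = starts[1:] + [len(data) // 8]
    return list(zip(starts, ends))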
class GmoFile():
  def __init__(self, data = None, offset = 0, filename = None):
    self.data = None
    self.__gim_files = []
    
    self.gimconv = GimConverter()
    
    if not data == None:
      self.load_data(data, offset)
    elif not filename == None:
      self.load_file(filename)
  
  def load_file(self, filename):
    data = BitStream(filename = filename)
    self.load_data(data)
  
  def load_data(self, data, offset = 0):
    if not data[offset * 8 : offset * 8 + GMO_MAGIC.len] == GMO_MAGIC:
      _LOGGER.error("GMO header not found at 0x%04X." % offset)
      return
    
    data.bytepos = offset + GMO_SIZE_OFFSET
    gmo_size = data.read("uintle:32") + GMO_SIZE_DIFF
    
    self.data = BitStream(data[offset * 8 : (offset + gmo_size) * 8])
    
    self.__find_gims()
  
  def save(self, filename):
    with open(filename, "wb") as f:
      self.data.tofile(f)
  
  def __find_gims(self):
    if self.data == None:
      return
    
    self.__gim_files = []
    
    for gim_start in self.data.findall(GIM_MAGIC, bytealigned = True):
      gim_size_pos  = gim_start + (GIM_SIZE_OFFSET * 8) # Bit pos.
      gim_size      = self.data[gim_size_pos : gim_size_pos + 32].uintle + GIM_SIZE_DIFF
      
      # And turn it into a byte position.
      gim_start /= 8
      self.__gim_files.append((gim_start, gim_size))
  
  def gim_count(self):
    return len(self.__gim_files)
  
  def get_gim(self, gim_id):
    if gim_id >= self.gim_count():
      raise GimIndexError("Invalid GIM ID.")
    
    gim_start, gim_size = self.__gim_files[gim_id]
    gim_data = self.data[gim_start * 8 : (gim_start + gim_size) * 8]
    
    return gim_data
  
  def replace_png_file(self, gim_id, filename, quantize_to_fit = True):
  
    if quantize_to_fit:
      quantize_order = [QuantizeType.auto, QuantizeType.index8, QuantizeType.index4]
    else:
      quantize_order = [QuantizeType.auto]
    quantize_id = 0
    
    (fd, temp_gim) = tempfile.mkstemp(suffix = ".gim", prefix = "sdse-")
    os.close(fd) # Don't need the open file handle.
    
    while True:
      self.gimconv.png_to_gim(filename, temp_gim, quantize_order[quantize_id])
      
      try:
        self.replace_gim_file(gim_id, temp_gim)
      except GimSizeError:
        quantize_id += 1
      except GimIndexError:
        os.remove(temp_gim)
        raise
      else:
        # If we didn't except, that means we succeeded, so we can leave.
        _LOGGER.debug("Quantized PNG to %s" % quantize_order[quantize_id])
        break
      
      if quantize_id >= len(quantize_order):
        _LOGGER.error("Unable to convert %s into a GIM small enough to insert." % filename)
        break
    
    os.remove(temp_gim)
  
  def replace_gim_file(self, gim_id, filename):
    gim_data = BitStream(filename = filename)
    self.replace_gim(gim_id, gim_data)
  
  def replace_gim(self, gim_id, gim_data):
    if gim_id >= self.gim_count():
      raise GimIndexError("Invalid GIM ID.")
    
    gim_start, gim_size = self.__gim_files[gim_id]
    
    if gim_data.len / 8 > gim_size:
      raise GimSizeError("GIM too large. %d bytes > %d bytes" % (gim_data.len / 8, gim_size))
      # return
    
    self.data.overwrite(gim_data, gim_start * 8)
    
    # Leave the length alone, though, because we know we have that much space
    # to work with from the original GIM file that was there, and there's no
    # point in shrinking that down if someone happens to want to re-replace
    # this GIM file without reloading the whole thing.
  
  def extract(self, directory, to_png = False):
    if not os.path.isdir(directory):
      os.makedirs(directory)
    
    for id in range(self.gim_count()):
      gim = self.get_gim(id)
      
      out_gim = os.path.join(directory, "%04d.gim" % id)
      out_png = os.path.join(directory, "%04d.png" % id)
      
      with open(out_gim, "wb") as f:
        gim.tofile(f)
      
      if to_png:
        self.gimconv.gim_to_png(out_gim, out_png)
        os.remove(out_gim)
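
A short, hypothetical usage of GmoFile, extracting every embedded GIM texture as a PNG (the path below is a placeholder):

gmo = GmoFile(filename="model.gmo")
print("%d GIM files found" % gmo.gim_count())
gmo.extract("textures", to_png=True)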
Example #9
class Page:
    """
    create a page of size self.pagesize in memory
    """
    def __init__(self, size, capacityFunction, entrySize=15):
        """
        @param capacityFunction: function to find the capacity of the page
        """
        self.pSize = size
        self.memPage = BitStream(self.pSize * BYTESIZE)

        # store a list of entries for this page
        self.entries = []
        # size of (key, rowid), default to 15 bytes
        self.entrySize = entrySize
        self.entriesCapacity = capacityFunction(self.pSize, self.entrySize)
        self.spaceRemaining = self.entriesCapacity

    def write(self, byteContents, bytePos):
        """write byteContents at offset bytePos in self.memPage
        - bytePos is relative to the beginning of this page
        - internally, this also advances the bit pointer to bytePos * 8 + the bit length of byteContents
        """
        # NOTE: overwrite the contents at bytePos of this memory page with byteContents
        self.memPage.overwrite(byteContents, bytePos * BYTESIZE)

    def writeToFile(self, filePointer):
        """write the page to file"""
        self.memPage.tofile(filePointer)

    def read(self, bytePos, formatString):
        """read the content from bytePos and convert to formatString"""
        self.memPage.bytepos = bytePos
        return self.memPage.read(formatString)

    def tell(self):
        """return the byte position of the page currently in"""
        return self.memPage.bytepos

    def seek(self, absBytePos, relative=0):
        """seek to the given byte position"""
        if relative:
            self.memPage.bytepos += absBytePos
        else:
            self.memPage.bytepos = absBytePos

    def rawPage(self):
        """return only the contents of a page"""
        return self.memPage.bytes

    def writeEntriesToPage(self, offset=0):
        """write all the raw entries in self.entries into the raw page of this object (or an object inheriting from Page)"""
        self.seek(offset)
        # note, if the entry list is empty ==> automatically handles
        for entry in self.entries:
            writableEntry = entry.parseToBytes()
            self.write(writableEntry, self.tell())

    def insertEntry(self, entry):
        """
        @param entry: a directory entry object
        """
        # constraint on the capacity
        if self.spaceRemaining <= 0:
            return 0

        self.spaceRemaining -= 1
        self.entries.append(entry)
        return 1

    def replaceEntries(self, entries):
        """overwrite the existing entry list with entries

        - if the original entry list is bigger ==> it shrinks
        - if the original entry list is smaller ==> it grows
        - assumes that the size of the entries list is no larger than entriesCapacity
        """
        size = len(entries)

        assert (size <= self.entriesCapacity)
        self.spaceRemaining = self.entriesCapacity - size

        assert (isinstance(entries, list))
        self.entries = entries

    def getSpaceRemaining(self):
        return self.spaceRemaining

    def getEntriesCapacity(self):
        return self.entriesCapacity

    def getEntrySize(self):
        return self.entrySize

    def allEntries(self):
        return self.entries

    def hasEntries(self):
        return self.getSpaceRemaining() < self.getEntriesCapacity()

    def numRecords(self):
        return len(self.entries)
class ModelPak():
  
  def __init__(self, filename = None):
    self.__data = None
    self.__gmo_files = []
    
    if filename:
      self.load_file(filename)
  
  def load_file(self, filename):
    data = BitStream(filename = filename)
    self.load_data(data)
  
  def load_data(self, data):
    files = [entry_data for (entry_name, entry_data) in get_pak_files(data)]
    
    # There are always at least four files in a model pak.
    # The first three I don't know a lot about, and then
    # the GMO files come after that.
    if len(files) < 4:
      _LOGGER.error("Invalid model PAK. %d files found, but at least 4 needed." % len(files))
      return
    
    # The name pak contains a list of null-terminated names for
    # each of the models, stored in our standard pak format.
    name_pak = files[0]
    names    = [entry_data.bytes.strip('\0') for (entry_name, entry_data) in get_pak_files(name_pak)]
    
    # Most of the model paks in SDR2 have a fourth unknown file before the models
    # start, so we'll just take everything from the back end and call it a day.
    models = files[-len(names):]
    
    # Now, we don't get file positions from the unpacker, so let's find those
    # and start filling out our internal list of GMO files.
    file_starts, file_ends = parse_pak_toc(data)
    model_starts = file_starts[-len(names):]
    
    for i, model in enumerate(models):
      # First of all, not all of the "models" present are actually GMO files.
      # It's rare, but there is the occasional other unknown format.
      # So let's make sure we have a GMO file.
      if not model[:GMO_MAGIC.len] == GMO_MAGIC:
        # print i, "Not a GMO."
        continue
      
      name  = names[i]
      gmo   = GmoFile(data = model)
      size  = model.len / 8
      start = model_starts[i]
      
      self.__gmo_files.append({
        _NAME:   name,
        _START:  start,
        _SIZE:   size,
        _DATA:   gmo,
      })
    
    self.__data = BitStream(data)
  
  def save(self, filename):
    self.__update_data()
    with open(filename, "wb") as f:
      self.__data.tofile(f)
  
  def __update_data(self):
    for gmo in self.__gmo_files:
      start = gmo[_START] * 8
      data  = gmo[_DATA].data
      
      self.__data.overwrite(data, start)
  
  def get_data(self):
    self.__update_data()
    return self.__data
  
  def gmo_count(self):
    return len(self.__gmo_files)
    
  def get_gmo(self, index):
    if index is None or index >= self.gmo_count():
      _LOGGER.error("Invalid GMO ID %d." % index)
      return None
    
    return self.__gmo_files[index][_DATA]
  
  def get_gmos(self):
    return [gmo[_DATA] for gmo in self.__gmo_files]
  
  def get_name(self, index):
    if index >= self.gmo_count():
      _LOGGER.error("Invalid GMO ID %d." % index)
      return None
    
    return self.__gmo_files[index][_NAME]
  
  def get_names(self):
    return [gmo[_NAME] for gmo in self.__gmo_files]
  
  def id_from_name(self, name):
    for i in range(self.gmo_count()):
      if self.__gmo_files[i][_NAME] == name:
        return i
    
    return None
  
  def gmo_from_name(self, name):
    id = self.id_from_name(name)
    
    if id is not None:
      return self.get_gmo(id)
    else:
      return None
  
  def replace_gmo_file(self, index, filename):
    gmo = GmoFile(filename = filename)
    self.replace_gmo(index, gmo)
    
  def replace_gmo(self, index, new_gmo):
    if index >= self.gmo_count():
      _LOGGER.error("Invalid GMO ID %d." % index)
      return None
    
    gmo = self.__gmo_files[index]
    
    if new_gmo.data.len / 8 > gmo[_SIZE]:
      _LOGGER.error("GMO too large to insert. %d bytes > %d bytes" % (new_gmo.data.len / 8, gmo[_SIZE]))
      return
    
    self.__gmo_files[index][_DATA] = new_gmo
    
    # Leave the length alone, though, because we know we have that much space
    # to work with from the original GMO file that was there, and there's no
    # point in shrinking that down if someone happens to want to re-replace
    # this GMO file without reloading the whole thing.
  
  def extract(self, directory, to_png = False):
    if not os.path.isdir(directory):
      os.makedirs(directory)
    
    for id in range(self.gmo_count()):
      gmo  = self.get_gmo(id)
      name = self.get_name(id)
      
      if gmo.gim_count() == 0:
        continue
      
      out_dir = os.path.join(directory, name)
      gmo.extract(out_dir, to_png)
Example #11
class RecordPage:
    """
    create a page of size self.pagesize *in memory*
    """
    def __init__(self, size, byteContents=None):
        self.pSize = size

        if not byteContents:
            self.memPage = BitStream(self.pSize * BYTESIZE)
        else:
            self.memPage = BitStream(byteContents)
            self.memPage.bytepos = len(self.memPage) // 8

    def rawRecordPage(self):
        """return a bytes object
        - if the page is only partially filled, it returns just the filled contents
        """
        # NOTE: the indexing syntax is in terms of bits
        return self.memPage[:self.memPage.pos].bytes

    def overWrite(self, byteContents, offset=0):
        """write byteContents at offset in self.memPage
        - offset is relative to the beginning of this page
        - internally, this also advances the bit pointer to offset * 8 + the bit length of byteContents
        """
        # overwrite the contents at offset
        self.memPage.bytepos = offset
        self.memPage.overwrite(byteContents, self.memPage.pos)

        if self.memPage.bytepos >= self.pSize:
            return 1
        return 0

    def write(self, filePointer, offset):
        """write the page to file"""
        filePointer.seek(offset, 0)
        filePointer.write(self.rawRecordPage())
        # clean up the in memory page
        self.flush()

    def read(self, bytePos, formatString):
        """read the content from bytePos and convert to formatString"""
        self.memPage.bytepos = bytePos
        return self.memPage.read(formatString)

    def tell(self):
        """return the byte position of the page currently in"""
        return self.memPage.bytepos
    
    def seek(self, absBytePos, relative=0):
        """seek to the given byte position"""
        if relative:
            self.memPage.bytepos += absBytePos * BYTESIZE
        else:
            self.memPage.bytepos = absBytePos * BYTESIZE
    
    def pageSize(self):
        return self.pSize

    def getAllRecords(self):
        """return a list of Record objects in this page
        assumes the page is filled
        """
        pageRecordList = []
        # NOTE: the last record should end at self.memPage.bytepos thanks to the overWrite() method
        for bytePos in range(0, self.memPage.bytepos, RECORDSIZE):
            # starting record offset
            firstName = self.read(bytePos, "bytes:{}".format(FIRSTNAMESIZE)).decode("utf-8")
            lastName = self.read(self.tell(), "bytes:{}".format(LASTNAMESIZE)).decode("utf-8")
            email = self.read(self.tell(), "bytes:{}".format(EMAILSIZE)).decode("utf-8")
            pageRecordList.append(Record(RECORDSIZE, (firstName, lastName, email)))
            # NOTE: includes Null character after each field
        return pageRecordList
    
    def hasRecords(self):
        return self.memPage.bytepos > 0
    
    def flush(self):
        self.memPage = BitStream(self.pSize * BYTESIZE)
Example #12
def decrypt_images(**kwargs):
    path = kwargs.pop('path', 'herders/static/herders/images')
    for im_path in iglob(f'{path}/**/*.png', recursive=True):
        encrypted = BitStream(filename=im_path)

        # Check if it is 'encrypted'. 8th byte is 0x0B instead of the correct signature 0x0A
        encrypted.pos = 0x07 * 8
        signature = encrypted.peek('uint:8')
        if signature == 0x0B:
            print(f'Decrypting {im_path}')
            # Correct the PNG signature
            encrypted.overwrite('0x0A', encrypted.pos)

            # Replace bits with magic decrypted values
            try:
                while True:
                    pos = encrypted.pos
                    val = encrypted.peek('uint:8')
                    encrypted.overwrite(
                        Bits(uint=com2us_decrypt_values[val], length=8), pos)
            except ReadError:
                # EOF
                pass

            # Write it back to the file
            with open(im_path, 'wb') as f:
                encrypted.tofile(f)

            continue

        # Check for weird jpeg format with extra header junk. Convert to png.
        encrypted.pos = 0
        if encrypted.peek('bytes:5') == b'Joker':
            print(f'Converting Joker container JPEG to PNG {im_path}')
            with open(im_path, 'rb') as f:
                bts = f.read()
                first_img = bts.find(b'JFIF')
                second_img = bts.rfind(b'JFIF')
                imgs = []
                if second_img > -1 and first_img != second_img:
                    imgs = [bts[:second_img], bts[second_img:]]
                    # Add the Joker magic & header to imitate a standalone file
                    imgs[1] = imgs[0][imgs[0].find(b'Joker'):first_img] + imgs[1]
                    imgs = [
                        JokerContainerFile(img, read=False) for img in imgs
                    ]
                else:
                    img = JokerContainerFile(bts, read=False)

            # Open it as a jpg and resave to disk
            try:
                if len(imgs) > 1:
                    new_imfile = Image.open(io.BytesIO(imgs[0].data.tobytes()))
                    new_mask = Image.open(io.BytesIO(
                        imgs[1].data.tobytes())).convert('L')
                    new_imfile.putalpha(new_mask)
                else:
                    new_imfile = Image.open(io.BytesIO(img.data.tobytes()))
                new_imfile.save(im_path)
            except IOError:
                print(f'Unable to open {im_path}')
Example #13
def prepare_bitstream(self) -> BitStream:
    bs = BitStream(length=self.length * 8)
    for offset, block in self.contained:
        with bitstream_offset(bs, offset):
            bs.overwrite(block.prepare_bitstream())
    return bs
Example #14
    for box_header in Parser.parse(bstr, headers_only=True):
        if bstr.bytepos >= mdia_boxes_end:
            break

        if box_header.type != b"hdlr":
            box = Parser.parse_box(bstr, box_header)
            box.load(bstr)

        else:
            # hdlr.name should end with a b'\0'; if it doesn't, add one for safety
            hdlr_bstr = BitStream(bytes(box_header))
            hdlr_header = Parser.parse_header(hdlr_bstr)
            hdlr_header.box_size += 1
            hdlr_bstr.overwrite(bytes(hdlr_header), 0)
            hdlr_bstr.append(
                bstr.read("bytes:{}".format(box_header.box_size -
                                            box_header.header_size)) + b'\0')
            box = Parser.parse_box(hdlr_bstr, hdlr_header)
            box.load(hdlr_bstr)

            # Prevent adding one too many b'\0'
            if box.padding.startswith(b'\0'):
                box.padding = box.padding[1:]

        mdia.append(box)

del bstr

moov.refresh_box_size()
 def create_archives(self):
   
   try:
     self.width = self.parent.width()
     self.height = self.parent.height()
     self.x = self.parent.x()
     self.y = self.parent.y()
   except:
     self.width = 1920
     self.height = 1080
     self.x = 0
     self.y = 0
   
   self.progress = QProgressDialog("Reading...", QtCore.QString(), 0, 7600, self.parent)
   self.progress.setWindowModality(Qt.Qt.WindowModal)
   self.progress.setValue(0)
   self.progress.setAutoClose(False)
   self.progress.setMinimumDuration(0)
   
   USRDIR     = os.path.join(common.editor_config.iso_dir, "PSP_GAME", "USRDIR")
   eboot_path = os.path.join(common.editor_config.iso_dir, "PSP_GAME", "SYSDIR", "EBOOT.BIN")
   
   eboot = BitStream(filename = eboot_path)
   eboot = eboot_patch.apply_eboot_patches(eboot)
   
   # So we can loop. :)
   ARCHIVE_INFO = [
     {
       "dir":  common.editor_config.data00_dir,
       "cpk":  os.path.join(USRDIR, "data00.cpk"),
       "csv":  os.path.join("data", "data00.csv" if not common.editor_config.quick_build else "data00-quick.csv"),
       "name": "data00.cpk",
       "pack": common.editor_config.pack_data00,
     },
     {
       "dir":  common.editor_config.data01_dir,
       "cpk":  os.path.join(USRDIR, "data01.cpk"),
       "csv":  os.path.join("data", "data01.csv" if not common.editor_config.quick_build else "data01-quick.csv"),
       "name": "data01.cpk",
       "pack": common.editor_config.pack_data01,
     },
   ]
   
   # temp_dir = tempfile.mkdtemp(prefix = "sdse-")
   temp_dir = common.editor_config.build_cache
   
   for archive in ARCHIVE_INFO:
     
     if not archive["pack"]:
       continue
     
     self.progress.setWindowTitle("Building " + archive["name"])
     
     toc_info = {}
     file_list = None
     
     if archive["toc"]:
       file_list = []
       
       toc = get_toc(eboot, archive["toc"])
       
       for entry in toc:
         filename  = entry["filename"]
         pos_pos   = entry["file_pos_pos"]
         len_pos   = entry["file_len_pos"]
         
         toc_info[filename] = [pos_pos, len_pos]
         file_list.append(filename)
     
     # Causes memory issues if I use the original order, for whatever reason.
     file_list = None
     
     csv_template_f  = open(archive["csv"], "rb")
     csv_template    = csv.reader(csv_template_f)
     
     csv_out_path    = os.path.join(temp_dir, "cpk.csv")
     csv_out_f       = open(csv_out_path, "wb")
     csv_out         = csv.writer(csv_out_f)
     
     for row in csv_template:
       if len(row) < 4:
         continue
       
       base_path = row[0]
       
       real_path = os.path.join(archive["dir"], base_path)
       out_path  = os.path.join(temp_dir, archive["name"], base_path)
       
       self.progress.setValue(self.progress.value() + 1)
       self.progress.setLabelText("Reading...\n%s" % real_path)
       
       # All items in the CPK list should be files.
       # Therefore, if we have a directory, then it needs to be packed.
       if os.path.isdir(real_path):
         if self.__cache_outdated(real_path, out_path):
           out_dir = os.path.dirname(out_path)
           try:
             os.makedirs(out_dir)
           except:
             pass
           
           data = pack_dir(real_path)
           with open(out_path, "wb") as out_file:
             data.tofile(out_file)
           del data
           
       elif os.path.isfile(real_path):
       # If it's a file, though, we can just use it directly.
         out_path = real_path
         
       row[0] = out_path
       csv_out.writerow(row)
     
     csv_template_f.close()
     csv_out_f.close()
     
     self.__pack_cpk(csv_out_path, archive["cpk"])
     
     # We're playing fast and loose with the file count anyway, so why not?
     self.file_count += 1
     self.progress.setValue(self.file_count)
     self.progress.setLabelText("Saving " + archive["name"] + "...")
     
     if archive["toc"]:
       for entry in table_of_contents:
         if not entry in toc_info:
           _LOGGER.warning("%s missing from %s table of contents." % (entry, archive["name"]))
           continue
         
         file_pos  = table_of_contents[entry]["pos"]
         file_size = table_of_contents[entry]["size"]
         
         eboot.overwrite(BitStream(uintle = file_pos, length = 32),  toc_info[entry][0] * 8)
         eboot.overwrite(BitStream(uintle = file_size, length = 32), toc_info[entry][1] * 8)
     
     del table_of_contents
   
   self.progress.setWindowTitle("Building...")
   self.progress.setLabelText("Saving EBOOT.BIN...")
   self.progress.setValue(self.progress.maximum())
   
   with open(eboot_path, "wb") as f:
     eboot.tofile(f)
     
   # Text replacement
   to_replace = eboot_text.get_eboot_text()
   for replacement in to_replace:
   
     orig = bytearray(replacement.orig, encoding = replacement.enc)
     
     # If they left something blank, write the original text back.
     if len(replacement.text) == 0:
       data = orig
     else:
       data = bytearray(replacement.text, encoding = replacement.enc)
     
     pos  = replacement.pos.int + eboot_offset
     
     padding = len(orig) - len(data)
     if padding > 0:
       # Null bytes to fill the rest of the space the original took.
       data.extend(bytearray(padding))
     
     data = ConstBitStream(bytes = data)
     eboot.overwrite(data, pos * 8)
   
   eboot_out = os.path.join(common.editor_config.iso_dir, "PSP_GAME", "SYSDIR", "EBOOT.BIN")
   
   with open(eboot_out, "wb") as f:
     eboot.tofile(f)
   
   self.progress.close()
    # traverse all of the data stream in steps of size word_length
    # While data_stream.cut(word_length) is probably more efficient, value of n is useful here
    for n in range(0, len(data_stream), word_length):
        # print str(data_stream.read(word_length)) + " " + str(i)
        # i += 1
        for m in range(0, word_length):
            
            if m in bit_pos_test:
                # print data_stream[n+m]  #debugging
                if data_stream[n+m] is True:
                    offsite_bits.append('0b1')
                else:
                    offsite_bits.append('0b0')
                # Using urandom because of cryptographic security and because it is supported on major OSs
                # Replaces the bits at bit_pos (within each word) with randomly generated bits.
                data_stream.overwrite(bin(ord(urandom(1)) % 2), m+n)
    print()
    print("Exchanged bits token")
    print()
    print(data_stream.bin)
    print()
    # f.decrypt(data_stream.bytes)
    print("Stored bits")
    print()
    print(offsite_bits.bin)

    for n in range (0, len(data_stream), word_length):
        for m in range (0, word_length):
            if m in bit_pos_test:
                data_stream.overwrite(offsite_bits.read(1), m+n)
    # print f.decrypt(data_stream.bytes)
    def create_archives(self):

        try:
            self.width = self.parent.width()
            self.height = self.parent.height()
            self.x = self.parent.x()
            self.y = self.parent.y()
        except:
            self.width = 1920
            self.height = 1080
            self.x = 0
            self.y = 0

        self.file_count = 0

        self.progress = QProgressDialog("Reading...", QtCore.QString(), 0,
                                        72000, self.parent)
        self.progress.setWindowModality(Qt.Qt.WindowModal)
        self.progress.setValue(0)
        self.progress.setAutoClose(False)
        self.progress.setMinimumDuration(0)

        # with open(common.editor_config.eboot_orig, "rb") as f:
        with open(
                os.path.join(common.editor_config.iso_dir, "PSP_GAME",
                             "SYSDIR", "EBOOT.BIN"), "rb") as f:
            eboot = BitStream(bytes=f.read())

        eboot, eboot_offset = eboot_patch.apply_eboot_patches(eboot)

        USRDIR = os.path.join(common.editor_config.iso_dir, "PSP_GAME",
                              "USRDIR")

        # So we can loop. :)
        ARCHIVE_INFO = [
            {
                "toc": UMDIMAGES.umdimage,
                "dir": common.editor_config.umdimage_dir,
                "dat": os.path.join(USRDIR, "umdimage.dat"),
                "name": "umdimage.dat",
                "pack": common.editor_config.pack_umdimage,
                "eof": False,
            },
            {
                "toc": UMDIMAGES.umdimage2,
                "dir": common.editor_config.umdimage2_dir,
                "dat": os.path.join(USRDIR, "umdimage2.dat"),
                "name": "umdimage2.dat",
                "pack": common.editor_config.pack_umdimage2,
                "eof": False,
            },
            {
                "toc": None,
                "dir": common.editor_config.voice_dir,
                "dat": os.path.join(USRDIR, "voice.pak"),
                "name": "voice.pak",
                "pack": common.editor_config.pack_voice,
                "eof": True,
            },
            {
                "toc": None,
                "dir": common.editor_config.bgm_dir,
                "dat": os.path.join(USRDIR, "bgm.pak"),
                "name": "bgm.pak",
                "pack": common.editor_config.pack_bgm,
                "eof": True,
            },
        ]

        for archive in ARCHIVE_INFO:

            if not archive["pack"]:
                continue

            self.progress.setWindowTitle("Building " + archive["name"])

            toc_info = {}
            file_list = None

            if archive["toc"]:
                file_list = []

                toc = get_toc(eboot, archive["toc"])

                for entry in toc:
                    filename = entry["filename"]
                    pos_pos = entry["file_pos_pos"]
                    len_pos = entry["file_len_pos"]

                    toc_info[filename] = [pos_pos, len_pos]
                    file_list.append(filename)

            # Causes memory issues if I use the original order, for whatever reason.
            file_list = None

            with io.FileIO(archive["dat"], "w") as handler:
                table_of_contents = self.pack_dir(archive["dir"],
                                                  handler,
                                                  file_list=file_list,
                                                  eof=archive["eof"])

            # We're playing fast and loose with the file count anyway, so why not?
            self.file_count += 1
            self.progress.setValue(self.file_count)
            self.progress.setLabelText("Saving " + archive["name"] + "...")

            if archive["toc"]:
                for entry in table_of_contents:
                    if not entry in toc_info:
                        _LOGGER.warning(
                            "%s missing from %s table of contents." %
                            (entry, archive["name"]))
                        continue

                    file_pos = table_of_contents[entry]["pos"]
                    file_size = table_of_contents[entry]["size"]

                    eboot.overwrite(BitStream(uintle=file_pos, length=32),
                                    toc_info[entry][0] * 8)
                    eboot.overwrite(BitStream(uintle=file_size, length=32),
                                    toc_info[entry][1] * 8)

            del table_of_contents

        self.progress.setLabelText("Saving EBOOT.BIN...")
        self.progress.setValue(self.progress.maximum())

        # Text replacement
        to_replace = eboot_text.get_eboot_text()
        for replacement in to_replace:

            orig = bytearray(replacement.orig, encoding=replacement.enc)

            # If they left something blank, write the original text back.
            if len(replacement.text) == 0:
                data = orig
            else:
                data = bytearray(replacement.text, encoding=replacement.enc)

            pos = replacement.pos.int + eboot_offset

            padding = len(orig) - len(data)
            if padding > 0:
                # Null bytes to fill the rest of the space the original took.
                data.extend(bytearray(padding))

            data = ConstBitStream(bytes=data)
            eboot.overwrite(data, pos * 8)

        eboot_out = os.path.join(common.editor_config.iso_dir, "PSP_GAME",
                                 "SYSDIR", "EBOOT.BIN")

        with open(eboot_out, "wb") as f:
            eboot.tofile(f)

        self.progress.close()
Example #18
def change_reg(reg_no, data, pos_start):
    bit_array = BitStream(bin(read_reg(reg_no)))
    bit_array.pos = pos_start
    bit_array.overwrite('0b0101')
    return bit_array
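
change_reg reads a register value through an unshown read_reg helper, seeks to pos_start, and stamps the fixed pattern 0b0101 over the four bits there (overwrite without an explicit position writes at the current .pos). A quick illustration with a mocked register value:

bits = BitStream(bin(0b10011001))   # '0b10011001' -> the 8 bits 10011001
bits.pos = 2
bits.overwrite('0b0101')            # replaces bits 2-5
print(bits.bin)                     # 10010101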
Example #19
class GmoFile():
    def __init__(self, data=None, offset=0, filename=None):
        self.data = None
        self.__gim_files = []

        self.gimconv = GimConverter()

        if not data == None:
            self.load_data(data, offset)
        elif not filename == None:
            self.load_file(filename)

    def load_file(self, filename):
        data = BitStream(filename=filename)
        self.load_data(data)

    def load_data(self, data, offset=0):
        if not data[offset * 8:offset * 8 + GMO_MAGIC.len] == GMO_MAGIC:
            _LOGGER.error("GMO header not found at 0x%04X." % offset)
            return

        data.bytepos = offset + GMO_SIZE_OFFSET
        gmo_size = data.read("uintle:32") + GMO_SIZE_DIFF

        self.data = BitStream(data[offset * 8:(offset + gmo_size) * 8])

        self.__find_gims()

    def save(self, filename):
        with open(filename, "wb") as f:
            self.data.tofile(f)

    def __find_gims(self):
        if self.data == None:
            return

        self.__gim_files = []

        for gim_start in self.data.findall(GIM_MAGIC, bytealigned=True):
            gim_size_pos = gim_start + (GIM_SIZE_OFFSET * 8)  # Bit pos.
            gim_size = self.data[gim_size_pos:gim_size_pos +
                                 32].uintle + GIM_SIZE_DIFF

            # And turn it into a byte position.
            gim_start //= 8
            self.__gim_files.append((gim_start, gim_size))

    def gim_count(self):
        return len(self.__gim_files)

    def get_gim(self, gim_id):
        if gim_id >= self.gim_count():
            raise GimIndexError("Invalid GIM ID.")

        gim_start, gim_size = self.__gim_files[gim_id]
        gim_data = self.data[gim_start * 8:(gim_start + gim_size) * 8]

        return gim_data

    def replace_png_file(self, gim_id, filename, quantize_to_fit=True):

        if quantize_to_fit:
            quantize_order = [
                QuantizeType.auto, QuantizeType.index8, QuantizeType.index4
            ]
        else:
            quantize_order = [QuantizeType.auto]
        quantize_id = 0

        (fd, temp_gim) = tempfile.mkstemp(suffix=".gim", prefix="sdse-")
        os.close(fd)  # Don't need the open file handle.

        while True:
            self.gimconv.png_to_gim(filename, temp_gim,
                                    quantize_order[quantize_id])

            try:
                self.replace_gim_file(gim_id, temp_gim)
            except GimSizeError:
                quantize_id += 1
            except GimIndexError:
                os.remove(temp_gim)
                raise
            else:
                # If we didn't except, that means we succeeded, so we can leave.
                _LOGGER.debug("Quantized PNG to %s" %
                              quantize_order[quantize_id])
                break

            if quantize_id >= len(quantize_order):
                _LOGGER.error(
                    "Unable to convert %s into a GIM small enough to insert." %
                    filename)
                break

        os.remove(temp_gim)

    def replace_gim_file(self, gim_id, filename):
        gim_data = BitStream(filename=filename)
        self.replace_gim(gim_id, gim_data)

    def replace_gim(self, gim_id, gim_data):
        if gim_id >= self.gim_count():
            raise GimIndexError("Invalid GIM ID.")

        gim_start, gim_size = self.__gim_files[gim_id]

        if gim_data.len // 8 > gim_size:
            raise GimSizeError("GIM too large. %d bytes > %d bytes" %
                               (gim_data.len // 8, gim_size))
            # return

        self.data.overwrite(gim_data, gim_start * 8)

        # Leave the length alone, though, because we know we have that much space
        # to work with from the original GIM file that was there, and there's no
        # point in shrinking that down if someone happens to want to re-replace
        # this GIM file without reloading the whole thing.

    def extract(self, directory, to_png=False):
        if not os.path.isdir(directory):
            os.makedirs(directory)

        for id in range(self.gim_count()):
            gim = self.get_gim(id)

            out_gim = os.path.join(directory, "%04d.gim" % id)
            out_png = os.path.join(directory, "%04d.png" % id)

            with open(out_gim, "wb") as f:
                gim.tofile(f)

            if to_png:
                self.gimconv.gim_to_png(out_gim, out_png)
                os.remove(out_gim)