def repackage(self, extracted_apk_dir, dex_dir, have_locators):
    """Package this store's dexes into jars, concatenate them into a single
    xz-compressed blob, and copy blob + metadata back into the apk tree.

    Assumes dexes in dex_dir are named ``<dex_prefix>2.dex``, ``<dex_prefix>3.dex``,
    ... (the loop maps dex number i+1 to store jar number i).
    """
    BaseDexMode.repackage(self, extracted_apk_dir, dex_dir, have_locators)
    dex_sizes = {}
    jar_sizes = {}

    concat_jar_path = join(dex_dir, self._store_name + '.dex.jar')
    concat_jar_meta = join(dex_dir, 'metadata.txt')
    dex_metadata = DexMetadata(have_locators=have_locators,
                               store=self._store_id,
                               dependencies=self._dependencies)
    with open(concat_jar_path, 'wb') as concat_jar:
        # Supports at most 99 secondary dexes; stops at the first gap.
        for i in range(1, 100):
            oldpath = join(dex_dir, self._dex_prefix + '%d.dex' % (i + 1))
            if not isfile(oldpath):
                break
            dexpath = join(dex_dir, self._store_name + '-%d.dex' % i)

            # Package each dex into a jar
            shutil.move(oldpath, dexpath)
            jarpath = dexpath + '.jar'
            create_dex_jar(jarpath, dexpath)
            dex_sizes[jarpath] = getsize(dexpath)
            jar_sizes[jarpath] = getsize(jarpath)

            # Concatenate the jar files and create corresponding metadata files
            with open(jarpath + '.xzs.tmp~.meta', 'w') as metadata:
                sizes = 'jar:{} dex:{}'.format(
                    jar_sizes[jarpath], dex_sizes[jarpath])
                metadata.write(sizes)

            with open(jarpath, 'rb') as jar:
                contents = jar.read()
                concat_jar.write(contents)
                sha1hash = hashlib.sha1(contents).hexdigest()
            # Record the jar (by its .xzs.tmp~ runtime name) plus its hash and
            # canary class in the store metadata.
            dex_metadata.add_dex(jarpath + '.xzs.tmp~',
                                 BaseDexMode.get_canary(self, i),
                                 hash=sha1hash)

    dex_metadata.write(concat_jar_meta)
    # Sanity check: concatenated blob must be exactly the sum of the parts.
    assert getsize(concat_jar_path) == sum(
        getsize(x)
        for x in abs_glob(dex_dir, self._store_name + '-*.dex.jar'))

    # XZ-compress the result
    subprocess.check_call(['xz', '-z6', '--check=crc32', '--threads=6',
                           concat_jar_path])

    # Copy all the archive and metadata back to the apk directory
    secondary_dex_dir = join(extracted_apk_dir, self._xzs_dir)
    for path in abs_glob(dex_dir, self._store_name + '*.meta'):
        shutil.copy(path, secondary_dex_dir)
    shutil.copy(concat_jar_meta, join(secondary_dex_dir, 'metadata.txt'))
    shutil.copy(concat_jar_path + '.xz',
                join(secondary_dex_dir, self._xzs_filename))
def unpackage(self, extracted_apk_dir, dex_dir):
    """Delete the secondary-dir metadata file (if present) and move every
    .dex found at the root of the extracted apk into dex_dir."""
    metadata_file = join(extracted_apk_dir, self._secondary_dir,
                         'metadata.txt')
    if os.path.exists(metadata_file):
        os.remove(metadata_file)
    for dex_file in abs_glob(extracted_apk_dir, '*.dex'):
        shutil.move(dex_file, dex_dir)
def write_redex_metadata(self, path, metadata_file):
    """Dump this module's store metadata (id, dependency list, dex file
    paths) as JSON into metadata_file."""
    metadata = {
        'id': self.name,
        'requires': self.dependencies,
        'files': [dex for dex in abs_glob(path, '*.dex')],
    }
    with open(metadata_file, 'w') as store_metadata:
        json.dump(metadata, store_metadata)
def unpackage(self, extracted_apk_dir, dex_dir):
    """Extract each secondary .dex.jar into a bare .dex in dex_dir, delete
    the jars and their .meta files plus metadata.txt, then move the primary
    classes.dex into dex_dir."""
    secondary = join(extracted_apk_dir, self._secondary_dir)
    for jar_file in abs_glob(secondary, '*.dex.jar'):
        # Strip the trailing '.jar' to get the destination .dex name.
        target_dex = join(dex_dir, basename(jar_file))[:-4]
        extract_dex_from_jar(jar_file, target_dex)
        os.remove(jar_file + ".meta")
        os.remove(jar_file)
    os.remove(join(secondary, 'metadata.txt'))
    shutil.move(join(extracted_apk_dir, 'classes.dex'), dex_dir)
def unpackage(self, extracted_apk_dir, dex_dir):
    """Extract each secondary .dex.jar into a bare .dex in dex_dir, clean up
    the jars / .meta files / metadata.txt, then delegate primary-dex handling
    to BaseDexMode.unpackage."""
    secondary_path = join(extracted_apk_dir, self._secondary_dir)
    for jarfile in abs_glob(secondary_path, '*.dex.jar'):
        # Drop the '.jar' suffix to name the extracted .dex.
        extract_dex_from_jar(jarfile, join(dex_dir, basename(jarfile))[:-4])
        os.remove(jarfile + '.meta')
        os.remove(jarfile)
    os.remove(join(secondary_path, 'metadata.txt'))
    BaseDexMode.unpackage(self, extracted_apk_dir, dex_dir)
def unpackage(self, extracted_apk_dir, dex_dir):
    """Let BaseDexMode recover the primary dex, then move the secondary
    dexes (root-relative or inside the secondary dir, depending on config)
    into dex_dir."""
    BaseDexMode.unpackage(self, extracted_apk_dir, dex_dir)
    metadata_dir = join(extracted_apk_dir, self._secondary_dir)
    # Root-relative layouts keep secondary dexes at the apk root.
    source_dir = extracted_apk_dir if self._is_root_relative else metadata_dir
    for dex_file in abs_glob(source_dir, '*.dex'):
        shutil.move(dex_file, dex_dir)
def write_redex_metadata(self, path):
    """Write this module's store metadata (id, deps, dex file list) as JSON
    to <path>/store.txt and return that file's path."""
    store_file = os.path.join(path, 'store.txt')
    metadata = {
        'id': self.name,
        'requires': self.dependencies,
        'files': [dex for dex in abs_glob(path, '*.dex')],
    }
    with open(store_file, 'w') as store_metadata:
        json.dump(metadata, store_metadata)
    return store_file
def unpackage(self, extracted_apk_dir, dex_dir):
    """Recover the primary dex via BaseDexMode, delete metadata.txt if it
    exists, then move the secondary dexes into dex_dir."""
    BaseDexMode.unpackage(self, extracted_apk_dir, dex_dir)
    metadata_dir = join(extracted_apk_dir, self._secondary_dir)
    meta_file = join(metadata_dir, 'metadata.txt')
    if os.path.exists(meta_file):
        os.remove(meta_file)
    # Root-relative layouts keep secondary dexes at the apk root.
    source_dir = extracted_apk_dir if self._is_root_relative else metadata_dir
    for dex_file in abs_glob(source_dir, '*.dex'):
        shutil.move(dex_file, dex_dir)
def unpackage(self, extracted_apk_dir, dex_dir):
    """Decompress the .xzs secondary-dex blob, split it back into the
    individual secondary-N.dex.jar files using the sizes recorded in the
    .meta files, extract each jar to a bare .dex, and move the primary
    classes.dex into dex_dir.
    """
    src = join(extracted_apk_dir, self._xzs_dir, self._xzs_filename)
    dest = join(dex_dir, self._xzs_filename)

    # Move secondary dexen
    shutil.move(src, dest)

    # concat_jar is a bunch of .dex.jar files concatenated together.
    concat_jar = join(dex_dir, 'secondary.dex.jar')
    cmd = 'cat {} | xz -d --threads 6 > {}'.format(dest, concat_jar)
    subprocess.check_call(cmd, shell=True)

    # Sizes of the concatenated .dex.jar files are stored in .meta files.
    # Read the sizes of each .dex.jar file and un-concatenate them.
    # FIX: use a raw string — '\d' in a plain literal is an invalid escape
    # sequence (DeprecationWarning on Python 3.6+, SyntaxWarning later).
    jar_size_regex = r'jar:(\d+)'
    secondary_dir = join(extracted_apk_dir, self._xzs_dir)
    jar_sizes = {}
    # At most 99 secondary jars; stop at the first missing .meta file.
    for i in range(1, 100):
        filename = 'secondary-' + str(i) + '.dex.jar.xzs.tmp~.meta'
        metadata_path = join(secondary_dir, filename)
        if isfile(metadata_path):
            with open(metadata_path) as f:
                jar_sizes[i] = \
                    int(re.match(jar_size_regex, f.read()).group(1))
            os.remove(metadata_path)
        else:
            break

    # Carve the concatenated blob back into the individual jar files.
    with open(concat_jar, 'rb') as cj:
        for i in range(1, len(jar_sizes) + 1):
            jarpath = join(dex_dir, 'secondary-%d.dex.jar' % i)
            with open(jarpath, 'wb') as jar:
                jar.write(cj.read(jar_sizes[i]))

    # Validate: each carved jar has the recorded size, and the sizes sum to
    # the whole blob (i.e. nothing was left over).
    for j in jar_sizes.keys():
        assert jar_sizes[j] == getsize(dex_dir + '/secondary-' + str(j)
                                       + '.dex.jar')
    assert sum(jar_sizes.values()) == getsize(concat_jar)

    # Clean up everything other than dexen in the dex directory
    os.remove(concat_jar)
    os.remove(dest)

    # Lastly, unzip all the jar files and delete them
    for jarpath in abs_glob(dex_dir, '*.jar'):
        extract_dex_from_jar(jarpath, jarpath[:-4])
        os.remove(jarpath)

    # Move primary dex
    shutil.move(join(extracted_apk_dir, 'classes.dex'), dex_dir)
def create_output_apk(extracted_apk_dir, output_apk_path, sign, keystore,
                      key_alias, key_password, ignore_zipalign, page_align):
    """Zip the extracted apk tree back into an apk: strip old signatures,
    build an unaligned zip, optionally re-sign it, then zipalign into
    output_apk_path.

    The order matters: signatures must be removed before zipping, signing
    must happen before alignment.
    """
    # Remove old signature files
    for f in abs_glob(extracted_apk_dir, 'META-INF/*'):
        # NOTE(review): if abs_glob already yields absolute paths, this join
        # is a no-op (join with an absolute second arg returns it) — confirm.
        cert_path = join(extracted_apk_dir, f)
        if isfile(cert_path):
            os.remove(cert_path)

    directory = make_temp_dir('.redex_unaligned', False)
    unaligned_apk_path = join(directory, 'redex-unaligned.apk')

    if isfile(unaligned_apk_path):
        os.remove(unaligned_apk_path)

    # Create new zip file
    with zipfile.ZipFile(unaligned_apk_path, 'w') as unaligned_apk:
        for dirpath, _dirnames, filenames in os.walk(extracted_apk_dir):
            for filename in filenames:
                filepath = join(dirpath, filename)
                # Archive path is the path relative to the extracted root.
                archivepath = filepath[len(extracted_apk_dir) + 1:]
                try:
                    # Some entries need a specific compression (e.g. stored).
                    compress = per_file_compression[archivepath]
                except KeyError:
                    compress = zipfile.ZIP_DEFLATED
                unaligned_apk.write(filepath, archivepath,
                                    compress_type=compress)

    # Add new signature
    if sign:
        sign_apk(keystore, key_password, key_alias, unaligned_apk_path)

    if isfile(output_apk_path):
        os.remove(output_apk_path)

    # Ensure the output directory exists; tolerate it already existing.
    try:
        os.makedirs(dirname(output_apk_path))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    zipalign(unaligned_apk_path, output_apk_path, ignore_zipalign, page_align)
def create_output_apk(extracted_apk_dir, output_apk_path, sign, keystore,
                      key_alias, key_password):
    """Zip the extracted apk tree back into an apk: strip old signatures,
    build an unaligned zip, optionally re-sign with jarsigner, then zipalign
    into output_apk_path.

    The order matters: signatures must be removed before zipping, signing
    must happen before alignment.
    """
    # Remove old signature files
    for f in abs_glob(extracted_apk_dir, 'META-INF/*'):
        # NOTE(review): if abs_glob already yields absolute paths, this join
        # is a no-op (join with an absolute second arg returns it) — confirm.
        cert_path = join(extracted_apk_dir, f)
        if isfile(cert_path):
            os.remove(cert_path)

    directory = make_temp_dir('.redex_unaligned', False)
    unaligned_apk_path = join(directory, 'redex-unaligned.apk')

    if isfile(unaligned_apk_path):
        os.remove(unaligned_apk_path)

    # Create new zip file
    with zipfile.ZipFile(unaligned_apk_path, 'w') as unaligned_apk:
        # FIX: dirnames is unused — renamed to _dirnames for consistency
        # with the sibling create_output_apk implementation.
        for dirpath, _dirnames, filenames in os.walk(extracted_apk_dir):
            for filename in filenames:
                filepath = join(dirpath, filename)
                # Archive path is the path relative to the extracted root.
                archivepath = filepath[len(extracted_apk_dir) + 1:]
                try:
                    # Some entries need a specific compression (e.g. stored).
                    compress = per_file_compression[archivepath]
                except KeyError:
                    compress = zipfile.ZIP_DEFLATED
                unaligned_apk.write(filepath, archivepath,
                                    compress_type=compress)

    # Add new signature
    if sign:
        # SECURITY NOTE: -storepass on the command line exposes the keystore
        # password to other local processes (via ps/argv); consider jarsigner's
        # password-prompt or environment-file options.
        subprocess.check_call([
            'jarsigner',
            '-sigalg', 'SHA1withRSA',
            '-digestalg', 'SHA1',
            '-keystore', keystore,
            '-storepass', key_password,
            unaligned_apk_path, key_alias],
            stdout=sys.stderr)

    if isfile(output_apk_path):
        os.remove(output_apk_path)

    zipalign(unaligned_apk_path, output_apk_path)
def detect(extracted_apk_dir):
    """Discover application modules by scanning assets/*/metadata.txt files.

    Each metadata file contributes a module name (.id line), dependency
    names (.requires lines), and a canary class prefix parsed from the
    non-dot data lines. Returns a list of ApplicationModule objects.
    """
    modules = []
    for candidate in abs_glob(extracted_apk_dir, 'assets/*/metadata.txt'):
        with open(candidate) as metadata:
            name = None
            dependencies = []
            canary_match = None
            canary_prefix = None
            for line in metadata.read().splitlines():
                tokens = line.split()
                # FIX: a blank line produces an empty token list and the
                # original tokens[0] access raised IndexError — skip it.
                if not tokens:
                    continue
                if tokens[0] == '.id':
                    name = tokens[1]
                if tokens[0] == '.requires':
                    dependencies.append(tokens[1])
                if tokens[0][0] != '.':
                    # Data lines carry the canary class in the third column.
                    # NOTE(review): assumes every non-dot line has >= 3
                    # tokens — confirm against the metadata writer.
                    canary_match = re.search(
                        '([A-Za-z0-9]*)[.]dex[0-9]+[.]Canary', tokens[2])
                    if canary_match is not None:
                        canary_prefix = canary_match.group(1)
            if name is not None:
                modules.append(ApplicationModule(
                    extracted_apk_dir, name, canary_prefix, dependencies))
    return modules
def repackage(self, extracted_apk_dir, dex_dir, have_locators,
              locator_store_id=0):
    """Package this store's dexes into jars, concatenate them into a single
    xz-compressed blob, and copy blob + metadata back into the apk tree.

    locator_store_id is forwarded into the DexMetadata so the runtime can
    resolve locators per store.
    """
    BaseDexMode.repackage(self, extracted_apk_dir, dex_dir, have_locators)
    dex_sizes = {}
    jar_sizes = {}

    concat_jar_path = join(dex_dir, self._store_name + '.dex.jar')
    concat_jar_meta = join(dex_dir, 'metadata.txt')
    dex_metadata = DexMetadata(have_locators=have_locators,
                               store=self._store_id,
                               dependencies=self._dependencies,
                               locator_store_id=locator_store_id)
    with open(concat_jar_path, 'wb') as concat_jar:
        # Supports at most 99 secondary dexes; stops at the first gap.
        for i in range(1, 100):
            oldpath = join(dex_dir, self._dex_prefix + '%d.dex' % (i + 1))
            if not isfile(oldpath):
                break
            dexpath = join(dex_dir, self._store_name + '-%d.dex' % i)

            # Package each dex into a jar
            shutil.move(oldpath, dexpath)
            jarpath = dexpath + '.jar'
            create_dex_jar(jarpath, dexpath)
            dex_sizes[jarpath] = getsize(dexpath)
            jar_sizes[jarpath] = getsize(jarpath)

            # Concatenate the jar files and create corresponding metadata files
            with open(jarpath + '.xzs.tmp~.meta', 'w') as metadata:
                sizes = 'jar:{} dex:{}'.format(jar_sizes[jarpath],
                                               dex_sizes[jarpath])
                metadata.write(sizes)

            with open(jarpath, 'rb') as jar:
                contents = jar.read()
                concat_jar.write(contents)
                sha1hash = hashlib.sha1(contents).hexdigest()
            # Record the jar (by its .xzs.tmp~ runtime name) plus its hash
            # and canary class in the store metadata.
            dex_metadata.add_dex(jarpath + '.xzs.tmp~',
                                 BaseDexMode.get_canary(self, i),
                                 hash=sha1hash)

    dex_metadata.write(concat_jar_meta)
    # Sanity check: concatenated blob must be exactly the sum of the parts.
    assert getsize(concat_jar_path) == sum(
        getsize(x)
        for x in abs_glob(dex_dir, self._store_name + '-*.dex.jar'))

    # XZ-compress the result
    subprocess.check_call(
        ['xz', '-z6', '--check=crc32', '--threads=6', concat_jar_path])

    # Copy all the archive and metadata back to the apk directory
    secondary_dex_dir = join(extracted_apk_dir, self._xzs_dir)
    for path in abs_glob(dex_dir, self._store_name + '*.meta'):
        shutil.copy(path, secondary_dex_dir)
    shutil.copy(concat_jar_meta, join(secondary_dex_dir, 'metadata.txt'))
    shutil.copy(concat_jar_path + '.xz',
                join(secondary_dex_dir, self._xzs_filename))
def repackage(
    self,
    extracted_apk_dir,
    dex_dir,
    have_locators,
    locator_store_id=0,
    fast_repackage=False,
    reset_timestamps=True,
):
    """Package this store's dexes into jars, concatenate them into a single
    xz-compressed blob, and copy blob + metadata back into the apk tree.

    fast_repackage trades compression ratio for speed (xz level 0 vs 9);
    reset_timestamps is forwarded to create_dex_jar for deterministic output.
    """
    BaseDexMode.repackage(
        self,
        extracted_apk_dir,
        dex_dir,
        have_locators,
        fast_repackage,
        reset_timestamps,
    )
    dex_sizes = {}
    jar_sizes = {}

    concat_jar_path = join(dex_dir, self._store_name + ".dex.jar")
    concat_jar_meta = join(dex_dir, "metadata.txt")
    dex_metadata = DexMetadata(
        have_locators=have_locators,
        store=self._store_id,
        dependencies=self._dependencies,
        locator_store_id=locator_store_id,
    )
    with open(concat_jar_path, "wb") as concat_jar:
        # Unbounded count (unlike older 1..99 variants); stops at first gap.
        for i in itertools.count(1):
            oldpath = join(dex_dir, self._dex_prefix + "%d.dex" % (i + 1))
            if not isfile(oldpath):
                break
            dexpath = join(dex_dir, self._store_name + "-%d.dex" % i)

            # Package each dex into a jar
            shutil.move(oldpath, dexpath)
            jarpath = dexpath + ".jar"
            create_dex_jar(jarpath, dexpath, reset_timestamps=reset_timestamps)
            dex_sizes[jarpath] = getsize(dexpath)
            jar_sizes[jarpath] = getsize(jarpath)

            # Concatenate the jar files and create corresponding metadata files
            with open(jarpath + ".xzs.tmp~.meta", "w") as metadata:
                sizes = "jar:{} dex:{}".format(
                    jar_sizes[jarpath], dex_sizes[jarpath])
                metadata.write(sizes)

            with open(jarpath, "rb") as jar:
                contents = jar.read()
                concat_jar.write(contents)
                sha1hash = hashlib.sha1(contents).hexdigest()
            # Record the jar (by its .xzs.tmp~ runtime name) plus its hash
            # and canary class in the store metadata.
            dex_metadata.add_dex(
                jarpath + ".xzs.tmp~",
                BaseDexMode.get_canary(self, i),
                hash=sha1hash,
            )

    dex_metadata.write(concat_jar_meta)
    # Sanity check: concatenated blob must be exactly the sum of the parts.
    assert getsize(concat_jar_path) == sum(
        getsize(x)
        for x in abs_glob(dex_dir, self._store_name + "-*.dex.jar"))

    # XZ-compress the result
    compression_level = 0 if fast_repackage else 9
    subprocess.check_call([
        "xz",
        "-z%d" % compression_level,
        "--check=crc32",
        "--threads=6",
        concat_jar_path,
    ])

    # Copy all the archive and metadata back to the apk directory
    secondary_dex_dir = join(extracted_apk_dir, self._xzs_dir)
    for path in abs_glob(dex_dir, self._store_name + "*.meta"):
        shutil.copy(path, secondary_dex_dir)
    shutil.copy(concat_jar_meta, join(secondary_dex_dir, "metadata.txt"))
    shutil.copy(concat_jar_path + ".xz",
                join(secondary_dex_dir, self._xzs_filename))
def unpackage(self, extracted_apk_dir, dex_dir, unpackage_metadata=False):
    """Decompress the .xzs secondary-dex blob, split it back into individual
    .dex.jar files in the order recorded in metadata.txt, extract each jar
    to a bare .dex, and delegate primary-dex handling to BaseDexMode.

    If unpackage_metadata is True, metadata.txt is also copied into dex_dir.
    """
    src = join(extracted_apk_dir, self._xzs_dir, self._xzs_filename)
    dest = join(dex_dir, self._xzs_filename)

    # Move secondary dexen
    shutil.move(src, dest)

    # concat_jar is a bunch of .dex.jar files concatenated together.
    concat_jar = join(dex_dir, self._xzs_filename[:-4])
    cmd = "cat {} | xz -d --threads 6 > {}".format(dest, concat_jar)
    subprocess.check_call(cmd, shell=True)

    if unpackage_metadata:
        shutil.copy(join(extracted_apk_dir, self._xzs_dir, "metadata.txt"),
                    dex_dir)

    # Recover the jar order from metadata.txt; data lines name each jar as
    # <store>-N.dex.jar.xzs.tmp~ and we collect the Ns in listed order.
    dex_order = []
    with open(join(extracted_apk_dir, self._xzs_dir,
                   "metadata.txt")) as dex_metadata:
        for line in dex_metadata.read().splitlines():
            if line[0] != ".":
                tokens = line.split()
                search_pattern = \
                    self._store_name + r"-(\d+)\.dex\.jar\.xzs\.tmp~"
                match = re.search(search_pattern, tokens[0])
                if match is None:
                    raise Exception("unable to find match in " + tokens[0]
                                    + " for " + search_pattern)
                dex_order.append(int(match.group(1)))

    # Sizes of the concatenated .dex.jar files are stored in .meta files.
    # Read the sizes of each .dex.jar file and un-concatenate them.
    jar_size_regex = r"jar:(\d+)"
    secondary_dir = join(extracted_apk_dir, self._xzs_dir)
    jar_sizes = {}
    for i in dex_order:
        filename = self._store_name + "-%d.dex.jar.xzs.tmp~.meta" % i
        metadata_path = join(secondary_dir, filename)
        if isfile(metadata_path):
            with open(metadata_path) as f:
                jar_sizes[i] = int(
                    re.match(jar_size_regex, f.read()).group(1))
            os.remove(metadata_path)
            log("found jar " + filename + " of size " + str(jar_sizes[i]))
        else:
            # Stop at the first missing .meta file.
            break

    # Carve the concatenated blob back into individual jars, in order.
    with open(concat_jar, "rb") as cj:
        for i in dex_order:
            jarpath = join(dex_dir, self._store_name + "-%d.dex.jar" % i)
            with open(jarpath, "wb") as jar:
                jar.write(cj.read(jar_sizes[i]))

    # Validate: each carved jar has the recorded size, and the sizes sum to
    # the whole blob (i.e. nothing was left over).
    for j in jar_sizes.keys():
        jar_size = getsize(dex_dir + "/" + self._store_name + "-" + str(j)
                           + ".dex.jar")
        log("validating " + self._store_name + "-" + str(j) + ".dex.jar size="
            + str(jar_size) + " expecting=" + str(jar_sizes[j]))
        assert jar_sizes[j] == jar_size
    assert sum(jar_sizes.values()) == getsize(concat_jar)

    # Clean up everything other than dexen in the dex directory
    os.remove(concat_jar)
    os.remove(dest)

    # Lastly, unzip all the jar files and delete them
    for jarpath in abs_glob(dex_dir, "*.jar"):
        extract_dex_from_jar(jarpath, jarpath[:-4])
        os.remove(jarpath)
    BaseDexMode.unpackage(self, extracted_apk_dir, dex_dir)
def detect(self, extracted_apk_dir):
    """Return the number of .dex files in this mode's secondary dex dir."""
    dex_location = join(extracted_apk_dir, self._secondary_dir)
    return sum(1 for _ in abs_glob(dex_location, "*.dex"))
def unpackage(self, extracted_apk_dir, dex_dir):
    """Decompress the .xzs secondary-dex blob, split it back into individual
    .dex.jar files in the order recorded in metadata.txt, extract each jar
    to a bare .dex, and delegate primary-dex handling to BaseDexMode.
    """
    src = join(extracted_apk_dir, self._xzs_dir, self._xzs_filename)
    dest = join(dex_dir, self._xzs_filename)

    # Move secondary dexen
    shutil.move(src, dest)

    # concat_jar is a bunch of .dex.jar files concatenated together.
    concat_jar = join(dex_dir, self._xzs_filename[:-4])
    cmd = 'cat {} | xz -d --threads 6 > {}'.format(dest, concat_jar)
    subprocess.check_call(cmd, shell=True)

    # Recover the jar order from metadata.txt; data lines name each jar as
    # <store>-N.dex.jar.xzs.tmp~ and we collect the Ns in listed order.
    dex_order = []
    with open(join(extracted_apk_dir, self._xzs_dir,
                   'metadata.txt')) as dex_metadata:
        for line in dex_metadata.read().splitlines():
            if line[0] != '.':
                tokens = line.split()
                # FIX: raw string — '\d' in a plain literal is an invalid
                # escape (DeprecationWarning on Python 3.6+).
                search_pattern = \
                    self._store_name + r'-(\d+)\.dex\.jar\.xzs\.tmp~'
                match = re.search(search_pattern, tokens[0])
                if match is None:
                    raise Exception('unable to find match in ' + tokens[0]
                                    + ' for ' + search_pattern)
                dex_order.append(int(match.group(1)))

    # Sizes of the concatenated .dex.jar files are stored in .meta files.
    # Read the sizes of each .dex.jar file and un-concatenate them.
    # FIX: raw string (same invalid-escape issue as above).
    jar_size_regex = r'jar:(\d+)'
    secondary_dir = join(extracted_apk_dir, self._xzs_dir)
    jar_sizes = {}
    for i in dex_order:
        filename = self._store_name + '-%d.dex.jar.xzs.tmp~.meta' % i
        metadata_path = join(secondary_dir, filename)
        if isfile(metadata_path):
            with open(metadata_path) as f:
                jar_sizes[i] = \
                    int(re.match(jar_size_regex, f.read()).group(1))
            os.remove(metadata_path)
            log('found jar ' + filename + ' of size ' + str(jar_sizes[i]))
        else:
            # Stop at the first missing .meta file.
            break

    # Carve the concatenated blob back into individual jars, in order.
    with open(concat_jar, 'rb') as cj:
        for i in dex_order:
            jarpath = join(dex_dir, self._store_name + '-%d.dex.jar' % i)
            with open(jarpath, 'wb') as jar:
                jar.write(cj.read(jar_sizes[i]))

    # Validate: each carved jar has the recorded size, and the sizes sum to
    # the whole blob (i.e. nothing was left over).
    for j in jar_sizes.keys():
        jar_size = getsize(dex_dir + '/' + self._store_name + '-' + str(j)
                           + '.dex.jar')
        log('validating ' + self._store_name + '-' + str(j) + '.dex.jar size='
            + str(jar_size) + ' expecting=' + str(jar_sizes[j]))
        assert jar_sizes[j] == jar_size
    assert sum(jar_sizes.values()) == getsize(concat_jar)

    # Clean up everything other than dexen in the dex directory
    os.remove(concat_jar)
    os.remove(dest)

    # Lastly, unzip all the jar files and delete them
    for jarpath in abs_glob(dex_dir, '*.jar'):
        extract_dex_from_jar(jarpath, jarpath[:-4])
        os.remove(jarpath)
    BaseDexMode.unpackage(self, extracted_apk_dir, dex_dir)
def repackage(self, extracted_apk_dir, dex_dir, have_locators):
    """Package the secondary dexes into jars, concatenate them into a single
    blob plus a hand-written metadata.txt, xz-compress the blob, and copy
    everything back into the apk tree.

    This older variant writes the metadata lines inline (no DexMetadata
    helper) and hard-codes the 'secondary' store naming.
    """
    # Move primary dex
    shutil.move(join(dex_dir, 'classes.dex'), extracted_apk_dir)

    dex_sizes = {}
    jar_sizes = {}
    # Package each dex into a jar
    # Supports at most 99 secondary dexes; stops at the first gap.
    for i in range(1, 100):
        oldpath = join(dex_dir, 'classes%d.dex' % (i + 1))
        if not isfile(oldpath):
            break
        dexpath = join(dex_dir, 'secondary-%d.dex' % i)
        shutil.move(oldpath, dexpath)
        jarpath = dexpath + '.jar'
        create_dex_jar(jarpath, dexpath)
        dex_sizes[jarpath] = getsize(dexpath)
        jar_sizes[jarpath] = getsize(jarpath)

    concat_jar_path = join(dex_dir, 'secondary.dex.jar')
    concat_jar_meta = join(dex_dir, 'metadata.txt')
    # Concatenate the jar files and create corresponding metadata files
    with open(concat_jar_path, 'wb') as concat_jar:
        with open(concat_jar_meta, 'w') as concat_meta:
            if have_locators:
                concat_meta.write('.locators\n')
            for i in range(1, 100):
                jarpath = join(dex_dir, 'secondary-%d.dex.jar' % i)
                if not isfile(jarpath):
                    break

                # Per-jar size record (written as ascii bytes).
                with open(jarpath + '.xzs.tmp~.meta', 'wb') as metadata:
                    sizes = 'jar:{} dex:{}'.format(
                        jar_sizes[jarpath], dex_sizes[jarpath])
                    metadata.write(bytes(sizes, 'ascii'))

                with open(jarpath, 'rb') as jar:
                    contents = jar.read()
                    concat_jar.write(contents)
                    sha1hash = hashlib.sha1(contents).hexdigest()
                # Metadata line: runtime jar name, sha1, canary class name.
                concat_meta.write(
                    '%s.xzs.tmp~ %s secondary.dex%02d.Canary\n'
                    % (basename(jarpath), sha1hash, i))
    # Sanity check: concatenated blob must be exactly the sum of the parts.
    assert getsize(concat_jar_path) == sum(
        getsize(x) for x in abs_glob(dex_dir, 'secondary-*.dex.jar'))

    # XZ-compress the result
    subprocess.check_call(['xz', '-z6', '--check=crc32', '--threads=6',
                           concat_jar_path])

    # Copy all the archive and metadata back to the apk directory
    secondary_dex_dir = join(extracted_apk_dir, self._xzs_dir)
    for path in abs_glob(dex_dir, '*.meta'):
        shutil.copy(path, secondary_dex_dir)
    shutil.copy(concat_jar_meta, join(secondary_dex_dir, 'metadata.txt'))
    shutil.copy(concat_jar_path + '.xz',
                join(secondary_dex_dir, self._xzs_filename))
def detect(self, extracted_apk_dir):
    """Return a truthy count of .dex.jar files in the secondary dir, or
    False when the directory does not exist."""
    candidate_dir = join(extracted_apk_dir, self._secondary_dir)
    if not isdir(candidate_dir):
        return False
    return len(list(abs_glob(candidate_dir, '*.dex.jar')))
def detect(self, extracted_apk_dir: str) -> bool:
    """Return True iff the secondary dex directory exists and contains at
    least one .dex.jar file.

    FIX: the original returned ``isdir(...) and len(list(...))`` — an int
    when the directory exists, contradicting the ``-> bool`` annotation.
    ``any()`` returns a genuine bool (and short-circuits) while remaining
    truthiness-compatible for existing callers.
    """
    secondary_dex_dir = join(extracted_apk_dir, self._secondary_dir)
    return isdir(secondary_dex_dir) and any(
        True for _ in abs_glob(secondary_dex_dir, "*.dex.jar"))