Example #1
def run(args):
    domain = args.domain
    thread_cnt = int(args.thread)
    dict_file = args.file
    outfile = args.out

    if not domain:
        print('usage: dnsburte.py -d aliyun.com')
        sys.exit(1)

    # init _cache_path
    script_path = os.path.dirname(os.path.abspath(__file__))
    _cache_path = os.path.join(script_path, 'result/{0}'.format(domain))
    if not os.path.exists(_cache_path):
        os.makedirs(_cache_path, 0777)

    subdomains = []
    dnsfuzz = DomainFuzzer(target=domain, dict_file=dict_file)

    logging.info("starting bruteforce threading({0}) : {1}".format(
        thread_cnt, domain))
    for subname in dnsfuzz.run(thread_cnt=thread_cnt):
        subdomains.append(subname)

    _cache_file = os.path.join(_cache_path, 'dnsburte.json')
    save_result(_cache_file, subdomains)

    script_path = os.path.dirname(os.path.abspath(__file__))
    _outfile_file = os.path.join(script_path, outfile)
    save_result(_outfile_file, subdomains)

    logging.info("dns bruteforce subdomains({0}) successfully...".format(
        len(subdomains)))
    logging.info("result save in : {0}".format(_outfile_file))
Example #3
def make_query(**context):
    # tolerance window in days, read from an Airflow Variable (default: 1)
    window_size = int(Variable.get('tolerance_window_size', 1))
    ti = context['ti']
    ts = context['ts']
    # parse the execution timestamp, convert it to the configured timezone,
    # and derive a window of window_size days ending at the execution date
    end_date = tzinfo.convert(dateutil.parser.parse(ts)).date()
    start_date = (end_date - datetime.timedelta(days=window_size)).strftime(DATE_FORMAT)
    end_date = end_date.strftime(DATE_FORMAT)
    ti.log.info(f'start: {start_date}, end: {end_date}')
    # find intervals with missing data inside the window and persist them as JSON
    missing = missing_intervals(start_date, end_date)
    ti.log.info(f'{missing}')
    save_result(context, missing, extension='json')
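make_query relies on the Airflow task context (ti, ts) plus project helpers (tzinfo, DATE_FORMAT, missing_intervals, save_result) that are not shown here. A minimal sketch of how such a callable is typically attached to a DAG, assuming Airflow 1.10-style operators; the dag_id, schedule, and start date are illustrative only.

# Illustrative DAG wiring; dag_id, schedule_interval and start_date are assumptions.
from datetime import datetime
from airflow import DAG
from airflow.operators.python_operator import PythonOperator

with DAG(dag_id='tolerance_check',
         start_date=datetime(2021, 1, 1),
         schedule_interval='@daily',
         catchup=False) as dag:
    PythonOperator(
        task_id='make_query',
        python_callable=make_query,
        provide_context=True,  # injects ti, ts, etc. into **context
    )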
Example #4
def run(args):
    domain = args.domain
    if not domain:
        print('usage: wydomain.py -d aliyun.com')
        sys.exit(1)

    outfile = '{0}.log'.format(domain)

    script_path = os.path.dirname(os.path.abspath(__file__))
    _cache_path = os.path.join(script_path, 'result/{0}'.format(domain))
    if not os.path.exists(_cache_path):
        os.makedirs(_cache_path, 0777)

    # start crt
    print '[*]Starting Crt fetch ...'
    result = Crt(domain=domain).run()
    _cache_file = os.path.join(_cache_path, 'crt.json')
    save_result(_cache_file, result)
    print '\t[-]Fetch complete | Found {}'.format(len(result))

    # start ilink
    print '[*]Starting iLink fetch ...'
    result = Ilink(domain=domain).run()
    _cache_file = os.path.join(_cache_path, 'ilink.json')
    save_result(_cache_file, result)
    print '\t[-]Fetch complete | Found {}'.format(len(result))

    # start brute force
    print '[*]Starting Brute sub ...'
    result = BruteDns(domain=domain).run()
    _cache_file = os.path.join(_cache_path, 'brute.json')
    save_result(_cache_file, result)
    print '\n\t[-]Bruteforce complete | Found {}'.format(len(result))
    #

    _cache_files = ['crt.json', 'ilink.json', 'brute.json']

    subdomains = []

    for file in _cache_files:
        _cache_file = os.path.join(_cache_path, file)
        json_data = read_json(_cache_file)
        if json_data:
            subdomains.extend(json_data)

    subdomains = list(set(subdomains))

    _result_file = os.path.join(script_path, outfile)
    save_result(_result_file, subdomains)

    print '[*]{0} {1} subdomains save to {2}'.format(domain, len(subdomains),
                                                     _result_file)
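The wydomain-style examples above persist results with save_result and read cache files back with read_json, but neither helper is shown (Example #3 uses a save_result with a different signature and is not covered here). A plausible sketch of the two helpers, assuming the cache files are plain JSON lists; the real project implementation may differ.

# Assumed helper implementations for illustration only.
import json

def save_result(path, data):
    # write the collected subdomains as a JSON document
    with open(path, 'w') as fp:
        json.dump(list(data), fp, indent=2)

def read_json(path):
    # return the parsed JSON content, or None if the cache file is missing or invalid
    try:
        with open(path) as fp:
            return json.load(fp)
    except (IOError, ValueError):
        return None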
Example #5
def run(args):
    domain = args.domain
    outfile = args.out

    if not domain:
        print('usage: wydomain.py -d aliyun.com')
        sys.exit(1)

    # init _cache_path
    script_path = os.path.dirname(os.path.abspath(__file__))
    _cache_path = os.path.join(script_path, 'result/{0}'.format(domain))
    if not os.path.exists(_cache_path):
        os.makedirs(_cache_path, 0777)

    # alexa result json file
    logging.info("starting alexa fetcher...")
    _cache_file = os.path.join(_cache_path, 'alexa.json')
    result = Alexa(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("alexa fetcher subdomains({0}) successfully...".format(len(result)))

    # threatminer result json file
    logging.info("starting threatminer fetcher...")
    _cache_file = os.path.join(_cache_path, 'threatminer.json')
    result = Threatminer(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("threatminer fetcher subdomains({0}) successfully...".format(len(result)))

    # threatcrowd result json file
    logging.info("starting threatcrowd fetcher...")
    _cache_file = os.path.join(_cache_path, 'threatcrowd.json')
    result = Threatcrowd(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("threatcrowd fetcher subdomains({0}) successfully...".format(len(result)))

    # sitedossier result json file
    logging.info("starting sitedossier fetcher...")
    _cache_file = os.path.join(_cache_path, 'sitedossier.json')
    result = Sitedossier(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("sitedossier fetcher subdomains({0}) successfully...".format(len(result)))

    # netcraft result json file
    logging.info("starting netcraft fetcher...")
    _cache_file = os.path.join(_cache_path, 'netcraft.json')
    result = Netcraft(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("netcraft fetcher subdomains({0}) successfully...".format(len(result)))

    # ilinks result json file
    logging.info("starting ilinks fetcher...")
    _cache_file = os.path.join(_cache_path, 'ilinks.json')
    result = Ilinks(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("ilinks fetcher subdomains({0}) successfully...".format(len(result)))

    # chaxunla result json file
    logging.info("starting chaxunla fetcher...")
    _cache_file = os.path.join(_cache_path, 'chaxunla.json')
    result = Chaxunla(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("chaxunla fetcher subdomains({0}) successfully...".format(len(result)))

    # google TransparencyReport result json file
    logging.info("starting google TransparencyReport fetcher...")
    result = TransparencyReport(domain=domain).run()
    _cache_file = os.path.join(_cache_path, 'googlect_subject.json')
    save_result(_cache_file, result.get('subjects'))
    _cache_file = os.path.join(_cache_path, 'googlect_dnsnames.json')
    save_result(_cache_file, result.get('dns_names'))
    logging.info("google TransparencyReport fetcher subdomains({0}) successfully...".format(len(result.get('dns_names'))))

    # collect subdomains gathered by the API fetchers above
    sub_files = [
        'alexa.json', 
        'chaxunla.json', 
        'ilinks.json', 
        'netcraft.json', 
        'sitedossier.json',
        'threatcrowd.json',
        'threatminer.json']

    # process all cache files
    subdomains = []
    for file in sub_files:
        _cache_file = os.path.join(_cache_path, file)
        json_data = read_json(_cache_file)
        if json_data:
            subdomains.extend(json_data)

    # process openssl x509 dns_names
    _cache_file = os.path.join(_cache_path, 'googlect_dnsnames.json')
    json_data = read_json(_cache_file)
    for sub in json_data:
        if sub.endswith(domain):
            subdomains.append(sub)

    # collect brute-force subdomains
    _burte_file = os.path.join(_cache_path, 'dnsburte.json')
    if FileUtils.exists(_burte_file):
        json_data = read_json(_burte_file)
        if json_data:
            subdomains.extend(json_data)

    # save all subdomains to outfile
    subdomains = list(set(subdomains))
    _result_file = os.path.join(script_path, outfile)
    save_result(_result_file, subdomains)
    logging.info("{0} {1} subdomains save to {2}".format(
        domain, len(subdomains), _result_file))
Example #6
def run(args):
    domain = args.domain
    outfile = args.domain + '_wy.txt'

    if not domain:
        print('usage: wydomain.py -d aliyun.com')
        sys.exit(1)

    # init _cache_path
    script_path = os.path.dirname(os.path.abspath(__file__))
    _cache_path = os.path.join(script_path, 'result/{0}'.format(domain))
    if not os.path.exists(_cache_path):
        os.makedirs(_cache_path, 0777)

    # alexa result json file
    logging.info("starting alexa fetcher...")
    _cache_file = os.path.join(_cache_path, 'alexa.json')
    result = Alexa(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("alexa fetcher subdomains({0}) successfully...".format(len(result)))

    # threatminer result json file
    logging.info("starting threatminer fetcher...")
    _cache_file = os.path.join(_cache_path, 'threatminer.json')
    result = Threatminer(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("threatminer fetcher subdomains({0}) successfully...".format(len(result)))

    # threatcrowd result json file
    logging.info("starting threatcrowd fetcher...")
    _cache_file = os.path.join(_cache_path, 'threatcrowd.json')
    result = Threatcrowd(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("threatcrowd fetcher subdomains({0}) successfully...".format(len(result)))

    # sitedossier result json file
    logging.info("starting sitedossier fetcher...")
    _cache_file = os.path.join(_cache_path, 'sitedossier.json')
    result = Sitedossier(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("sitedossier fetcher subdomains({0}) successfully...".format(len(result)))

    # netcraft result json file
    logging.info("starting netcraft fetcher...")
    _cache_file = os.path.join(_cache_path, 'netcraft.json')
    result = Netcraft(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("netcraft fetcher subdomains({0}) successfully...".format(len(result)))

    # ilinks result json file
    logging.info("starting ilinks fetcher...")
    _cache_file = os.path.join(_cache_path, 'ilinks.json')
    result = Ilinks(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("ilinks fetcher subdomains({0}) successfully...".format(len(result)))

    # chaxunla result json file
    logging.info("starting chaxunla fetcher...")
    _cache_file = os.path.join(_cache_path, 'chaxunla.json')
    result = Chaxunla(domain=domain).run()
    save_result(_cache_file, result)
    logging.info("chaxunla fetcher subdomains({0}) successfully...".format(len(result)))

    # google TransparencyReport result json file
    logging.info("starting google TransparencyReport fetcher...")
    result = TransparencyReport(domain=domain).run()
    _cache_file = os.path.join(_cache_path, 'googlect_subject.json')
    save_result(_cache_file, result.get('subjects'))
    _cache_file = os.path.join(_cache_path, 'googlect_dnsnames.json')
    save_result(_cache_file, result.get('dns_names'))
    logging.info("google TransparencyReport fetcher subdomains({0}) successfully...".format(len(result.get('dns_names'))))

    # collect subdomains gathered by the API fetchers above
    sub_files = [
        'alexa.json', 
        'chaxunla.json', 
        'ilinks.json', 
        'netcraft.json', 
        'sitedossier.json',
        'threatcrowd.json',
        'threatminer.json']

    # process all cache files
    subdomains = []
    for file in sub_files:
        _cache_file = os.path.join(_cache_path, file)
        json_data = read_json(_cache_file)
        if json_data:
            subdomains.extend(json_data)

    # process openssl x509 dns_names
    _cache_file = os.path.join(_cache_path, 'googlect_dnsnames.json')
    json_data = read_json(_cache_file)
    for sub in json_data:
        if sub.endswith(domain):
            subdomains.append(sub)

    # collect brute-force subdomains
    _burte_file = os.path.join(_cache_path, 'dnsburte.json')
    if FileUtils.exists(_burte_file):
        json_data = read_json(_burte_file)
        if json_data:
            subdomains.extend(json_data)

    # save all subdomains to outfile
    subdomains = list(set(subdomains))
    _result_file = os.path.join(script_path, outfile)
    save_result(_result_file, subdomains)
    logging.info("{0} {1} subdomains save to {2}".format(
        domain, len(subdomains), _result_file))
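The seven fetcher blocks in the two examples above repeat one pattern: instantiate the fetcher, run it, cache the result as JSON, and log the count. A sketch of the same steps as a data-driven loop, assuming every fetcher class exposes the run() interface shown above.

# Same steps as the repeated fetcher blocks above, expressed as a loop (sketch).
FETCHERS = [
    ('alexa.json', Alexa),
    ('threatminer.json', Threatminer),
    ('threatcrowd.json', Threatcrowd),
    ('sitedossier.json', Sitedossier),
    ('netcraft.json', Netcraft),
    ('ilinks.json', Ilinks),
    ('chaxunla.json', Chaxunla),
]

for filename, fetcher_cls in FETCHERS:
    name = fetcher_cls.__name__.lower()
    logging.info("starting {0} fetcher...".format(name))
    result = fetcher_cls(domain=domain).run()
    save_result(os.path.join(_cache_path, filename), result)
    logging.info("{0} fetcher subdomains({1}) successfully...".format(name, len(result)))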
Example #7
domain = 'jd.com'

outfile = '{0}.log'.format(domain)
# script_path=os.getcwd() #E:\SubDomain

script_path = os.path.dirname(os.path.abspath(__file__))
# print script_path
#
_cache_path = os.path.join(script_path, 'result\{0}'.format(domain))
# print _cache_path
if not os.path.exists(_cache_path):
    os.makedirs(_cache_path, 0777)

result = Crt(domain=domain).run()
_cache_file = os.path.join(_cache_path, 'crt.json')
save_result(_cache_file, result)
# print result
#
result = Ilink(domain=domain).run()
_cache_file = os.path.join(_cache_path, 'ilink.json')
save_result(_cache_file, result)
# print result

result = BaiduSite(domain=domain, pages=10).run()
_cache_file = os.path.join(_cache_path, 'BaiduSite.json')
save_result(_cache_file, result)

# result = BruteDns(domain=domain).run()
# _cache_file = os.path.join(_cache_path,'BurteDns.json')
# save_result(_cache_file, result)
# # print result
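These snippets are written for Python 2 (print statements, 0777 octal literals) and this last example hard-codes a Windows-only backslash in the 'result\{0}' path. For reference, a Python 3, cross-platform version of the cache-directory setup might look like the sketch below; it is not part of the original project.

# Python 3 / cross-platform equivalent of the cache-directory setup above (sketch).
import os

script_path = os.path.dirname(os.path.abspath(__file__))
_cache_path = os.path.join(script_path, 'result', domain)   # portable separator
os.makedirs(_cache_path, mode=0o777, exist_ok=True)         # 0o777 is the Python 3 octal form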