def google_oauth_search(displaymode, page):
    """Scan a fetched page for Google OAuth client secrets.

    Args:
        displaymode: 's' to save hits to a file, 'p' to print them, 'b' for both.
        page: requests-style response object; only ``page.text`` is read.
    """
    lib.PrintStatus("Scanning for google OAUTH secrets...")
    pagetext = page.text
    gauth_pattern = r"(\"client_secret\":\"[a-zA-Z0-9-_]{24}\")"
    for k in re.findall(gauth_pattern, pagetext):
        lib.PrintHighSeverity('\nWarning: High Severity Item Found\n')
        # BUG FIX: original tested `displaymode == 's' or 'b'`, which is always
        # truthy, so the print branch was unreachable.  Membership tests let
        # 'b' take both the save and print paths as intended.
        if displaymode in ('s', 'b'):
            gauth_output = f'{curdir}/Output/GoogleOAUTHSecrets.txt'
            # exist_ok replaces the racy exists()/makedirs()/EEXIST dance.
            makedirs(dirname(gauth_output), exist_ok=True)
            with open(gauth_output, 'a') as gofile:
                gofile.write(f'Potential Secret: {k}\n')
        if displaymode in ('p', 'b'):
            lib.PrintSuccess(f'Potential Secret: {k}')
def facebook_OAUTH(displaymode, page):
    """Scan a fetched page for Facebook OAuth secrets (32 hex chars near the word
    'facebook').

    Args:
        displaymode: 's' to save hits to a file, 'p' to print them, 'b' for both.
        page: requests-style response object; only ``page.text`` is read.
    """
    lib.PrintStatus("Scanning for facebook OAUTH secrets...")
    pagetext = page.text
    # REGEX FIX: the original classes `[f|F]` needlessly matched '|', and
    # `['\"\\s]` in a raw string matched a literal backslash plus the letter
    # 's' instead of whitespace.  `\s` now matches whitespace as intended.
    fauth_pattern = r"[fF][aA][cC][eE][bB][oO][oO][kK].{0,30}['\"\s][0-9a-f]{32}['\"\s]"
    for k in re.findall(fauth_pattern, pagetext):
        lib.PrintHighSeverity('\nWarning: High Severity Item Found\n')
        # BUG FIX: `displaymode == 's' or 'b'` was always truthy; the print
        # branch was unreachable.  'b' now takes both paths.
        if displaymode in ('s', 'b'):
            fauth_output = f'{curdir}/Output/FacebookOAUTHSecrets.txt'
            makedirs(dirname(fauth_output), exist_ok=True)
            with open(fauth_output, 'a') as gofile:
                gofile.write(f'Potential Secret: {k}\n')
        if displaymode in ('p', 'b'):
            lib.PrintSuccess(f'Potential Secret: {k}')
def google_access_token_search(displaymode, page):
    """Scan a fetched page for Google access tokens (``ya29.`` prefix).

    Args:
        displaymode: 's' to save hits to a file, 'p' to print them, 'b' for both.
        page: requests-style response object; only ``page.text`` is read.
    """
    lib.PrintStatus("Scanning for google access tokens...")
    pagetext = page.text
    # REGEX FIX: the dot after 'ya29' was unescaped (matched any character),
    # and `\\-` in a raw string put a literal backslash in the class; a
    # trailing '-' is all that was intended.
    gat_pattern = r'ya29\.[0-9a-zA-Z_-]{68}'
    for k in re.findall(gat_pattern, pagetext):
        lib.PrintHighSeverity('\nWarning: High Severity Item Found\n')
        # BUG FIX: `displaymode == 's' or 'b'` was always truthy; the print
        # branch was unreachable.  'b' now takes both paths.
        if displaymode in ('s', 'b'):
            gat_output = f'{curdir}/Output/GoogleAccessPotentialTokens.txt'
            makedirs(dirname(gat_output), exist_ok=True)
            with open(gat_output, 'a') as gofile:
                gofile.write(f'Potential Token: {k}\n')
        if displaymode in ('p', 'b'):
            lib.PrintSuccess(f'Potential Token: {k}')
def redis_search(displaymode, page, repo_crawl, verbosity):
    """Scan a page for Redis connection URLs and Redis-related config artifacts.

    Args:
        displaymode: 's' to save hits to a file, 'p' to print them, 'b' for both.
        page: requests-style response object; only ``page.text`` is read.
        repo_crawl: True when spidering a whole repo; status output is then
            gated by *verbosity*.
        verbosity: 'on'/'off' — controls status output during repo crawls.
    """
    if repo_crawl is False or verbosity == 'on':
        lib.PrintStatus("Scanning for Redis secrets...")
    pagetext = page.text
    # REGEX FIX: `\\-` in a raw string put a literal backslash in the class;
    # a plain '-' (and '.') is what a redis:// URL can actually contain.
    redis_pattern = r'redis://[0-9a-zA-Z:@.-]+'
    redis_artifacts = ['REDIS_PASSWORD', 'REDIS_CACHE_DATABASE', 'REDIS_HOST', 'REDIS_DATABASE']
    for k in re.findall(redis_pattern, pagetext):
        lib.PrintHighSeverity('Warning: High Severity Item Found')
        # BUG FIX: `displaymode == 's' or 'b'` was always truthy; the print
        # branch was unreachable.  'b' now takes both paths.
        if displaymode in ('s', 'b'):
            redis_output = f'{curdir}/Output/Redis/RedisLinks.txt'
            makedirs(dirname(redis_output), exist_ok=True)
            with open(redis_output, 'a') as gofile:
                gofile.write(f'Potential link: {k}\n')
        if displaymode in ('p', 'b'):
            lib.PrintSuccess(f'Potential link: {k}')
    for ra in set(redis_artifacts):
        if ra in pagetext:
            lib.PrintHighSeverity('Warning: High Severity Item Found')
            if displaymode in ('s', 'b'):
                redis_artifacts_output = f'{curdir}/Output/Redis/RedisArtifacts.txt'
                makedirs(dirname(redis_artifacts_output), exist_ok=True)
                with open(redis_artifacts_output, 'a') as rafile:
                    # FIX: append a newline so artifacts don't run together.
                    rafile.write(f'Artifact found: {ra}\n')
            if displaymode in ('p', 'b'):
                lib.PrintSuccess(f'Artifact Found: {ra}')
def AWS_search(displaymode, page):
    """Scan a fetched page for AWS access key IDs (``AKIA`` prefix).

    Args:
        displaymode: 's' to save hits to a file, 'p' to print them, 'b' for both.
        page: requests-style response object; only ``page.text`` is read.
    """
    lib.PrintStatus("Searching for AWS Access Keys...")
    aws_pattern = r"AKIA[0-9A-Z]{16}"
    pagetext = page.text
    for k in re.findall(aws_pattern, pagetext):
        # BUG FIX: `displaymode == 's' or 'b'` was always truthy; the print
        # branch was unreachable.  'b' now takes both paths.
        if displaymode in ('s', 'b'):
            aws_output = f'{curdir}/Output/AWSPotentialTokens.txt'
            makedirs(dirname(aws_output), exist_ok=True)
            with open(aws_output, 'a') as gofile:
                gofile.write(f'Potential Tokens: {k}\n')
        if displaymode in ('p', 'b'):
            lib.PrintSuccess(f'Potential Token: {k}')
            lib.PrintHighSeverity('\nWarning: High Severity Item Found\n')
def redis_search(displaymode, page):
    """Scan a fetched page for Redis connection URLs.

    NOTE(review): this 2-argument definition shadows the 4-argument
    ``redis_search`` defined earlier in this module; the later definition
    wins at import time — confirm which one callers expect.

    Args:
        displaymode: 's' to save hits to a file, 'p' to print them, 'b' for both.
        page: requests-style response object; only ``page.text`` is read.
    """
    lib.PrintStatus("Scanning for Redis URLs...")
    pagetext = page.text
    # REGEX FIX: `\\-` in a raw string put a literal backslash in the class;
    # a plain '-' at the end is what was intended.
    redis_pattern = r'redis://[0-9a-zA-Z:@.-]+'
    for k in re.findall(redis_pattern, pagetext):
        # BUG FIX: `displaymode == 's' or 'b'` was always truthy; the print
        # branch was unreachable.  'b' now takes both paths.
        if displaymode in ('s', 'b'):
            redis_output = f'{curdir}/Output/RedisLinks.txt'
            makedirs(dirname(redis_output), exist_ok=True)
            with open(redis_output, 'a') as gofile:
                gofile.write(f'Potential link: {k}\n')
        if displaymode in ('p', 'b'):
            lib.PrintSuccess(f'Potential link: {k}')
            lib.PrintHighSeverity('\nWarning: High Severity Item Found\n')
def ssh_keys_search(displaymode, page):
    """Scan a fetched page for private SSH key headers.

    Args:
        displaymode: 's' to save hits to a file, 'p' to print them, 'b' for both.
        page: requests-style response object; only ``page.text`` is read.
    """
    lib.PrintStatus("Scanning for SSH Keys...")
    pagetext = page.text
    ssh_keys_identifiers = ["-----BEGIN OPENSSH PRIVATE KEY-----",
                            "-----BEGIN DSA PRIVATE KEY-----",
                            "-----BEGIN EC PRIVATE KEY-----",
                            # GENERALIZATION: RSA header was missing from the
                            # original list despite being the most common type.
                            "-----BEGIN RSA PRIVATE KEY-----"]
    for pattern in set(ssh_keys_identifiers):
        if pattern in pagetext:
            # BUG FIX: `displaymode == 's' or 'b'` was always truthy; the
            # print branch was unreachable.  'b' now takes both paths.
            if displaymode in ('s', 'b'):
                ssh_output = f'{curdir}/Output/SSHKeys.txt'
                makedirs(dirname(ssh_output), exist_ok=True)
                with open(ssh_output, 'a') as gofile:
                    gofile.write(f'SSH Key: {pattern}\n')
            if displaymode in ('p', 'b'):
                lib.PrintSuccess(f'SSH Key: {pattern}')
                lib.PrintHighSeverity('\nWarning: High Severity Item Found\n')
def generic_search(key, displaymode, page, repo_crawl, verbosity):
    """Scan a page using a pattern from ``lib.patterns_dict``.

    Args:
        key: dict key into ``lib.patterns_dict``; ``key[0]`` is the display
            name and ``key[1]`` a high-severity flag.
        displaymode: 's' to save hits to a file, 'p' to print them, 'b' for both.
        page: requests-style response object; only ``page.text`` is read.
        repo_crawl: True when spidering a whole repo; status output is then
            gated by *verbosity*.
        verbosity: 'on'/'off' — controls status output during repo crawls.
    """
    if repo_crawl is False or verbosity == 'on':
        lib.PrintStatus(f"Searching for {key[0]} keys...")
    pagetext = page.text
    for k in re.findall(lib.patterns_dict[key], pagetext):
        if key[1] is True:
            lib.PrintHighSeverity("Warning: High Severity Item Found")
        # BUG FIX: `displaymode == 's' or 'b'` was always truthy; the print
        # branch was unreachable.  'b' now takes both paths.
        if displaymode in ('s', 'b'):
            gen_output = f'{curdir}/Output/{key[0]}.txt'
            makedirs(dirname(gen_output), exist_ok=True)
            with open(gen_output, 'a') as gofile:
                gofile.write(f'Potential Key: {k}\n')
        if displaymode in ('p', 'b'):
            lib.PrintSuccess(f'Potential Key: {k}')
def misc_database_secrets(displaymode, page, repo_crawl, verbosity):
    """Scan a page for common database credential variable names.

    Args:
        displaymode: 's' to save hits to a file, 'p' to print them, 'b' for both.
        page: requests-style response object; only ``page.text`` is read.
        repo_crawl: True when spidering a whole repo; status output is then
            gated by *verbosity*.
        verbosity: 'on'/'off' — controls status output during repo crawls.
    """
    if repo_crawl is False or verbosity == 'on':
        lib.PrintStatus("Searching for miscellaneous database secrets...")
    pagetext = page.text
    database_secrets = ['DB_USER', 'DB_PASSWORD', 'SUPERUSER_NAME',
                        'SUPERUSER_PASSWORD', 'DB_NAME']
    for ds in set(database_secrets):
        if ds in pagetext:
            lib.PrintHighSeverity('Warning: High Severity Item Found')
            # BUG FIX: `displaymode == 's' or 'b'` was always truthy; the
            # print branch was unreachable.  'b' now takes both paths.
            if displaymode in ('s', 'b'):
                db_output = f'{curdir}/Output/DatabaseSecrets.txt'
                makedirs(dirname(db_output), exist_ok=True)
                with open(db_output, 'a') as gofile:
                    gofile.write(f'Database Secret: {ds}\n')
            if displaymode in ('p', 'b'):
                # CONSISTENCY: use lib.PrintSuccess like every other scanner
                # instead of a bare print().
                lib.PrintSuccess(f"Database secret: {ds}")
def manual_setup():
    """Interactively collect scraper settings from the user.

    Returns:
        Tuple of (displaymode, scrape_input_method, limiter, repo_crawl,
        verbosity).
    """
    while True:
        displaymode = input("[p]rint to screen, [s]ave to file, or [b]oth: ")
        if displaymode.lower() not in ['p', 's', 'b']:
            lib.PrintError("Invalid Input")
            continue
        break
    while True:
        scrape_input_method = input("[m]anual input (single url) or load from [f]ile: ")
        if scrape_input_method.lower() not in ['m', 'f']:
            lib.PrintError("Invalid Input")
            continue
        break
    while True:
        try:
            limiter = int(input("Enter the time between requests, in seconds: "))
            if limiter < 0:
                continue
            break
        except ValueError:
            lib.PrintError("Invalid Input. Enter a positive integer.")
            continue
    lib.PrintStatus("\nIf provided links to one (or multiple) github profiles, Keyring can crawl all repositories for secrets.")
    lib.PrintStatus("However, this means Keyring WILL NOT FUNCTION CORRECTLY if provided links to other pages in the same text file.")
    lib.PrintStatus("Large profiles will also take a fairly long time, as Keyring fetches ALL files from ALL repos.\n")
    while True:
        repocrawlchoice = input("Enable repo crawling? [y]/[n]: ")
        if repocrawlchoice.lower() not in ['y', 'n']:
            lib.PrintError("Invalid Input.")
            continue
        elif repocrawlchoice.lower() == 'y':
            repo_crawl = True
            while True:
                lib.PrintHighSeverity("Warning: Turning on verbosity will output a LOT when spidering large profiles.")
                verbosity = input("Select verbosity for spidering: [off]/[on]: ")
                if verbosity.lower() not in ['off', 'on']:
                    lib.PrintError("Invalid Input.")
                    continue
                break
            break
        elif repocrawlchoice.lower() == 'n':
            repo_crawl = False
            verbosity = 'off'
            break
    while True:
        savechoice = input("Save choices as config file? \n[y]/[n]: ")
        if savechoice.lower() == 'n':
            break
        elif savechoice.lower() == 'y':
            # FIX: the original wrapped this check in a pointless
            # `while True` that always broke on the first pass.
            if isdir(f'{curdir}/KRconfig') is False:
                lib.PrintError(f"Config directory not detected in {curdir}...")
                lib.PrintStatus(f"Making config directory...")
                mkdir(f'{curdir}/KRconfig')
            configname = input("Enter the name for this configuration: ")
            with open(f'{curdir}/KRconfig/{configname}.ini', 'w') as cfile:
                cfile.write(f'''[initial_vars]
displaymode = {displaymode}

[scraping_vars]
scrape_input_method = {scrape_input_method}
limiter = {limiter}
repo_crawl = {repo_crawl}
verbosity = {verbosity}
''')
            break
        else:
            # FIX: the original silently re-prompted on invalid input.
            lib.PrintError("Invalid Input.")
    return displaymode, scrape_input_method, limiter, repo_crawl, verbosity
def manual_setup():
    """Interactively collect scraper settings (repo/link-type-aware variant).

    NOTE(review): this redefines the earlier manual_setup(); the later
    definition wins at import time — confirm which one is intended.

    Returns:
        Tuple of (displaymode, scrape_input_method, limiter, repo_crawl,
        link_type, directory_filtering, blacklisted_directories, verbosity).
    """
    while True:
        displaymode = input("[p]rint to screen, [s]ave to file, or [b]oth: ")
        # BUG FIX: original `== 'p' or 's' or 'b'` was always truthy, so ANY
        # non-empty input was accepted as a valid display mode.
        if displaymode.lower() in ('p', 's', 'b'):
            break
        lib.PrintError("Invalid Input.")
    while True:
        scrape_input_method = input("[m]anual input (single url) or load from [f]ile: ")
        # BUG FIX: same always-truthy `or` chain as above.
        if scrape_input_method.lower() in ('m', 'f'):
            break
        lib.PrintError("Invalid Input.")
    while True:
        try:
            # FIX: the original compared the int() result against "" — an
            # unreachable branch; ValueError already covers empty input.
            limiter = int(input("Enter the time between requests, in seconds: "))
            if limiter >= 0:
                break
        except ValueError:
            lib.PrintError("Invalid Input. Enter a positive integer.")
    lib.PrintStatus("If provided links to one (or multiple) github profiles, Keyring can crawl all repositories for secrets.")
    lib.PrintStatus("If provided links to github repositories, Keyring can crawl all files in that repository.")
    lib.PrintStatus("However, this means Keyring WILL NOT FUNCTION CORRECTLY if provided links to other pages in the same text file, or if profile and repo links are mixed.")
    lib.PrintStatus("Large profiles will also take a fairly long time, as Keyring fetches ALL files from ALL repos.")
    while True:
        repocrawlchoice = input("Enable repo crawling? \n[y]/[n]: ")
        if repocrawlchoice.lower() == 'y':
            repo_crawl = True
            while True:
                lib.PrintHighSeverity("Warning: Turning on verbosity will output a LOT when spidering large profiles.")
                verbosity_choice = input("Enable verbosity for spidering: [y]/[n]: ")
                # BUG FIX: `== 'y' or 'n'` accepted any input; also, the rest
                # of the module compares verbosity against 'on'/'off', so map
                # the y/n answer accordingly (the 'n' crawl branch below
                # already uses 'off').
                if verbosity_choice.lower() in ('y', 'n'):
                    verbosity = 'on' if verbosity_choice.lower() == 'y' else 'off'
                    break
                lib.PrintError("Invalid Input.")
            while True:
                link_type_input = input("Github [p]rofile links or Github [r]epository links?: ")
                if link_type_input.lower() == 'p':
                    link_type = 'profile'
                    break
                elif link_type_input.lower() == 'r':
                    link_type = 'repo'
                    break
                lib.PrintError("Invalid Input.")
            while True:
                lib.PrintStatus("Repositories may contain large directories with no value in crawling, such as dependency folders.")
                directory_filtering_status = input("Enable directory filtering: [y]/[n]: ")
                if directory_filtering_status.lower() == 'y':
                    directory_filtering = True
                    blacklisted_directories = input("Enter the directory names you wish to filter (separated by a single comma): ").split(',')
                    break
                elif directory_filtering_status.lower() == 'n':
                    directory_filtering = False
                    blacklisted_directories = []  # placeholder for configparser
                    break
                lib.PrintError("Invalid Input.")
            break
        elif repocrawlchoice.lower() == 'n':
            repo_crawl = False
            link_type = 'regular'
            directory_filtering = False
            blacklisted_directories = []
            verbosity = 'off'
            break
        else:
            lib.PrintError("Invalid Input.")
    while True:
        savechoice = input("Save choices as config file? \n[y]/[n]: ")
        if savechoice.lower() == 'n':
            break
        elif savechoice.lower() == 'y':
            # FIX: the original wrapped this check in a pointless
            # `while True` that always broke on the first pass.
            if isdir(f'{curdir}/KRconfig') is False:
                lib.PrintError(f"Config directory not detected in {curdir}...")
                lib.PrintStatus(f"Making config directory...")
                mkdir(f'{curdir}/KRconfig')
            configname = input("Enter the name for this configuration: ")
            with open(f'{curdir}/KRconfig/{configname}.ini', 'w') as cfile:
                cfile.write(f'''[initial_vars]
displaymode = {displaymode}

[scraping_vars]
scrape_input_method = {scrape_input_method}
limiter = {limiter}
repo_crawl = {repo_crawl}
link_type = {link_type}
directory_filtering = {directory_filtering}
blacklisted_directories = {blacklisted_directories}
verbosity = {verbosity}
''')
            break
        else:
            lib.PrintError("Invalid Input.")
    return displaymode, scrape_input_method, limiter, repo_crawl, link_type, directory_filtering, blacklisted_directories, verbosity