def build_session():
    """Create a ``requests`` session pre-configured for the GitLab API.

    Reads the parsed CLI arguments for optional client-certificate and
    proxy settings, and pulls the API token from the environment variable
    named by ``constants.Environment.gitlab_api_token()``.

    Returns:
        requests.Session: session with the PRIVATE-TOKEN header, proxy
        map, and TLS verification configured.
    """
    command_args = types.Arguments()

    def get_cert():
        # No --cert supplied: fall back to normal CA-bundle verification.
        if not command_args.cert:
            return True
        return command_args.cert

    def get_proxies():
        # A single proxy URL is applied to both http and https traffic.
        proxy_url = command_args.proxy
        if not proxy_url:
            return {}
        return {
            "http": proxy_url,
            "https": proxy_url,
        }

    # requests.Session() is the documented constructor; the lowercase
    # requests.session() used before is a deprecated alias for it.
    session = requests.Session()
    session.headers.update({
        "PRIVATE-TOKEN": os.getenv(constants.Environment.gitlab_api_token()),
        "USER-AGENT": "token-hunter"
    })
    session.proxies = get_proxies()
    session.verify = get_cert()
    return session
def analyze():
    """Walk each requested group, gather its projects, issues, issue
    comments and (optionally) snippets, then hand everything collected to
    the secret-sniffing helpers. Side effects only; returns nothing.
    """
    cli_args = types.Arguments()
    found_issues = []
    found_comments = []
    member_projects = {}
    snippet_map = {}

    for group_name in cli_args.group:
        details = groups.get(group_name)
        if details is False:
            warning("[!] %s not found, skipping", group_name)
            continue

        owned_by_group = projects.all_group_projects(group_name)
        group_members = members.get_all(group_name)

        if cli_args.members:
            # member_projects keeps accumulating across group iterations.
            for person in group_members:
                member_projects.update(projects.all_member_projects(person))

        combined_projects = {**owned_by_group, **member_projects}

        log_group(details)
        log_group_projects(owned_by_group)
        log_members(group_members)
        if cli_args.members:
            log_members_projects(member_projects)

        if cli_args.snippets:
            info("[*] Fetching snippets for %s projects",
                 len(combined_projects))
            snippet_map = snippets.get_all(
                [owned_by_group, member_projects])

        if cli_args.issues:
            info("[*] Fetching issues & comments for %s projects",
                 len(combined_projects))
            # Each project (personal or group) contributes its issues; each
            # issue in turn contributes its comment thread.
            for proj_id, proj_url in combined_projects.items():
                for issue in issues.get_all(proj_id, proj_url):
                    found_issues.append(issue)
                    found_comments.extend(
                        issue_comments.get_all(proj_id, issue.ident,
                                               issue.web_url))

    get_snippet_secrets(snippet_map, combined_projects, cli_args)
    get_issues_comments_secrets(found_issues, found_comments,
                                combined_projects, cli_args)
from logging import info, warning

from api import gitlab
from utilities import types, validate

# Module-level API client. NOTE: this deliberately rebinds the imported
# ``gitlab`` module name to a GitLab client instance.
gitlab = gitlab.GitLab(types.Arguments().url)


def get_all(project_id, project_url):
    """Fetch every issue for a single project.

    Args:
        project_id: numeric GitLab project id passed to the API.
        project_url: human-readable project URL, used only for logging.

    Returns:
        list: ``types.Issue`` objects (iid, web_url, description); empty
        when the API call fails validation or returns nothing.
    """
    details = gitlab.get_issues(project_id)
    if not validate.api_result(details):
        return []
    # "[*]" messages are informational throughout this project, so log at
    # info level; ``warning`` is reserved for "[!]" failure messages.
    info("[*] Found %s issues for project %s", len(details), project_url)
    return [
        types.Issue(item['iid'], item['web_url'], item['description'])
        for item in details
    ]


def sniff_secrets(issue):
    """Run the secrets monitor over one issue's description text."""
    monitor = types.SecretsMonitor()
    return monitor.sniff_secrets({issue.web_url: issue.description})
import datetime
from logging import info

from api import identity
from utilities import types

args = types.Arguments()

# Shared display format for every logged timestamp.
_TIME_FORMAT = "%d/%m/%Y %H:%M:%S"


def get_current(timezone):
    """Return the current time in *timezone* formatted dd/mm/YYYY HH:MM:SS."""
    now = datetime.datetime.now(timezone)
    return now.strftime(_TIME_FORMAT)


def log_time_stamp_start():
    """Log the start banner (UTC time + public IP) unless args.timestamp is set."""
    if args.timestamp:
        return
    info("##### token-hunter started at UTC %s from IP %s##### ",
         get_current(datetime.timezone.utc),
         identity.get_public_ip())


def log_time_stamp_end():
    """Log the finish banner (UTC time) unless args.timestamp is set."""
    if args.timestamp:
        return
    info("##### token-hunter finished at UTC %s ##### ",
         get_current(datetime.timezone.utc))
def analyze():
    """Collect issues, issue comments, merge requests, MR comments, CI job
    logs and snippets for the requested groups OR projects, then run the
    secret sniffers over everything collected.

    All options come from ``types.Arguments()``; results are reported via
    the ``get_*_secrets`` helpers (side effects only, no return value).
    """
    all_issues = []
    all_issue_comments = []
    all_merge_requests = []
    all_mr_comments = []
    all_job_logs = []
    all_members = []
    target_projects = {}     # explicitly requested projects: id -> repo URL
    group_projects = {}      # projects owned by the current group
    group_details = {}
    personal_projects = {}   # members' personal projects; accumulates across iterations
    all_snippets = {}
    args = types.Arguments()
    # Group mode and project mode share one loop: iterate whichever list
    # of targets was supplied on the command line.
    if args.group:
        iterator = args.group
    else:
        iterator = args.project
    for item in iterator:
        if args.group:
            group_details = groups.get(item)
            if not group_details:
                warning("[!] %s group not found, skipping", item)
                continue
            group_projects = projects.all_group_projects(item)
        if args.project:
            project_details = projects.project_details(item)
            if not project_details:
                warning("[!] %s project not found, skipping", item)
                continue
            target_projects.update(
                {project_details['id']: project_details['http_url_to_repo']})
        if args.members:
            # Group mode pulls the group's member list; project mode pulls
            # the single project's member list.
            if args.group:
                all_members = members.get_all_group_members(item)
            else:
                all_members = members.get_all_project_members(item)
            for member in all_members:
                personal_projects.update(projects.all_member_projects(member))
        # Union of everything discovered so far for this target.
        all_projects = {
            **group_projects,
            **personal_projects,
            **target_projects
        }
        if args.group:
            log_group(group_details)
            log_group_projects(group_projects)
        if args.members:
            log_members(all_members)
            log_members_projects(personal_projects)
        if args.snippets:
            info("[*] Fetching snippets for %s projects", len(all_projects))
            # NOTE(review): this reassigns (not updates) all_snippets each
            # iteration, so only the last target's snippets survive the
            # loop — confirm this is intended.
            all_snippets = snippets.get_all(
                [group_projects, personal_projects])
        if args.issues:
            info("[*] Fetching issues & comments for %s projects",
                 len(all_projects))
            # loop each project (personal or group)
            for project_id, project_url in all_projects.items():
                # loop each issue in the project and search for secrets in the description
                project_issues = issues.get_all(project_id, project_url)
                for issue in project_issues:
                    all_issues.append(issue)
                    # loop the comments for each issue searching for secrets in the body
                    comments = issue_comments.get_all(project_id, issue.ident,
                                                      issue.web_url)
                    for comment in comments:
                        all_issue_comments.append(comment)
        if args.mergerequests:
            info("[*] Fetching merge requests discussions for %s projects",
                 len(all_projects))
            for project_id, project_url in all_projects.items():
                project_merge_requests = merge_requests.get_all(
                    project_id, project_url)
                for mr in project_merge_requests:
                    all_merge_requests.append(mr)
                    # loop the comments for each merge request searching for secrets in the body
                    comments = merge_request_comments.get_all(
                        project_id, mr.ident, mr.web_url)
                    for comment in comments:
                        all_mr_comments.append(comment)
        if args.jobs:
            info("[*] Fetching CI job logs for %s projects",
                 len(all_projects))
            for project_id, project_url in all_projects.items():
                project_job_logs = job_logs.get_all(project_id, project_url)
                if len(project_job_logs) > 0:
                    for log in project_job_logs:
                        all_job_logs.append(log)
    # Hand every collected artifact to its matching secret sniffer.
    get_snippet_secrets(all_snippets, all_projects, args)
    get_issues_comments_secrets(all_issues, all_issue_comments,
                                all_projects, args)
    get_merge_reqs_comments_secrets(all_merge_requests, all_mr_comments,
                                    all_projects, args)
    get_job_log_secrets(all_job_logs, all_projects, args)
def args():
    """Return the parsed command-line arguments object."""
    parsed = types.Arguments()
    return parsed