def generate_auth_token(section, config_file=CREDENTIALS_FILE):
    """Interactively create an OAuth token and store it under ``section``.

    Prompts for username/password (and a 2FA code on github.com), authorizes
    against either github.com or a GitHub Enterprise instance, then appends
    the token (plus the enterprise URL, if any) to ``config_file``.

    Returns the new token string.
    """
    from getpass import getpass  # local import: read the password without echoing

    def read_two_factor():
        # Re-prompt until a non-empty 2FA code is entered.
        code = ''
        while not code:
            code = raw_input('Enter 2FA code: ')
        return code

    c = ConfigParser()
    c.add_section(section)
    username = raw_input('Enter GitHub username: ')
    # NOTE(review): this prompt line was redacted in the original; reconstructed
    # with getpass — confirm against the upstream source.
    password = getpass('Enter GitHub password for {0}: '.format(username))
    enterprise_url = raw_input(
        'Enterprise URL (leave empty if using github.com): ')
    if enterprise_url:
        g = GitHubEnterprise(enterprise_url)
        auth = g.authorize(username, password, DEFAULT_SCOPE, OAUTH_NAME,
                           OAUTH_SITE)
        c.set(section, 'url', enterprise_url)
    else:
        # The module-level authorize() builds its own client, so the unused
        # `g = GitHub()` local was removed.
        auth = authorize(username, password, DEFAULT_SCOPE, OAUTH_NAME,
                         OAUTH_SITE, two_factor_callback=read_two_factor)
    c.set(section, 'token', auth.token)
    # BUG FIX: honour the config_file parameter (was hard-coded CREDENTIALS_FILE).
    with open(config_file, 'a+') as f:
        c.write(f)
    return auth.token
def __init__(self, conc=200, procs=4):
    """Build a scanner.

    conc  -- max worker threads used when scanning an org's repos
    procs -- number of worker processes used when scanning all orgs
    """
    # Set while sleeping on the API rate limit so the warning prints only once.
    self.RateWarning = False
    self.conc = conc
    self.procs = procs
    #GitHub API wrapper
    # NOTE(review): verify=False disables TLS certificate verification for the
    # Enterprise endpoint — confirm this is intentional.
    self.GH = GitHubEnterprise(os.getenv("GITHUB_URL"),
                               token=os.getenv("GITHUB_AUTH"),
                               verify=False)
    # Module-driven manifest-file scanner (definitions under ./modules).
    self.FILESCANNER = Scanner("./modules", "modules.json")
def __init__(self, public_gh_token, enterprise_gh_creds=None,
             enterprise_url="https://github.ibm.com"):
    """Authenticate against public GitHub and, optionally, an Enterprise instance.

    public_gh_token      -- OAuth token for github.com
    enterprise_gh_creds  -- optional (login, token/password) pair for Enterprise
    enterprise_url       -- Enterprise base URL (generalized: was hard-coded
                            to https://github.ibm.com; default preserves the
                            old behavior for existing callers)
    """
    self._public_token = public_gh_token
    self._gh = login(token=public_gh_token)
    if enterprise_gh_creds:
        self._enterprise_login = enterprise_gh_creds[0]
        self._enterprise_token = enterprise_gh_creds[1]
        gh = GitHubEnterprise(enterprise_url)
        self._gh_ent = gh.login(self._enterprise_login,
                                password=self._enterprise_token)
def main():
    """CLI entry point: ensure credentials exist for the chosen profile, build
    the right client (public or Enterprise) and run the selected sub-command.
    """
    args = parse_args()
    c = ConfigParser()
    # Check if token already exists or generate one
    c.read(args.file)
    if args.profile not in c.sections():
        # BUG FIX: `print` was a Python 2 statement, but `c[args.profile]`
        # below requires Python 3's ConfigParser mapping protocol — use
        # print() so the function is valid Python 3 (and unchanged on 2).
        print(Colors.warn(
            "Could not find profile '%s' in '%s', generating new credentials."
            % (args.profile, args.file)))
        token = generate_auth_token(args.profile, args.file)
        c.read(args.file)
    profile = c[args.profile]
    enterprise_url = profile.get('url', None)
    token = profile.get('token')
    if enterprise_url:
        g = GitHubEnterprise(enterprise_url, token=token)
    else:
        g = GitHub(token=token)
    # Dispatch to the sub-command selected by parse_args.
    args.command(args, handle=g)
def client(self):
    """Unauthenticated GitHub client"""
    # An Enterprise base URL in the app config selects the Enterprise client.
    enterprise_url = current_app.config.get("GITHUBAPP_URL")
    if not enterprise_url:
        return GitHub()
    return GitHubEnterprise(
        enterprise_url,
        verify=current_app.config["VERIFY_SSL"],
    )
def __init__(self, url_github='https://github.com'):
    """
    Create Github object
    Enterprise or public Github (default)
    Auth from token if available
    Else from username/pwd (read from a "credentials.env" file)
    """
    self.url_github = url_github
    load_dotenv(dotenv_path='credentials.env')
    username = os.environ.get('github-username')
    password = os.environ.get('github-password')
    token = os.environ.get('github-token')
    # Public github.com and Enterprise differ only in the client class/URL,
    # so pick the factory once instead of duplicating the credential logic
    # across two identical branches.
    if url_github == 'https://github.com':
        def make_client(**creds):
            return GitHub(**creds)
    else:
        def make_client(**creds):
            return GitHubEnterprise(url=url_github, **creds)
    if token is not None:
        self.api = make_client(token=token)
        self.test_connection('Wrong username/password')
    elif (username is not None) and (password is not None):
        self.api = make_client(username=username, password=password)
        self.test_connection('Scopes must be "read:user, repo" at least')
    else:
        raise Exception(
            'No credentials provided: They should be in a "credentials.env" file')
def client(self):
    """Unauthenticated GitHub client"""
    # Presence of GITHUBAPP_URL in the app config selects the Enterprise client.
    enterprise_url = self._app.config.get('GITHUBAPP_URL')
    return GitHubEnterprise(enterprise_url) if enterprise_url else GitHub()
def get_gh_client(self, url, key):
    """Return a token-authenticated client: Enterprise when a base URL is
    given, public github.com otherwise."""
    if not url:
        return GitHub(token=key)
    return GitHubEnterprise(url, token=key)
else: release_repos.append(element) return release_repos except: print("ファイルが存在しません ", repository_list_file) return release_repos def print_all_releases(repo): for rel in repo.releases(): print("print relese: ", rel.tag_name) file_name = 'rels/' + rel.tag_name f = open(file_name, 'w') f.write(rel.body) print(">>>write: ", file_name) f.close() if __name__ == '__main__': gh = GitHubEnterprise(url=html_url, token=token) org = gh.organization(org_name) repos = org.repositories() # get all repositories in organization rel_repos = read_file(repository_list_file) # read release repository if len(rel_repos) == 0: print("exit") sys.exit(1) for repo in repos: if repo.name in rel_repos: # get only if release_repos print(">>> --- start ", repo.name, " ---") print_all_releases(repo)
""" # Make sure we're up to date git.fetch() # Make sure we can do a clean checkout git.reset(hard=True) git.clean('-dfx') git.checkout('origin/' + base) # Merge the working tree, but don't modify the index git.merge('origin/' + head, no_commit=True) # Check the PR! check() def check_open_pull_requests(repo): with temp_chdir(env.WORK_TREE): for pr in repo.pull_requests(state='open'): print('Checking pull request #{}...'.format(pr.number)) try: merge_and_check(pr.base.ref, pr.head.ref) except ErrorReturnCode as e: print('✘ Problem: {}'.format(e), e.exit_code) else: print('✔ Looks good!') if __name__ == '__main__': gh = GitHubEnterprise(env.GITHUB_URL, env.GITHUB_USERNAME, env.GITHUB_TOKEN) repo = gh.repository(env.GITHUB_OWNER, env.GITHUB_REPO) check_open_pull_requests(repo)
from datetime import datetime from subprocess import Popen, PIPE #change this to your username USER_NAME = '' # access token stored in file in home directory called .gt_git_credentials CREDENTIALS_FILE = path.expanduser("~") + "/.gt_git_credentials" SRC_DIR = path.expanduser("~/src/gt-github") ERROR_LOG = SRC_DIR + "/gt_git_error.log" with open(CREDENTIALS_FILE, 'r') as fd: token = fd.readline().strip() error_log = open(ERROR_LOG, 'a+') gt_gh = GitHubEnterprise('https://github.gatech.edu') gt_gh.login(USER_NAME, token=token) repos = gt_gh.repositories() chdir(SRC_DIR) try: for r in repos: output, err = "", "" repo_path = SRC_DIR + '/' + str(r).split('/')[-1] if not path.isdir(repo_path): makedirs(repo_path) chdir(SRC_DIR) process = Popen( ['git', 'clone', 'https://github.gatech.edu/' + str(r)], stdout=PIPE, stderr=PIPE)
""" # Make sure we're up to date git.fetch() # Make sure we can do a clean checkout git.reset(hard=True) git.clean('-dfx') git.checkout('origin/' + base) # Merge the working tree, but don't modify the index git.merge('origin/' + head, no_commit=True) # Check the PR! check() def check_open_pull_requests(repo): with temp_chdir(env.WORK_TREE): for pr in repo.pull_requests(state='open'): print('Checking pull request #{}...'.format(pr.number)) try: merge_and_check(pr.base.ref, pr.head.ref) except ErrorReturnCode as e: print('✘ Problem: {}'.format(e), e.exit_code) else: print('✔ Looks good!') if __name__ == '__main__': gh = GitHubEnterprise( env.GITHUB_URL, env.GITHUB_USERNAME, env.GITHUB_TOKEN) repo = gh.repository(env.GITHUB_OWNER, env.GITHUB_REPO) check_open_pull_requests(repo)
class Github:
    """Thin wrapper around github3.py for public GitHub or GitHub Enterprise.

    Authenticates from a "credentials.env" file and exposes simple
    repository/file helpers (load_repo, create_file, update_file).
    """

    def __init__(self, url_github='https://github.com'):
        """
        Create Github object
        Enterprise or public Github (default)
        Auth from token if available
        Else from username/pwd
        """
        self.url_github = url_github
        load_dotenv(dotenv_path='credentials.env')
        username = os.environ.get('github-username')
        password = os.environ.get('github-password')
        token = os.environ.get('github-token')
        if url_github == 'https://github.com':
            if token is not None:
                self.api = GitHub(token=token)
                self.test_connection('Wrong username/password')
            elif (username is not None) and (password is not None):
                self.api = GitHub(username=username, password=password)
                self.test_connection('Scopes must be "read:user, repo" at least')
            else:
                raise Exception(
                    'No credentials provided: They should be in a "credentials.env" file')
        else:
            if token is not None:
                self.api = GitHubEnterprise(url=url_github, token=token)
                self.test_connection('Wrong username/password')
            elif (username is not None) and (password is not None):
                self.api = GitHubEnterprise(url=url_github, username=username,
                                            password=password)
                self.test_connection('Scopes must be "read:user, repo" at least')
            else:
                raise Exception(
                    'No credentials provided: They should be in a "credentials.env" file')

    def test_connection(self, message):
        """Verify the client can authenticate; raise Exception(message) if not."""
        print('testing connection to {}'.format(self.url_github))
        try:
            self.api.me()
        # BUG FIX: narrowed a bare `except:` that also swallowed
        # KeyboardInterrupt/SystemExit.
        except Exception:
            raise Exception(message)

    def load_repo(self, orga_name, repo_name):
        """Load and cache the repository `orga_name/repo_name` on self.repo."""
        self.repo = self.api.repository(orga_name, repo_name)

    def create_file(self, path=None, message=None, content=None, branch=None):
        """Create `path` on `branch` with commit `message`; return the API result."""
        li_branch = [e.name for e in self.repo.branches()]
        assert branch in li_branch, 'branch {} does not exist'.format(branch)
        try:
            res = self.repo.create_file(path=path, message=message,
                                        content=content, branch=branch)
            print('file {} created'.format(path))
            return res
        except Exception as e:
            raise Exception('create_file failed: {}'.format(e))

    def update_file(self, path=None, message=None, content=None, branch=None):
        """Update `path` on `branch` with new `content`; return the API result."""
        dir_contents = self.repo.directory_contents('.', return_as=dict,
                                                    ref=branch)
        target_file = dir_contents[path]
        try:
            res = target_file.update(message, content, branch=branch)
            print('file {} updated'.format(path))
            return res
        except Exception as e:
            # BUG FIX: the error message said 'create_file failed'
            # (copy-paste from create_file).
            raise Exception('update_file failed: {}'.format(e))
class GHScanner:
    """Scan GitHub Enterprise organizations/repositories for dependency-confusion
    candidates: locate manifest/lock files in each repo root and hand their
    contents to a module-driven file Scanner.
    """

    def __init__(self, conc=200, procs=4):
        """conc: max threads per org scan; procs: number of scanning processes."""
        # Set while sleeping on the API rate limit so the warning prints once.
        self.RateWarning = False
        self.conc = conc
        self.procs = procs
        #GitHub API wrapper
        # NOTE(review): verify=False disables TLS certificate verification —
        # confirm this is intended.
        self.GH = GitHubEnterprise(os.getenv("GITHUB_URL"),
                                   token=os.getenv("GITHUB_AUTH"),
                                   verify=False)
        self.FILESCANNER = Scanner("./modules", "modules.json")

    #scans all orgs in git server
    def scan_all_orgs(self):
        """Scan every organization visible on the server.

        Orgs are split into `procs` striped chunks, each scanned in its own
        process; repos that errored inside a child are retried once here.
        Returns a summary dict — note 'repos_scanned'/'vulnerable'/'sus' are
        left at 0 here (see get_dac_recap for those totals); returns None if
        the scan itself raises.
        """
        starttime = time.time()
        results = {
            'orgs_scanned': 0,
            'repos_scanned': 0,
            'vulnerable': 0,
            'sus': 0,
            'time_elapsed': 0,
            'orgs': []
        }
        print("Retrieving org list...")
        orgslist = self.check_orgs()
        print(f"Done - {len(orgslist)} items retrieved!")
        try:
            #chunk the list of orgs for co-processing
            orgchunks = list(self.chunks(orgslist, self.procs))
            processes = []
            rets = []
            #run each chunk with a different process
            resultqueue = multiprocessing.Queue()
            for chunk in orgchunks:
                tmp = multiprocessing.Process(target=self.check_org_chunk,
                                              args=(resultqueue, chunk,
                                                    self.conc, self.procs))
                processes.append(tmp)
                tmp.start()
            # Drain one result list per process (before join()).
            for process in processes:
                res = resultqueue.get()
                rets = rets + res
            for process in processes:
                process.join()
            results['orgs'] = rets
            #error check: retry each repo that failed inside a child process
            for org in results['orgs']:
                if 'errors' in org:
                    for repo in org['errors']:
                        print(f"Retrying: {repo}...")
                        tmp = self.check_single_repo(org['org'], repo)
                        index = next((index for (index, d) in enumerate(org['repos'])
                                      if d["repo"] == repo), None)
                        org['repos'][index] = tmp
            #do recap
            results['time_elapsed'] = time.time() - starttime
            results['orgs_scanned'] = len(orgslist)
            return results
        except Exception as e:
            print(f"Error: {e} in scan_all_orgs")

    # get list of orgs
    def check_orgs(self):
        """Return the login names of all organizations; re-raises API errors."""
        results = []
        try:
            orgs = self.GH.all_organizations()
            for org in orgs:
                results.append(org.login)
        except Exception as e:
            #print(f"Error: {e} in check_orgs")
            raise
        return results

    #checks a single gh organization
    def check_single_org(self, org):
        """Scan one organization, each repo on its own thread.

        Returns {'org', 'repos', 'scan_time'[, 'errors']} with repos sorted
        case-insensitively by name; 'errors' is dropped when empty.
        """
        jsonresult = {'org': org, 'repos': [], 'errors': []}
        starttime = time.time()
        try:
            #load up the repos for this org
            repos = self.check_repos(org)
            #check each repo with a new thread (up to n=conc threads)
            with concurrent.futures.ThreadPoolExecutor(
                    max_workers=self.conc) as executor:
                fut = [
                    executor.submit(self.check_single_repo, org, repository)
                    for repository in repos
                ]
                for r in concurrent.futures.as_completed(fut):
                    #if there is an error, add it to the error list
                    scanresult = r.result()
                    if 'errors' in scanresult:
                        jsonresult['errors'].append(scanresult['repo'])
                    jsonresult['repos'].append(scanresult)
        except Exception as e:
            print(f"Error: {e} in check_single_org({org})")
            jsonresult['errors'].append(f"check_single_org({org})")
        if len(jsonresult['errors']) == 0:
            del jsonresult['errors']
        jsonresult['scan_time'] = time.time() - starttime
        jsonresult['repos'] = sorted(jsonresult['repos'],
                                     key=lambda i: str.casefold(i['repo']))
        return jsonresult

    # gets a list of repos for a git org
    def check_repos(self, org):
        """Return the names of all repositories (type="all") in `org`; re-raises."""
        ret = []
        organization = self.GH.organization(org)
        repos = organization.repositories(type="all")
        try:
            for repo in repos:
                ret.append(repo.name)
        except Exception as e:
            print(f"Error: {e} in check_repos")
            raise
        return ret

    # checks a single repo for dependency confusion (now with threading!)
    def check_single_repo(self, org, repo):
        """Scan one repository: honour the API rate limit, find manifest files,
        download them and scan their contents.

        Returns {'repo', 'files'[, 'errors']} with files sorted case-insensitively.
        """
        jsonresult = {'repo': repo, 'files': [], 'errors': []}
        try:
            #check rate limits and sleep if need be
            core = self.GH.rate_limit()['resources']['core']
            if int(core['remaining']) < 500:
                resettime = int(core['reset'])
                sleepamount = resettime - int(time.time())
                #if we havent said we are pausing yet, do so now
                if not self.RateWarning:
                    print(
                        f"GIT API RATE LIMIT HIT, SLEEPING FOR: {sleepamount} seconds"
                    )
                    self.RateWarning = True
                #pause until the rate limiter resets
                time.sleep(sleepamount + 2)
                self.RateWarning = False
            repository = self.GH.repository(org, repo)
            #grab packages from this repo and pull the dependencies from them
            files = self.check_repo(repository)
            filecontents = self.get_all_manifest_contents(files, repository)
            for file in filecontents:
                if not file['override']:
                    #scan it
                    scanresult = self.FILESCANNER.scan_contents(
                        file['file'], file['content'])
                else:
                    # Overridden manifests are reported as clean without scanning.
                    scanresult = {
                        'result': {
                            'file': file['file'],
                            'vulnerable': [],
                            'sus': [],
                            'override': True
                        }
                    }
                #if we had errors, bubble them up
                if 'errors' in scanresult:
                    jsonresult['errors'].append(scanresult['errors'])
                else:
                    jsonresult['files'].append(scanresult['result'])
            #remove empty errors
            if len(jsonresult['errors']) == 0:
                del jsonresult['errors']
            jsonresult['files'] = sorted(jsonresult['files'],
                                         key=lambda i: str.casefold(i['file']))
        except Exception as e:
            # "new thread"/"repository is empty" messages are treated as
            # expected noise and stay silent.
            if "new thread" not in str(e) and "repository is empty" not in str(
                    e):
                print(f"{org} : {repo} : Error: {e} in check_single_repo")
        return jsonresult

    #traverses a git repo and finds manifest files
    def check_repo(self, repo):
        """Return [{'name', 'override'}] for every manifest/lock file in the
        repo root recognised by a scanner module.

        A module's config file may mark that module's manifest/lock files as
        overridden; a lock file overrides its sibling manifest files.
        """
        files = []
        try:
            contents = repo.directory_contents("", return_as=dict)
            overrides = []
            for file in contents:
                f = contents[file]
                for module in self.FILESCANNER.MODULES['modules']:
                    if f.path.lower(
                    ) in module['manifest_file'] or f.path.lower(
                    ) in module['lock_file'] or ('config_file' in module
                                                 and f.path.lower()
                                                 == module['config_file']):
                        if 'config_file' in module and f.path.lower(
                        ) == module['config_file']:
                            # NOTE(review): bare call — likely should be
                            # self.get_single_manifest_contents(...); as written
                            # it requires a module-level helper of the same
                            # name to exist. TODO confirm.
                            if module['config_parse_func'](
                                    get_single_manifest_contents(
                                        repo, {
                                            'name': f.path,
                                            'override': False
                                        })):
                                overrides = overrides + module[
                                    'manifest_file'] + module['lock_file']
                        else:
                            files.append({'name': f.path, 'override': False})
                            if f.path.lower() in module['lock_file']:
                                for file in module['manifest_file'] + module[
                                        'lock_file'][:-1]:
                                    if not f.path.lower() == file.lower():
                                        overrides.append(file)
                        break
            overrides = list(set(overrides))
            for f in files:
                if f['name'] in overrides:
                    f['override'] = True
        except Exception as e:
            #print(f"Error: {e} in check_repo")
            raise
        return files

    #grabs manifest file contents from git (but with threads this time!)
    def get_single_manifest_contents(self, repo, file):
        """Fetch one manifest's decoded contents; overridden files return empty content."""
        try:
            if file['override']:
                return {'file': file['name'], 'content': '', 'override': True}
            content = repo.file_contents(file['name']).decoded.decode("utf-8")
            return {
                'file': file['name'],
                'content': content,
                'override': False
            }
        except Exception as e:
            #print(f"Error: {e} in ((unknown)) get_single_manifest_contents")
            raise

    #grabs all manifest file contents from git
    def get_all_manifest_contents(self, files, repo):
        """Fetch every manifest in `files` concurrently (5 worker threads)."""
        if not files or len(files) == 0:
            return []
        filecontents = []
        try:
            with concurrent.futures.ThreadPoolExecutor(
                    max_workers=5) as executor:
                fut = [
                    executor.submit(self.get_single_manifest_contents, repo,
                                    file) for file in files
                ]
                for r in concurrent.futures.as_completed(fut):
                    tmp = r.result()
                    if tmp is not None:
                        filecontents.append(r.result())
        except Exception as e:
            #print(f"Error: {e} in get_all_manifest_contents")
            raise
        return filecontents

    #Yield n number of striped chunks from l.
    @staticmethod
    def chunks(l, n):
        for i in range(0, n):
            yield l[i::n]

    #checks a list of orgs for dependency confusion
    @staticmethod
    def check_org_chunk(resultqueue, orgs, conc, procs):
        """Process entry point: scan `orgs` with a fresh GHScanner and put the
        list of per-org results on `resultqueue` (always, even on error)."""
        results = []
        try:
            ghscanner = GHScanner(conc, procs)
            for org in orgs:
                res = ghscanner.check_single_org(org)
                results.append(res)
                print(f"{org} ({res['scan_time']})")
        except Exception as e:
            print(f"Error: {e} in check_org_chunk")
        resultqueue.put(results)

    #get recap info for the dac.py file
    @staticmethod
    def get_dac_recap(results):
        """Total repos scanned plus vulnerable/suspicious hit counts from a
        scan_all_orgs result dict."""
        r = 0
        v = 0
        s = 0
        for org in results['orgs']:
            r += len(org['repos'])
            for repo in org['repos']:
                for file in repo['files']:
                    v += len(file['vulnerable'])
                    s += len(file['sus'])
        return {'repos_scanned': r, 'vulnerable': v, 'sus': s}

    #writes json output to filename
    @staticmethod
    def write_output_file(resultsfile, resultsjson, print_name=True):
        """Dump `resultsjson` to `resultsfile` as indented JSON; optionally
        print the absolute path."""
        try:
            jsonoutput = json.dumps(resultsjson, indent=4)
            with open(resultsfile, "w") as file:
                file.write(jsonoutput)
            if print_name:
                print(os.path.realpath(resultsfile))
        except Exception as e:
            print(f"Error: {e} in write_output_file")