def find_issues_uneven_person_commit(repo):
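	# Count commits per (anonymized) committer and write the distribution to a CSV feature file.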

	committer_map = {}
	person_map = anonymize_persons()

	repo = github3.repository(repo[0], repo[1])
	all_commits = repo.commits()

	for commit in all_commits:
		committer = commit.commit.committer["name"]
		
		if person_map[committer] in committer_map:
			committer_map[person_map[committer]] += 1
		else:
			committer_map[person_map[committer]] = 1

	f = open('./features/repo_'+str(repo_count)+'_uneven_person_commits.csv', 'wt')
	
	try:
		writer = csv.writer(f)
		writer.writerow( ('Committer', 'Commit_Count'))
	
		for (key, value) in committer_map.items():
			writer.writerow( (key, value))

	finally:
		f.close()
def find_issues_uneven_weekly_commits(repo):
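    # Note: despite the function name, this writes milestone completion data:
    # creation, due, and closed days (relative to a fixed reference date) for
    # each closed milestone, saved to a CSV feature file.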

    repo = github3.repository(repo[0], repo[1])
    all_milestones = repo.milestones(state='closed',
                                     sort='due_date',
                                     direction='asc')

    for per in xrange(1, 5):
        f = open(
            './features/repo_' + str(repo_count) +
            '_early_smoke_milestone_completion_trend.csv', 'wt')

        try:
            writer = csv.writer(f)
            writer.writerow(
                ('Milestone_No', 'Creation_Day', 'Due_Day', 'Closed_Day'))
            relative_date = dateutil.parser.parse('2015-12-31T00:00:01Z')

            for milestone in all_milestones:
                if milestone.due_on is not None:
                    milestone_no = milestone.number
                    creation_day = (milestone.created_at - relative_date).days
                    due_day = (milestone.due_on - relative_date).days
                    closed_day = (dateutil.parser.parse(milestone.closed_at) -
                                  relative_date).days
                    #print closed_day
                    writer.writerow(
                        (milestone_no, creation_day, due_day, closed_day))

        finally:
            f.close()
Example 3
def get_issues_list(request,user,repo, format=None):
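	# Bucket the repository's open issues (excluding pull requests) by age:
	# opened today, within the last week, or longer ago.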
	if request.method == 'GET':
		
		count_dict = {}   #response object with individual count
		count_dict["day"] = 0
		count_dict["week"] = 0
		count_dict["longer"] = 0
		
		today = datetime.now()
		github = github3.login("gc-Held", "secret_password") # user credentials
		repo = github3.repository(user, repo)   #opens the repository 
		open_issues = [i for i in repo.iter_issues()] # lists all the issues
		count_dict["total"] = len(open_issues)
		for i in open_issues:
			
			if i.pull_request is None:  # only count real issues; pull requests also appear in the issues list
				created_at  = i.created_at.replace(tzinfo=None) #makes the datetime naive if TZ
				dt = today - created_at
				if dt.days == 0:
					count_dict["day"]+=1
				elif dt.days > 0 and dt.days < 7:
					count_dict["week"]+=1
				else:
					count_dict["longer"]+=1
			else:
				count_dict["total"]-=1
		
		return Response(count_dict)
def find_issues_uneven_weekly_commits(repo):

	repo = github3.repository(repo[0], repo[1])
	all_milestones = repo.milestones(state='closed', sort='due_date', direction='asc')

	for per in xrange(1,5):
		f = open('./features/repo_'+str(repo_count)+'_early_smoke_milestone_completion_trend.csv', 'wt')
		
		try:
			writer = csv.writer(f)
			writer.writerow( ('Milestone_No', 'Creation_Day', 'Due_Day', 'Closed_Day'))
			relative_date = dateutil.parser.parse('2015-12-31T00:00:01Z')
				

			for milestone in all_milestones:
				if milestone.due_on is not None:
					milestone_no = milestone.number
					creation_day = (milestone.created_at - relative_date).days
					due_day = (milestone.due_on - relative_date).days
					closed_day = (dateutil.parser.parse(milestone.closed_at) - relative_date).days
					#print closed_day
					writer.writerow( (milestone_no, creation_day, due_day, closed_day))

		finally:
			f.close()
Example 5
    def find_project(self):
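        # Resolve the target project either directly by owner/name or via a
        # GitHub repository search, prompting the user when the search is ambiguous.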
        if self.project_owner:
            project = repository(repository=self.project_name, owner=self.project_owner)
            if project:
                self.found_project = project
                return
            self._error_nothing_found(True)

        search_result = list(self._client.search_repositories(
            'language:python {}'.format(self.project_name), number=10))

        repositories_count = len(search_result)

        if repositories_count == 1:
            self.found_project = search_result[0].repository
            self.printer.print('Found {}'.format(self.found_project))
            return

        if repositories_count == 0:
            self._error_nothing_found()

        repos = '\n'.join(['[{}]{}/{}'.format(i, r.repository.owner.login, r.repository.name)
                           for i, r in enumerate(search_result, start=1)])
        message = 'Found {count} projects:\n{repos}\n'.format(count=repositories_count, repos=repos)

        self.printer.print(message)
        self.found_project = search_result[self._ask(repos)].repository
def find_issues_uneven_person_commit(repo):

    committer_map = {}
    person_map = anonymize_persons()

    repo = github3.repository(repo[0], repo[1])
    all_commits = repo.commits()

    for commit in all_commits:
        committer = commit.commit.committer["name"]

        if person_map[committer] in committer_map:
            committer_map[person_map[committer]] += 1
        else:
            committer_map[person_map[committer]] = 1

    f = open(
        './features/repo_' + str(repo_count) + '_uneven_person_commits.csv',
        'wt')

    try:
        writer = csv.writer(f)
        writer.writerow(('Committer', 'Commit_Count'))

        for (key, value) in committer_map.items():
            writer.writerow((key, value))

    finally:
        f.close()
def find_issues_uneven_weekly_commits(repo):
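    # Count commits per ISO week number and write Week_No/Commit_Count rows to a CSV feature file.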

    weekly_commit_count = {}

    for i in xrange(1, 15):
        weekly_commit_count.setdefault(i, 0)

    repo = github3.repository(repo[0], repo[1])
    all_commits = repo.commits()

    for commit in all_commits:
        commit_date = commit.commit.committer["date"]
        week_no = dateutil.parser.parse(commit_date).isocalendar()[1]
        #print repo.commit(commit1.sha).commit.committer["date"]

        if week_no in weekly_commit_count:
            weekly_commit_count[week_no] += 1
        else:
            weekly_commit_count[week_no] = 1

    f = open(
        './features/repo_' + str(repo_count) + '_uneven_weekly_commits.csv',
        'wt')

    try:
        writer = csv.writer(f)
        writer.writerow(('Week_No', 'Commit_Count'))

        for (key, value) in weekly_commit_count.items():
            writer.writerow((key, value))

    finally:
        f.close()
Example 8
def checkout_pr_branch(local_parent_dir, pr_number, source_repo_name,
                       target_repo_name):
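    # Check out the pull request's head branch in the source repo and the
    # matching cross-repo branch in the target repo, cloning it first if needed.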
    os.chdir(local_parent_dir)
    src_repo = github3.repository('dimagi', source_repo_name)
    target_repo = github3.repository('dimagi', target_repo_name)

    src_pr = src_repo.pull_request(pr_number)
    checkout_branch(src_repo.name, src_pr.head.ref)
    cross_branch = get_cross_branch(target_repo, src_pr)

    if not os.path.exists(target_repo.name):
        print("Checking out {} for {}".format(cross_branch, target_repo_name))
        subprocess.call('git clone {}'.format(target_repo.clone_url),
                        shell=True)

    checkout_branch(target_repo.name, cross_branch)
    os.chdir(local_parent_dir)
def main():
    repo = github3.repository('dimagi', 'commcare-android')
    # Find first non-lts release
    for release in repo.releases():
        apk_assets = [a for a in release.assets() if is_release_apk(a.name)]
        if len(apk_assets) > 0:
            print(apk_assets[0].browser_download_url)
            return
Example 11
    def _build_head_html(self, base, head):        
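        # Clone the head repository, merge the PR head onto the base ref, and
        # build the site HTML, reporting progress via the status updater.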
        head_repo = github3.repository(*head.repo)
        base_repo = github3.repository(*base.repo)

        if os.path.exists(self._head_dir):
            shutil.rmtree(self._head_dir)
                
        self._updater.update(
            status='pending', description="Getting head repository.")
        clone_repo(head_repo.clone_url, self._head_dir)
        add_fetch_remote("upstream", base_repo.clone_url, 
                         cwd=self._head_dir)
        checkout_commit(base.ref, cwd=self._head_dir)
        merge_commit("origin", head.ref, cwd=self._head_dir)

        self._updater.update(
            status='pending', description="Building head website.")
        subprocess.check_call(build_html, shell=True, cwd=self._head_dir)
Example 12
File: repo.py Project: simonvpe/trj
    def __init__(self, login_str, repo, branch, credentials=None):
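        # Resolve the repository through an authenticated client when
        # credentials are given, otherwise fall back to anonymous access.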
        self.login = login_str
        self.repo_name = repo
        self.branch_name = branch

        if credentials is not None:
            github = login(username=credentials.username, password=credentials.password)

            self.repo = github.repository(self.login, self.repo_name)
        else:
            self.repo = repository(self.login, self.repo_name)
    def check_release(self):
        prerelease = self.arguments.get("--prerelease")
        repository = github3.repository('yetu', '/omnibus-atools')
        releases = repository.releases()
        # Get the first Release
        for release in releases:
            if release.draft == False and release.prerelease == prerelease:
                self.remote_version = release.tag_name
                if self.remote_version[0] == "v":
                    self.remote_version = self.remote_version[1:]
                self.remote_assets = release.assets(-1)
Example 14
    def login(self):
        if self.token:
            # login to github
            gh = github3.login(token=str(self.token))
            try:
                # test if we're actually logged in
                gh.me()
            except Exception as e:
                self.module.fail_json(msg="Failed to connect to Github: {}".format(e))

            self.repository = gh.repository(str(self.user), str(self.repo))
        else:
            self.repository = github3.repository(str(self.user), str(self.repo))
Example 15
    def _build_base_html(self, base):
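        # Clone the base repository at the requested ref and build the site
        # HTML, reporting progress via the status updater.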
        base_repo = github3.repository(*base.repo)

        if os.path.exists(self._base_dir):
            shutil.rmtree(self._base_dir)
        
        self._updater.update(
            status='pending', description="Getting base repository.")
        clone_repo(base_repo.clone_url, self._base_dir)
        checkout_commit(base.ref, cwd=self._base_dir)

        self._updater.update(
            status='pending', description="Building base website.")
        subprocess.check_call(build_html, cwd=self._base_dir, shell=True)
def find_issues_uneven_weekly_commits(repo):
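    # Count commits per ISO week for each (anonymized) person and write one
    # Week_No/Commit_Count CSV feature file per person.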

    person_map = anonymize_persons()

    weekly_commit_count_per_person = []

    for p in xrange(0, 4):
        weekly_commit_count = {}
        for i in xrange(1, 16):
            weekly_commit_count.setdefault(i, 0)
        weekly_commit_count_per_person.append(weekly_commit_count)

    print(weekly_commit_count_per_person)

    repo = github3.repository(repo[0], repo[1])
    all_commits = repo.commits()

    for commit in all_commits:
        commit_date = commit.commit.committer["date"]
        committer = commit.commit.committer["name"]
        week_no = dateutil.parser.parse(commit_date).isocalendar()[1]
        #print repo.commit(commit1.sha).commit.committer["date"]

        if week_no in weekly_commit_count_per_person[person_map[committer] - 1]:
            weekly_commit_count_per_person[person_map[committer] -
                                           1][week_no] += 1
        else:
            weekly_commit_count_per_person[person_map[committer] -
                                           1][week_no] = 1

    print(weekly_commit_count_per_person)

    for per in xrange(1, 5):
        f = open(
            './features/repo_' + str(repo_count) + '_person_' + str(per) +
            '_uneven_person_commits.csv', 'wt')

        try:
            writer = csv.writer(f)
            writer.writerow(('Week_No', 'Commit_Count'))

            for (key,
                 value) in weekly_commit_count_per_person[per - 1].items():
                writer.writerow((key, value))

        finally:
            f.close()
def find_issue_without_milestones(repo):
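    # Count issues that have no milestone assigned, excluding pull requests
    # (which the issues API also returns).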

    issues = github3.issues_on(repo[0], repo[1], state='all')

    ## Since pull requests are also captured as issues, build a
    ## map of pull request numbers for comparison with the issues
    repo = github3.repository(repo[0], repo[1])
    pull_requests = repo.pull_requests(state='all')

    pull_request_map = {}
    for pull_request in pull_requests:
        pull_request_map[pull_request.number] = pull_request.title

    for issue in issues:
        if issue.milestone is None:
            if issue.number not in pull_request_map:
                per_repo_unassigned_issues[repo_count] += 1
def find_issue_without_milestones(repo):

	issues = github3.issues_on(repo[0], repo[1], state='all')
	
	## Since pull requests are also captured as issues, build a
	## map of pull request numbers for comparison with the issues
	repo = github3.repository(repo[0], repo[1])
	pull_requests = repo.pull_requests(state='all')

	pull_request_map = {}
	for pull_request in pull_requests:
		pull_request_map[pull_request.number] = pull_request.title

	for issue in issues:
		if issue.milestone is None:
			if issue.number not in pull_request_map:
				per_repo_unassigned_issues[repo_count] += 1
def find_issues_uneven_weekly_commits(repo):

	person_map = anonymize_persons()

	weekly_commit_count_per_person=[]

	for p in xrange(0,4):
		weekly_commit_count = {} 
		for i in  xrange(1,16):
			weekly_commit_count.setdefault(i,0)
		weekly_commit_count_per_person.append(weekly_commit_count)
	
	print(weekly_commit_count_per_person)

	repo = github3.repository(repo[0], repo[1])
	all_commits = repo.commits()

	for commit in all_commits:
		commit_date = commit.commit.committer["date"]
		committer = commit.commit.committer["name"]
		week_no = dateutil.parser.parse(commit_date).isocalendar()[1]
		#print repo.commit(commit1.sha).commit.committer["date"]

		if week_no in weekly_commit_count_per_person[person_map[committer]-1]:
			weekly_commit_count_per_person[person_map[committer]-1][week_no] += 1
		else:
			weekly_commit_count_per_person[person_map[committer]-1][week_no] = 1

	print (weekly_commit_count_per_person)

	for per in xrange(1,5):
		f = open('./features/repo_'+str(repo_count)+'_person_'+str(per)+'_uneven_person_commits.csv', 'wt')
		
		try:
			writer = csv.writer(f)
			writer.writerow( ('Week_No', 'Commit_Count'))
	
			for (key, value) in weekly_commit_count_per_person[per-1].items():
				writer.writerow( (key, value))

		finally:
			f.close()
Example 20
def get_version(dep_id=None):
    print('dep_id = {}'.format(dep_id)) 
    verinfo = {}
    user = dep_id.split('/')[0]
    repo = dep_id.split('/')[1]
    # TODO: If API limits are reached, the (authenticated) github3 object from the
    # calling script will need to make an appearance here somehow.
    repo = github3.repository(user, repo)
    # Determine the 'best' release version.
    # This will depend on how the repository is organized and how releases are done.
    # Easiest is if the repo uses Github releases consistently. Just query that.
    # Second best is a simple semver tag. Sort and pick the latest.
    # The problem is, these release practices have to be adhered to strictly, otherwise
    # incorrect information will be harvested here.
    #
    # What heuristic can be used to get only tags that look 'release-like'?
    latestrel = repo.latest_release()
    
    verinfo['version'] = '1.2.3'
    return verinfo
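
# The comments above describe the intended heuristic without implementing it.
# Below is a minimal sketch of that heuristic, assuming github3.py 1.x and the
# `packaging` library; pick_version() is a hypothetical helper, not part of the
# original code: prefer a published GitHub release, otherwise fall back to the
# newest tag that parses as a version.
from packaging.version import InvalidVersion, Version


def pick_version(repo):
    # Prefer GitHub releases when the repository uses them consistently.
    try:
        release = repo.latest_release()
        if release is not None:
            return release.tag_name.lstrip('v')
    except github3.exceptions.NotFoundError:
        pass
    # Otherwise collect tags that parse as versions and pick the newest.
    candidates = []
    for tag in repo.tags():
        try:
            candidates.append(Version(tag.name.lstrip('v')))
        except InvalidVersion:
            continue
    return str(max(candidates)) if candidates else None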
Example 21
    def getGithubObject(self, name, kind = None):
        """

        """

        # print("GitHub3Helper.getGithubObject(): %s (kind: %s)" % ( name, kind ))

        # if full github url, strip off the prefix
        if name.startswith("https://github.com/"):
            name = name[len("https://github.com/"):]
            pass

        # XXX i think we need to replace http://github.com with https://github.com
        
        # if kind is not None:
        #    kind = kind.lower()
        #    pass
        
        if kind == "User" or kind == "user":
            return self.github.user(name)

        if kind == "Organization" or kind == "org" or kind == "organization":
            return self.github.organization(name)

        if kind == "Repository" or kind == "repo":
            owner, repoName = name.split("/")
            # return github3.repository(owner, repoName)
            return self.github.repository(owner, repoName)

        if kind is not None:
            print("  kind not recognized: %r" % kind)
            xxx  # undefined name: raises NameError for an unrecognized kind
            pass
        
        #
        #
        #
        if "/" in name:
            owner, repoName = name.split("/")
            return github3.repository(owner, repoName)
        
        # try both user and org.  if both, panic

        user = self.github.user(name)
        org  = self.github.organization(name)

        if user and org:
            # XXX this needs to be policy
            print("  note: both user and org exist - returning org: %s" % name)
            return org

        if user:
            return user

        if org:
            return org

        print("GitHub3Helper.getGithubObject(): %s (kind: %s)" % ( name, kind ))
        print("  no kind specified or obvious, and both user and org are null")
        print("    user: %r" % user)
        print("    org:  %r" % org)
        
        return None
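
    # Hypothetical usage of the method above (names are illustrative only),
    # assuming `helper` is an instance of the surrounding helper class:
    #   helper.getGithubObject("sigmavirus24/github3.py", kind="repo")
    #   helper.getGithubObject("https://github.com/python/cpython")
    #   helper.getGithubObject("python")  # org is preferred when both user and org exist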
Example 22
def get_repo_last_commit_delta_time(owner, repo):
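    # Return the repository's last push timestamp converted to the local timezone.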
    repo = github3.repository(owner, repo)
    return repo.pushed_at.astimezone(local_tz)
Example 23
def get_open_pull_requests(repo_organization, repo_name, token=None):

    repository = gh.repository(repo_organization, repo_name)
    return repository.pull_requests(state='open',
                                    sort='created',
                                    direction='asc')
Example 24
def main():
    repo = github3.repository('dimagi', 'commcare-android')
    latest_release = repo.latest_release()
    apk_assets = [a for a in latest_release.assets() if is_release_apk(a.name)]
    print(apk_assets[0].browser_download_url)
    def test_repository(self):
        args = ('owner', 'repo')
        github3.repository(*args)
        self.gh.repository.assert_called_with(*args)
Example 26
import github3
import zipfile
import platform
import tarfile
from shutil import move
import os
import sys

if len( sys.argv ) > 1:
    repo = github3.login( token=sys.argv[1] ).repository( "premake", "premake-core" )
else:
    repo = github3.repository( "premake", "premake-core" )

ignoreReleases = [ "Premake 5.0 alpha 4", "Premake 5.0 alpha 5" ]


for release in reversed(list(repo.releases())):

    # remove the ignored releases
    if any(release.name in s for s in ignoreReleases):
        continue
        
    print( "Downloading " + release.name )
    
    for asset in release.assets():
        
        if platform.system() == "Linux" and "linux" in asset.name:
        
            asset.download("bin/temp/" + asset.name)
            tar = tarfile.open("bin/temp/" + asset.name, "r:gz")
            tar.extractall( "bin" )
Example 27
    def test_repository(self):
        expect(github3.repository(self.sigm, self.todo)).isinstance(
            github3.repos.Repository
        )
Example 28
    def execute(self, rc):
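        # Submit the pull request's head branch as a BaTLab job over SSH:
        # stage the build scripts, point the fetch file at the PR branch,
        # wire in status callbacks, submit, and record the job for later updates.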
        event_name = rc.event.name
        pr = rc.event.data  # pull request object
        #job = pr.repository + (pr.number,)  # job key (owner, repo, number) 
        job = pr.base.repo + (pr.number,)  # job key (owner, repo, number) 
        #jobdir = "${HOME}/" + "--".join(pr.repository + (str(pr.number),))
        jobdir = "${HOME}/" + "--".join(pr.base.repo + (str(pr.number),))
        jobs = PersistentCache(cachefile=rc.batlab_jobs_cache)
        event = rc.event = Event(name='batlab-status', data={'status': 'error', 
                                 'number': pr.number, 'description': ''})
        # connect to batlab
        key = paramiko.RSAKey(filename=rc.ssh_key_file)
        client = paramiko.SSHClient()
        client.load_system_host_keys()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        #client.get_host_keys().add(BATLAB_SUBMIT_HOSTNAME, 'ssh-rsa', key)
        try:
            client.connect(BATLAB_SUBMIT_HOSTNAME, username=rc.batlab_user,
                           key_filename=rc.ssh_key_file)
        except (paramiko.BadHostKeyException, paramiko.AuthenticationException, 
                paramiko.SSHException, socket.error):
            msg = 'Error connecting to BaTLab.'
            warn(msg, RuntimeWarning)
            event.data['description'] = msg
            return
        # if sync event, kill an existing job.
        if event_name == 'github-pr-sync' and job in jobs:
            try:
                cmd = rc.batlab_kill_cmd + ' ' + jobs[job]['gid']
                sin, out, err = client.exec_command(cmd)
                out.channel.recv_exit_status()
            except paramiko.SSHException:
                event.data['description'] = "Error killing existing BaTLab job."
                return
            del jobs[job]

        # make sure we have a clean jobdir
        stdin, stdout, sterr = client.exec_command('rm -rf ' + jobdir)
        stdout.channel.recv_exit_status()
        # put the scripts on batlab in a '~/owner--repository--number' dir
        if rc.batlab_scripts_url.endswith('.git'):
            cmd = 'git clone {0} {1}'.format(rc.batlab_scripts_url, jobdir)
            try:
                stdin, stdout, sterr = client.exec_command(cmd)
                stdout.channel.recv_exit_status()
            except paramiko.SSHException:
                event.data['description'] = "Error cloning BaTLab scripts."
                return            
        elif rc.batlab_scripts_url.endswith('.zip'):
            cmds = unzip_cmds_template.format(jobdir=jobdir, 
                    batlab_scripts_url=rc.batlab_scripts_url)

            try:
                stdin, stdout, sterr = client.exec_command(cmds)
                stdout.channel.recv_exit_status()
            except paramiko.SSHException:
                event.data['description'] = "Error unzipping BaTLab scripts."
                return            
            cmd = 'ls {0}'.format(jobdir)
            stdin, stdout, sterr = client.exec_command(cmd)
            stdout.channel.recv_exit_status()
 
            ls = stdout.read().split()
            if len(ls) == 1:
                try:
                    cmd = 'mv {0}/{1}/* {0}'.format(jobdir, ls[0])
                    stdin, stdout, sterr = client.exec_command(cmd)
                    stdout.channel.recv_exit_status()
                except paramiko.SSHException:
                    event.data['description'] = "Error moving BaTLab scripts."
                    return            
        else:
            raise ValueError("rc.batlab_scripts_url not understood.")

        # Overwrite fetch file
        head_repo = github3.repository(*pr.head.repo)
        fetch = git_fetch_template.format(repo_url=head_repo.clone_url,
                                          repo_dir=job[1], branch=pr.head.ref)
        cmd = 'echo "{0}" > {1}/{2}'.format(fetch, jobdir, rc.batlab_fetch_file)
        try:
            stdin, stdout, sterr = client.exec_command(cmd)
            stdout.channel.recv_exit_status()
        except paramiko.SSHException:
            event.data['description'] = "Error overwritting fetch file."
            return
        
        # append callbacks to run spec
        try:
            cmd = 'cat {0}/{1}'.format(jobdir, rc.batlab_run_spec)
            _, x, _ = client.exec_command(cmd)
            x.channel.recv_exit_status()
            append = ', <a href="{0}/dashboard">{1}</a>'.format(
                rc.server_url, 
                "Polyphemus Dashboard")
            run_spec_lines = [l.strip() for l in x.readlines()]
            run_spec_lines = [
                (l + append if l.split('=')[0].strip() == "description" else l) 
                for l in run_spec_lines
                ]
            
            pre_file = _ensure_task_script('pre_all', run_spec_lines, 
                                           rc.batlab_run_spec, jobdir, client)
            pre_curl = pre_curl_template.format(number=pr.number, port=rc.port, 
                                                server_url=rc.server_url)
            cmd = 'echo "{0}" >> {1}/{2}'.format(pre_curl, jobdir, pre_file)
            sin, out, err = client.exec_command(cmd)
            out.channel.recv_exit_status()
            post_file = _ensure_task_script('post_all', run_spec_lines, 
                                            rc.batlab_run_spec, jobdir, client)
            _ensure_runspec_option('always_run_post_all', run_spec_lines, 
                                   rc.batlab_run_spec, jobdir, client, 'true')
            post_curl = post_curl_template.format(number=pr.number, port=rc.port, 
                                                  server_url=rc.server_url)

            cmd = 'echo "{0}" >> {1}/{2}'.format(post_curl, jobdir, post_file)
            stdin, stdout, sterr = client.exec_command(cmd)
            stdout.channel.recv_exit_status()
        except paramiko.SSHException:
            event.data['description'] = "Error appending BaTLab callbacks."
            return            

        # create scp for jobdir
        jobdir_scp = jobdir_scp_template.format(jobdir=jobdir)
        cmd = 'echo "{0}" >> {1}/jobdir.scp'.format(jobdir_scp, jobdir)
        try:
            stdin, stdout, sterr = client.exec_command(cmd)
            stdout.channel.recv_exit_status()
        except paramiko.SSHException:
            event.data['description'] = "Error creating jobdir.scp file."
            return            

        try:
            inputs = run_spec_lines[_find_startswith(run_spec_lines, 'inputs')].strip()
        except IndexError:
            event.data['description'] = "Error with run_spec formatting."
            return

        if len(inputs.split('=', 1)[-1].strip()) > 0:
            cmd = "sed -i 's:{0}:{0},jobdir.scp:' {1}/{2}"
        else:
            cmd = "sed -i 's:{0}:jobdir.scp:' {1}/{2}"
        try:
            stdin, stdout, sterr = client.exec_command(cmd.format(inputs,
                                                                  jobdir, 
                                                                  rc.batlab_run_spec))
            stdout.channel.recv_exit_status()
        except paramiko.SSHException:
            event.data['description'] = "Error adding jobdir.scp to inputs."
            return            

        # submit the job
        cmd = 'cd {0}; {1} {2}'
        cmd = cmd.format(jobdir, rc.batlab_submit_cmd, rc.batlab_run_spec)
        try:
            _, submitout, submiterr = client.exec_command(cmd)
            submitout.channel.recv_exit_status()
        except paramiko.SSHException:
            event.data['description'] = "Error submitting BaTLab job."
            return
        err = submiterr.read().strip()
        if 0 < len(err):
            event.data['description'] = err
            warn("BaTLab job unsuccessfully submitted:\n" + err, RuntimeWarning)
            return

        # clean up
        lines = submitout.readlines()
        #import pprint
        #pprint.pprint(lines)
        #pprint.pprint(submiterr.read())
        report_url = lines[-1].strip()
        gid = lines[0].split()[-1]
        client.close()
        jobs[job] = {'gid': gid, 'report_url': report_url, 'dir': jobdir}
        if rc.verbose:
            print("BaTLab reporting link: " + report_url)
        event.data.update(status='pending', description="BaTLab job submitted.",
                          target_url=report_url)
Example 29
import github3
import logging

# Set up a file to have all the logs written to
file_handler = logging.FileHandler('github_script.log')

# Send the logs to stderr as well
stream_handler = logging.StreamHandler()

# Format the log output and include the log level's name and the time it was
# generated
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')

# Use that Formatter on both handlers
file_handler.setFormatter(formatter)
stream_handler.setFormatter(formatter)

# Get the logger used by github3.py internally by referencing its name
# directly
logger = logging.getLogger('github3')
# Add the handlers to it
logger.addHandler(file_handler)
logger.addHandler(stream_handler)
# Set the level which determines what you see
logger.setLevel(logging.DEBUG)

# Make a library call and see the information posted
r = github3.repository('sigmavirus24', 'github3.py')
print('{0} - {0.html_url}'.format(r))
Example 30
                major=version._version.release[0],
                minor=version._version.release[1],
            )

            if master_version in pyenv_versions:
                if pyenv_versions[master_version] < version:
                    pyenv_versions[master_version] = version
            else:
                pyenv_versions[master_version] = version

print('PyENV Versions:')
for major, version in pyenv_versions.items():
    print(version.public)

# Check for Python Releases via github Tags
python = repository('python', 'cpython')
python_releases = [version for version in python.tags()]
releases = {}
for release in python_releases:
    version = parse_version(release.name)
    if isinstance(version, Version):
        master_version = '{major}.{minor}'.format(
            major=version._version.release[0],
            minor=version._version.release[1],
        )

        if master_version in releases:
            if releases[master_version] < version:
                releases[master_version] = version
        else:
            releases[master_version] = version
def main():
    repo = github3.repository("dimagi", "commcare-android")
    latest_release = repo.latest_release()
    apk_assets = [a for a in latest_release.assets() if is_release_apk(a.name)]
    print(apk_assets[0].browser_download_url)
Example 32
    def test_repository(self):
        """Show that github3.repository proxies to GitHub."""
        github3.repository('sigmavirus24', 'github3.py')
        self.gh.repository.assert_called_once_with('sigmavirus24',
                                                   'github3.py')