Example No. 1
def main(repo_path, output_file):
    repo = pygit2.Repository(repo_path)
    registrations = recursive_process(repo)
    with open(output_file, 'w') as f:
        json.dump(registrations, f, indent=4, sort_keys=True)
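The recursive_process() helper used above is project-specific and not shown. As a hedged point of reference, a minimal sketch of the same open-walk-dump pattern using only stock pygit2 (the dump_commit_metadata name and the chosen fields are illustrative, not taken from the original):

import json

import pygit2


def dump_commit_metadata(repo_path, output_file):
    # Walk HEAD and serialise basic commit metadata, mirroring the
    # open-repository / dump-to-JSON shape of the example above.
    repo = pygit2.Repository(repo_path)
    commits = [
        {
            "sha": commit.oid.hex,
            "author": commit.author.name,
            "email": commit.author.email,
            "message": commit.message.strip(),
        }
        for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TOPOLOGICAL)
    ]
    with open(output_file, "w") as f:
        json.dump(commits, f, indent=4, sort_keys=True)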
Example No. 2
    def test_bytes_string(self):
        repo_path = b'./test/data/testrepo.git/'
        pygit2.Repository(repo_path)
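An aside that is not part of the original test: the snippet above passes an explicit path to pygit2.Repository. When the repository location is not known up front, pygit2.discover_repository() can locate it from any path inside the working tree; a minimal sketch:

import pygit2

# Walks upwards from the given path until it finds a repository; returns None if there is none.
git_dir = pygit2.discover_repository(".")
if git_dir is not None:
    repo = pygit2.Repository(git_dir)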
Example No. 3
def test_switch_branch(data_working_copy, geopackage, cli_runner, tmp_path):
    raise pytest.skip()  # apsw.SQLError: SQLError: Safety level may not be changed inside a transaction
    with data_working_copy("points") as (repo_path, wc):
        db = geopackage(wc)

        # creating a new branch with existing name errors
        r = cli_runner.invoke(["switch", "-c", "master"])
        assert r.exit_code == 2, r
        assert r.stdout.splitlines()[-1].endswith(
            "A branch named 'master' already exists.")

        subprocess.run(["git", "init", "--bare", str(tmp_path)], check=True)
        r = cli_runner.invoke(["remote", "add", "myremote", tmp_path])
        assert r.exit_code == 0, r

        r = cli_runner.invoke(["push", "--set-upstream", "myremote", "master"])
        assert r.exit_code == 0, r

        # new branch
        r = cli_runner.invoke(["switch", "-c", "foo"])
        assert r.exit_code == 0, r

        repo = pygit2.Repository(str(repo_path))
        assert repo.head.name == "refs/heads/foo"
        assert "foo" in repo.branches
        assert repo.head.peel(pygit2.Commit).hex == H.POINTS.HEAD_SHA

        # make some changes
        db = geopackage(wc)
        with db:
            cur = db.cursor()

            cur.execute(H.POINTS.INSERT, H.POINTS.RECORD)
            assert db.changes() == 1

            cur.execute(f"UPDATE {H.POINTS.LAYER} SET fid=30000 WHERE fid=3;")
            assert db.changes() == 1

        r = cli_runner.invoke(["commit", "-m", "test1"])
        assert r.exit_code == 0, r

        new_commit = repo.head.peel(pygit2.Commit).hex
        assert new_commit != H.POINTS.HEAD_SHA

        r = cli_runner.invoke(["switch", "master"])
        assert r.exit_code == 0, r

        assert H.row_count(db, H.POINTS.LAYER) == H.POINTS.ROWCOUNT

        assert repo.head.name == "refs/heads/master"
        assert repo.head.peel(pygit2.Commit).hex == H.POINTS.HEAD_SHA

        # make some changes
        with db:
            cur = db.cursor()

            cur.execute(H.POINTS.INSERT, H.POINTS.RECORD)
            assert db.changes() == 1

            cur.execute(f"UPDATE {H.POINTS.LAYER} SET fid=40000 WHERE fid=4;")
            assert db.changes() == 1

        r = cli_runner.invoke(["switch", "foo"])
        assert r.exit_code == INVALID_OPERATION, r
        assert "Error: You have uncommitted changes in your working copy." in r.stdout

        r = cli_runner.invoke(["switch", "foo", "--discard-changes"])
        assert r.exit_code == 0, r

        assert H.row_count(db, H.POINTS.LAYER) == H.POINTS.ROWCOUNT + 1

        assert repo.head.name == "refs/heads/foo"
        assert repo.head.peel(pygit2.Commit).hex == new_commit

        # new branch from remote
        r = cli_runner.invoke(["switch", "-c", "test99", "myremote/master"])
        assert r.exit_code == 0, r
        assert repo.head.name == "refs/heads/test99"
        assert "test99" in repo.branches
        assert repo.head.peel(pygit2.Commit).hex == H.POINTS.HEAD_SHA
        branch = repo.branches["test99"]
        assert branch.upstream_name == "refs/remotes/myremote/master"

        assert H.row_count(db, H.POINTS.LAYER) == H.POINTS.ROWCOUNT
Example No. 4
"""
Created on Tue Jul 25 14:15:23 2017

@author: Madhura Kashikar
"""

import pygit2
import re
import string
import subprocess
from collections import Counter
import pandas as pd

repo = pygit2.Repository('C:/Users/Madhura Kashikar/Desktop/Trial/Python')
print(repo.path)
print(repo.workdir)

print("\n")

commit = repo[repo.head.target]
name_a = []
commit_list = []
last = repo[repo.head.target]
emaila = []
email_u = []
data = []
email = []
cid = []
author = []
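The excerpt stops after initialising the collection lists. A hedged sketch of how such lists are typically filled and turned into a DataFrame with plain pygit2 and pandas (the column names and the choice of fields are assumptions, not taken from the original script):

for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
    cid.append(commit.oid.hex)
    author.append(commit.author.name)
    email.append(commit.author.email)
    commit_list.append(commit.message.strip())

# Assemble the collected metadata into a DataFrame for further analysis.
df = pd.DataFrame({
    "commit": cid,
    "author": author,
    "email": email,
    "message": commit_list,
})
print(df.head())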
Example No. 5
    def test_dumping_ticket(self, send_email):
        """ Test dumping a ticket into a JSON blob. """
        send_email.return_value = True

        tests.create_projects(self.session)

        # Create repo
        self.gitrepo = os.path.join(tests.HERE, 'tickets', 'test.git')
        repopath = os.path.join(tests.HERE, 'tickets')
        os.makedirs(self.gitrepo)
        repo_obj = pygit2.init_repository(self.gitrepo, bare=True)

        repo = pagure.lib.get_project(self.session, 'test')
        # Create an issue to play with
        msg = pagure.lib.new_issue(session=self.session,
                                   repo=repo,
                                   title='Test issue',
                                   content='We should work on this',
                                   user='******',
                                   ticketfolder=repopath)
        self.assertEqual(msg.title, 'Test issue')

        # Need another two issues to test the dependency chain
        msg = pagure.lib.new_issue(session=self.session,
                                   repo=repo,
                                   title='Test issue #2',
                                   content='Another bug',
                                   user='******',
                                   ticketfolder=repopath)
        self.assertEqual(msg.title, 'Test issue #2')
        msg = pagure.lib.new_issue(session=self.session,
                                   repo=repo,
                                   title='Test issue #3',
                                   content='That would be nice feature no?',
                                   user='******',
                                   ticketfolder=repopath)
        self.assertEqual(msg.title, 'Test issue #3')

        issue = pagure.lib.search_issues(self.session, repo, issueid=1)
        issue2 = pagure.lib.search_issues(self.session, repo, issueid=2)
        issue3 = pagure.lib.search_issues(self.session, repo, issueid=3)

        # Add a couple of comments on the ticket
        msg = pagure.lib.add_issue_comment(
            session=self.session,
            issue=issue,
            comment='Hey look a comment!',
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Comment added')
        msg = pagure.lib.add_issue_comment(
            session=self.session,
            issue=issue,
            comment='crazy right?',
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Comment added')
        # Assign the ticket to someone
        msg = pagure.lib.add_issue_assignee(
            session=self.session,
            issue=issue,
            assignee='pingou',
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Issue assigned')
        # Add a couple of tags on the ticket
        msg = pagure.lib.add_tag_obj(
            session=self.session,
            obj=issue,
            tags=[' feature ', 'future '],
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Tag added: feature, future')
        # Add dependencies
        msg = pagure.lib.add_issue_dependency(
            session=self.session,
            issue=issue,
            issue_blocked=issue2,
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Dependency added')
        msg = pagure.lib.add_issue_dependency(
            session=self.session,
            issue=issue3,
            issue_blocked=issue,
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Dependency added')

        # Dump the JSON
        pagure.lib.git.update_git(issue, repo, repopath)
        repo = pygit2.Repository(self.gitrepo)
        cnt = len([
            commit for commit in repo.walk(repo.head.target,
                                           pygit2.GIT_SORT_TOPOLOGICAL)
        ])
        self.assertEqual(cnt, 10)

        last_commit = repo.revparse_single('HEAD')
        patch = pagure.lib.git.commit_to_patch(repo, last_commit)
        for line in patch.split('\n'):
            if line.startswith('--- a/'):
                fileid = line.split('--- a/')[1]
                break

        newpath = tempfile.mkdtemp(prefix='pagure-dump-load')
        clone_repo = pygit2.clone_repository(self.gitrepo, newpath)

        self.assertEqual(len(os.listdir(newpath)), 4)

        ticket_json = os.path.join(tests.HERE, 'test_ticket.json')
        self.assertFalse(os.path.exists(ticket_json))
        shutil.copyfile(os.path.join(newpath, fileid), ticket_json)
        self.assertTrue(os.path.exists(ticket_json))

        shutil.rmtree(newpath)
Example No. 6
def merge_request_pull(repo, requestid, username=None):
    """ Request pulling the changes from the fork into the project.
    """
    request = spechub.lib.get_pull_request(SESSION,
                                           project=repo,
                                           requestid=requestid)
    if not request:
        flask.abort(404, 'Pull-request not found')

    repo = request.repo_from

    reponame = os.path.join(APP.config['FORK_FOLDER'], repo.path)

    if not os.path.exists(reponame):
        flask.abort(404, 'Project not found')

    if not is_repo_admin(repo):
        flask.abort(
            403, 'You are not allowed to merge pull-request for this project')

    error_output = flask.url_for('request_pull',
                                 repo=repo.name,
                                 requestid=requestid)
    if username:
        error_output = flask.url_for('fork_request_pull',
                                     repo=repo.name,
                                     requestid=requestid,
                                     username=username)

    # Get the fork
    if request.repo_from.is_fork:
        repopath = os.path.join(APP.config['FORK_FOLDER'],
                                request.repo_from.path)
    else:
        repopath = os.path.join(APP.config['GIT_FOLDER'],
                                request.repo_from.path)
    fork_obj = pygit2.Repository(repopath)

    # Get the original repo
    parentpath = os.path.join(APP.config['GIT_FOLDER'], request.repo.path)
    orig_repo = pygit2.Repository(parentpath)

    # Clone the original repo into a temp folder
    newpath = tempfile.mkdtemp()
    new_repo = pygit2.clone_repository(parentpath, newpath)

    repo_commit = fork_obj[request.stop_id]

    ori_remote = new_repo.remotes[0]
    # Add the fork as remote repo
    reponame = '%s_%s' % (request.user.user, repo.name)
    remote = new_repo.create_remote(reponame, repopath)

    # Fetch the commits
    remote.fetch()

    merge = new_repo.merge(repo_commit.oid)
    master_ref = new_repo.lookup_reference('HEAD').resolve()

    refname = '%s:%s' % (master_ref.name, master_ref.name)
    if merge.is_uptodate:
        flask.flash('Nothing to do, changes were already merged', 'error')
        spechub.lib.close_pull_request(SESSION, request)
        SESSION.commit()
        return flask.redirect(error_output)
    elif merge.is_fastforward:
        master_ref.target = merge.fastforward_oid
        ori_remote.push(refname)
        flask.flash('Changes merged!')
    else:
        new_repo.index.write()
        try:
            tree = new_repo.index.write_tree()
        except pygit2.GitError:
            shutil.rmtree(newpath)
            flask.flash('Merge conflicts!', 'error')
            return flask.redirect(
                flask.url_for('request_pull',
                              repo=repo.name,
                              username=username,
                              requestid=requestid))
        head = new_repo.lookup_reference('HEAD').get_object()
        commit = new_repo[head.oid]
        sha = new_repo.create_commit(
            'refs/heads/master', repo_commit.author, repo_commit.committer,
            'Merge #%s `%s`' % (request.id, request.title), tree,
            [head.hex, repo_commit.oid.hex])
        ori_remote.push(refname)
        flask.flash('Changes merged!')

    # Update status
    spechub.lib.close_pull_request(SESSION, request)
    SESSION.commit()
    shutil.rmtree(newpath)

    return flask.redirect(flask.url_for('view_repo', repo=repo.name))
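The merge.is_uptodate and merge.is_fastforward attributes above come from a very old pygit2 API in which Repository.merge() returned a result object. In current pygit2 the equivalent decision is usually made with Repository.merge_analysis(); a minimal sketch, with classify_merge being an illustrative helper rather than part of spechub:

import pygit2


def classify_merge(repo, their_head_oid):
    # merge_analysis returns (analysis_flags, merge_preference) for merging
    # their_head_oid into the current HEAD.
    analysis, _preference = repo.merge_analysis(their_head_oid)
    if analysis & pygit2.GIT_MERGE_ANALYSIS_UP_TO_DATE:
        return "up-to-date"    # nothing to merge
    if analysis & pygit2.GIT_MERGE_ANALYSIS_FASTFORWARD:
        return "fast-forward"  # just advance the branch reference
    return "normal"            # a real merge commit is needed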
Example No. 7
def request_pull(repo, requestid, username=None):
    """ Request pulling the changes from the fork into the project.
    """

    request = spechub.lib.get_pull_request(SESSION,
                                           project=repo,
                                           requestid=requestid)

    if not request:
        flask.abort(404, 'Pull-request not found')

    project = request.repo_from

    reponame = os.path.join(APP.config['FORK_FOLDER'], project.path)
    if not request.status:
        project = request.repo
        reponame = os.path.join(APP.config['GIT_FOLDER'], project.path)

    if not os.path.exists(reponame):
        flask.abort(404, 'Project not found')

    repo_obj = pygit2.Repository(reponame)

    if project.parent:
        parentname = os.path.join(APP.config['GIT_FOLDER'],
                                  project.parent.path)
    else:
        parentname = os.path.join(APP.config['GIT_FOLDER'], project.path)
    orig_repo = pygit2.Repository(parentname)

    diff_commits = []
    diffs = []
    repo_commit = repo_obj[request.stop_id]
    if not repo_obj.is_empty and not orig_repo.is_empty:
        orig_commit = orig_repo[orig_repo.lookup_branch(
            request.branch).get_object().hex]

        master_commits = [
            commit.oid.hex for commit in orig_repo.walk(
                orig_repo.lookup_branch(request.branch).get_object().hex,
                pygit2.GIT_SORT_TIME)
        ]

        repo_commit = repo_obj[request.stop_id]

        for commit in repo_obj.walk(request.stop_id, pygit2.GIT_SORT_TIME):
            if request.status and commit.oid.hex in master_commits:
                break
            elif not request.status and commit.oid.hex == request.start_id:
                break
            diff_commits.append(commit)
            diffs.append(
                repo_obj.diff(
                    repo_obj.revparse_single(commit.parents[0].oid.hex),
                    repo_obj.revparse_single(commit.oid.hex)))

    elif orig_repo.is_empty:
        orig_commit = None
        diff = repo_commit.tree.diff_to_tree(swap=True)
    else:
        flask.flash('Fork is empty, there are no commits to request pulling',
                    'error')
        return flask.redirect(
            flask.url_for('view_repo', username=username, repo=repo.name))

    html_diffs = []
    for diff in diffs:
        html_diffs.append(
            highlight(diff.patch, DiffLexer(),
                      HtmlFormatter(
                          noclasses=True,
                          style="tango",
                      )))

    return flask.render_template(
        'pull_request.html',
        select='requests',
        requestid=requestid,
        repo=repo,
        username=username,
        request=request,
        #repo_admin=is_repo_admin(request.repo),
        repo_obj=repo_obj,
        orig_repo=orig_repo,
        diff_commits=diff_commits,
        diffs=diffs,
        html_diffs=html_diffs,
        forks=spechub.lib.get_forks(SESSION, request.repo),
    )
Example No. 8
    def _use_existing_checkout(self, source_repo, checkout_dir):
        """Update and use existing checkout."""
        repo = pygit2.Repository(checkout_dir)
        osv.reset_repo(repo, git_callbacks=self._git_callbacks(source_repo))
        logging.info('Using existing checkout at %s', checkout_dir)
        return repo
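osv.reset_repo() and self._git_callbacks() are OSV-specific helpers. A rough, hedged equivalent using only stock pygit2 would fetch the remote and hard-reset the checkout to its branch tip (the remote and branch names below are assumptions):

import pygit2


def reset_to_remote(checkout_dir, remote_name="origin", branch="main"):
    # Fetch the remote and hard-reset the working tree to its branch tip.
    repo = pygit2.Repository(checkout_dir)
    repo.remotes[remote_name].fetch()
    remote_ref = repo.lookup_reference(f"refs/remotes/{remote_name}/{branch}")
    repo.reset(remote_ref.target, pygit2.GIT_RESET_HARD)
    return repo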
Example No. 9
def scrape(project, task):
    print("PROCESSING " + project)

    base_url = "https://issues.apache.org/jira/rest/api/latest/issue/"

    create_dirs(project)

    # fname = "data/csvs/" + project + ".csv"
    fname = "data/csvs/" + project + "_commits.csv"

    repo = pygit2.Repository("../apache/" + project.lower())

    ranges = {
        "ACCUMULO": range(2460, 4675),
        "AMBARI": range(6271, 22780),
        "HADOOP": range(6243, 13891),
        "JCR": range(892, 4119),
        "LUCENE": range(701, 11184),
        "OOZIE": range(609, 3316)
    }

    with open(fname, "a+") as f:
        # this isn't a good method but shouldn't be too much of an issue because the file shouldn't get too large
        def get_start():
            try:
                with open("data/starts/" + project + "_start.txt", "r") as t:
                    try:
                        start = int(t.readline())
                    except ValueError:
                        return 1

                return start
            except FileNotFoundError:
                return 1

        start = get_start()

        if start == 1:
            # f.write("bug_id,issue_type,severity,days_to_close,num_comments,num_commenters,breaks,is_broken_by\n")
            f.write(
                "sha,bug_id,num_files,file_types,avg_line_age,num_owners\n")
            start = ranges[project].start

        issue_no = 0

        def write_all_issues_to_file():
            nonlocal issue_no
            issue_no = start
            consecutive_missed = 0

            threshold_for_missed = 20

            while issue_no < ranges[project].stop:
                try:
                    # task(f, project, project_url, issue_no)
                    task(f, project, issue_no, repo)
                except IssueNotExistingError:
                    consecutive_missed += 1

                    if consecutive_missed < threshold_for_missed:
                        pass
                    else:
                        print("Terminating at issue " + str(issue_no))
                        return
                else:
                    consecutive_missed = 0
                finally:
                    with open("data/starts/" + project + "_start.txt",
                              "w+") as t:
                        t.write(str(issue_no))

                issue_no += 1

        write_all_issues_to_file()

    print("Done scraping", project, "at", issue_no, "\n")
Example No. 10
    def test_view_commit_old(self):
        """ Test the view_commit_old endpoint. """

        tests.create_projects(self.session)
        tests.create_projects_git(os.path.join(self.path, "repos"), bare=True)

        # Add a README to the git repo - First commit
        tests.add_readme_git_repo(os.path.join(self.path, "repos", "test.git"))
        repo = pygit2.Repository(os.path.join(self.path, "repos", "test.git"))
        commit = repo.revparse_single("HEAD")

        # View first commit
        output = self.app.get("/test/%s" % commit.oid.hex)
        self.assertEqual(output.status_code, 302)

        output = self.app.get("/test/%s" % commit.oid.hex,
                              follow_redirects=True)
        self.assertEqual(output.status_code, 200)
        self.assertTrue('href="#commit-overview-collapse"' in output.get_data(
            as_text=True))
        self.assertTrue("Merged by Alice Author" in output.get_data(
            as_text=True))
        self.assertTrue("Committed by Cecil Committer" in output.get_data(
            as_text=True))

        self.assertTrue('title="View file as of %s"' %
                        commit.oid.hex[0:6] in output.get_data(as_text=True))

        # View first commit - with the old URL scheme
        output = self.app.get("/test/%s" % commit.oid.hex,
                              follow_redirects=True)
        self.assertEqual(output.status_code, 200)
        self.assertTrue('href="#commit-overview-collapse"' in output.get_data(
            as_text=True))
        self.assertTrue("Merged by Alice Author" in output.get_data(
            as_text=True))
        self.assertTrue("Committed by Cecil Committer" in output.get_data(
            as_text=True))

        # Add some content to the git repo
        tests.add_content_git_repo(os.path.join(self.path, "repos",
                                                "test.git"))

        repo = pygit2.Repository(os.path.join(self.path, "repos", "test.git"))
        commit = repo.revparse_single("HEAD")

        # View another commit
        output = self.app.get("/test/%s" % commit.oid.hex,
                              follow_redirects=True)
        self.assertEqual(output.status_code, 200)
        self.assertTrue('href="#commit-overview-collapse"' in output.get_data(
            as_text=True))
        self.assertTrue("Authored by Alice Author" in output.get_data(
            as_text=True))
        self.assertTrue("Committed by Cecil Committer" in output.get_data(
            as_text=True))

        # Add a fork of a fork
        item = pagure.lib.model.Project(
            user_id=1,  # pingou
            name="test3",
            description="test project #3",
            is_fork=True,
            parent_id=1,
            hook_token="aaabbbkkk",
        )
        self.session.add(item)
        self.session.commit()
        forkedgit = os.path.join(self.path, "repos", "forks", "pingou",
                                 "test3.git")

        tests.add_content_git_repo(forkedgit)
        tests.add_readme_git_repo(forkedgit)

        repo = pygit2.Repository(forkedgit)
        commit = repo.revparse_single("HEAD")

        # Commit does not exist in another repo :)
        output = self.app.get("/test/%s" % commit.oid.hex,
                              follow_redirects=True)
        self.assertEqual(output.status_code, 404)

        # View commit of fork
        output = self.app.get("/fork/pingou/test3/%s" % commit.oid.hex,
                              follow_redirects=True)
        self.assertEqual(output.status_code, 200)
        self.assertTrue('href="#commit-overview-collapse"' in output.get_data(
            as_text=True))
        self.assertTrue("  Authored by Alice Author\n" in output.get_data(
            as_text=True))
        self.assertTrue("  Committed by Cecil Committer\n" in output.get_data(
            as_text=True))

        self.assertTrue('title="View file as of %s"' %
                        commit.oid.hex[0:6] in output.get_data(as_text=True))

        # View commit of fork - With the old URL scheme
        output = self.app.get("/fork/pingou/test3/%s" % commit.oid.hex,
                              follow_redirects=True)
        self.assertEqual(output.status_code, 200)
        self.assertTrue('href="#commit-overview-collapse"' in output.get_data(
            as_text=True))
        self.assertTrue("Authored by Alice Author" in output.get_data(
            as_text=True))
        self.assertTrue("Committed by Cecil Committer" in output.get_data(
            as_text=True))

        # Try the old URL scheme with a short hash
        output = self.app.get(
            "/fork/pingou/test3/%s" % commit.oid.hex[:10],
            follow_redirects=True,
        )
        self.assertEqual(output.status_code, 404)
        self.assertIn("<p>Project not found</p>",
                      output.get_data(as_text=True))
Example No. 11
    async def go(self):
        headers = {"Content-Type": "application/octet-stream"}
        async with aiohttp.ClientSession(headers=headers) as session:
            self.session = session

            proc = None
            self.code_analysis_port = None
            if self.remove_comments_enabled:
                ready = False

                for _ in range(7):
                    try:
                        self.code_analysis_port = utils.get_free_tcp_port()
                        proc = subprocess.Popen(
                            [
                                "rust-code-analysis-web",
                                "--port",
                                str(self.code_analysis_port),
                            ]
                        )
                    except FileNotFoundError:
                        raise Exception(
                            "rust-code-analysis is required for comment removal"
                        )

                    for _ in range(7):
                        try:
                            await self.session.get(
                                f"http://localhost:{self.code_analysis_port}/ping",
                                raise_for_status=True,
                            )
                            ready = True
                            break
                        except Exception:
                            if proc.poll() is not None:
                                break

                            time.sleep(1)

                    if ready:
                        break

                assert ready, "rust-code-analysis should be able to start"

            if os.path.exists(self.repo_out_dir):
                self.repo = pygit2.Repository(self.repo_out_dir)
                try:
                    last_commit_hash = utils.get_original_hash(self.repo, "HEAD")
                    self.rev_start = f"children({last_commit_hash})"
                except KeyError:
                    pass
            else:
                os.makedirs(self.repo_out_dir)
                self.repo = pygit2.init_repository(self.repo_out_dir)

            with hglib.open(self.repo_dir) as hg:
                revs = get_revs(hg, self.rev_start, self.rev_end)

            all_commits_done = True
            if self.limit is not None:
                if len(revs) > self.limit:
                    all_commits_done = False

                revs = revs[: self.limit]

            logger.info(f"Mining {len(revs)} commits...")

            cwd = os.getcwd()
            os.chdir(self.repo_dir)

            CHUNK_SIZE = 256
            revs_groups = [
                revs[i : (i + CHUNK_SIZE)] for i in range(0, len(revs), CHUNK_SIZE)
            ]

            with concurrent.futures.ThreadPoolExecutor(
                initializer=_init_thread, max_workers=os.cpu_count() + 1
            ) as executor:
                commits = executor.map(_hg_log, revs_groups)
                commits = tqdm(commits, total=len(revs_groups))
                commits = list(itertools.chain.from_iterable(commits))

                commits_num = len(commits)

                logger.info(f"Converting {commits_num} commits...")

                loop = asyncio.get_running_loop()
                loop.set_default_executor(executor)

                with hglib.open(".") as hg:
                    with open("errors.txt", "a", buffering=1) as f:
                        for commit in tqdm(commits):
                            try:
                                await self.convert(hg, commit)
                            except Exception as e:
                                logger.error(f"Error during transformation: {e}")
                                traceback.print_exc()
                                f.write(f"{commit.node} - {commit.parents}\n")

            os.chdir(cwd)

            while len(hg_servers) > 0:
                hg_server = hg_servers.pop()
                hg_server.close()

            if proc is not None:
                proc.terminate()

            return all_commits_done
Example No. 12
    @classmethod
    def commit_hash(cls, folder):
        '''Get the latest commit hash'''
        repo = pygit2.Repository(folder)
        commit = repo[repo.head.target]
        return commit.oid.hex[:8]
Example No. 13
def view_docs(repo, username=None, namespace=None, filename=None):
    """ Display the documentation
    """
    if "." in repo:
        namespace, repo = repo.split(".", 1)

    repo = pagure.lib.query.get_authorized_project(SESSION,
                                                   repo,
                                                   user=username,
                                                   namespace=namespace)

    if not repo:
        flask.abort(404, "Project not found")

    if not repo.settings.get("project_documentation", True):
        flask.abort(404, "This project has documentation disabled")

    reponame = repo.repopath("docs")
    if not os.path.exists(reponame):
        flask.abort(404, "Documentation not found")

    repo_obj = pygit2.Repository(reponame)

    if not repo_obj.is_empty:
        commit = repo_obj[repo_obj.head.target]
    else:
        flask.abort(
            404,
            flask.Markup(
                "No content found in the repository, you may want to read "
                'the <a href="'
                'https://docs.pagure.org/pagure/usage/using_doc.html">'
                "Using the doc repository of your project</a> documentation."),
        )

    content = None
    tree = None
    if not filename:
        path = [""]
    else:
        path = [it for it in filename.split("/") if it]

    if commit:
        try:
            (tree, content,
             filename) = __get_tree_and_content(repo_obj, commit, path)
        except pagure.exceptions.FileNotFoundException as err:
            flask.flash("%s" % err, "error")
        except Exception as err:
            _log.exception(err)
            flask.abort(500, "Unkown error encountered and reported")

    if not content:
        if not tree or not len(tree):
            flask.abort(404, "No content found in the repository")
        html = "<li>"
        for el in tree:
            name = el.name
            # Append a trailing '/' to the folders
            if el.filemode == 16384:
                name += "/"
            html += '<ul><a href="{0}">{1}</a></ul>'.format(name, name)
        html += "</li>"
        content = TMPL_HTML.format(content=html)
        mimetype = "text/html"
    else:
        mimetype, _ = pagure.lib.mimetype.guess_type(filename, content)

    return flask.Response(content, mimetype=mimetype)
Example No. 14
def repo(tmp_path):
    with utils.TemporaryRepository('gpgsigned.tar', tmp_path) as path:
        yield pygit2.Repository(path)
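utils.TemporaryRepository unpacks the project's gpgsigned.tar fixture and is specific to the pygit2 test suite. A comparable fixture built only on pytest's tmp_path and stock pygit2 might look like the sketch below; it creates an empty repository rather than the signed one:

import pygit2
import pytest


@pytest.fixture
def empty_repo(tmp_path):
    # Fresh, empty repository in a per-test temporary directory.
    path = tmp_path / "repo"
    yield pygit2.init_repository(str(path))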
Example No. 15
    def get_tp_hash(self):
        return CalcserverClient().get_tp_hash(self.__po_content)


if __name__ == "__main__":
    # a5420da8f86e7e5d5e670deb13833588b111380b - new files
    # cc447809d3248acc9f0382af32560472eef460b6 - changed files
    if len(sys.argv) != 3:
        print("Example usage:")
        print("  py-iddiff-git.py ./.git 7fb8df3aed979214165e7c7d28e672966b13a15b")
        sys.exit(1)

    _, repo_path, commit_oid = sys.argv

    repo = pygit2.Repository(repo_path)
    changes = GitCommitDiffStat(repo).iddiff_git(commit_oid)
    print(changes)

    res = Iddiff()
    for change in changes:
        if change.type == CommitFileChangeType.DEL:
            continue

        new_content = TranslationContent(repo.get(change.oid2))
        if change.type == CommitFileChangeType.ADD:
            res.merge(iddiff_against_empty(new_content))
        elif change.type == CommitFileChangeType.MOD:
            old_content = TranslationContent(repo.get(change.oid1))
            res.merge(iddiff_files(old_content, new_content))
Example No. 16
    def setUp(self):
        self.repo = pygit2.Repository('osv-test')
Example No. 17
import pygit2

repo = pygit2.Repository(".")
try:
    repo.index.add_all()
    repo.merge(repo.lookup_reference("FETCH_HEAD").target)
    for thing in repo.index.conflicts:
        for thing2 in thing:
            if thing2 is not None:
                print(thing2.path)
            else:
                print("NONE")
        repo.index.add(thing[2])
    repo.index.write()
    repo.create_commit('HEAD', repo.default_signature, repo.default_signature,
                       "AHH43", repo.index.write_tree(), [repo.head.target])
finally:
    repo.state_cleanup()
Example No. 18
    def mock_clone(self, repo_url, *args, **kwargs):
        if 'osv-test' in repo_url:
            return pygit2.Repository('osv-test')

        return self.original_clone(repo_url, *args, **kwargs)
Example No. 19
def new_request_pull(repo, username, commitid=None):
    """ Request pulling the changes from the fork into the project.
    """
    reponame = os.path.join(APP.config['FORK_FOLDER'], username, repo + '.git')
    if not os.path.exists(reponame):
        flask.abort(404, 'Project not found')

    parentname = os.path.join(APP.config['GIT_FOLDER'], repo + '.git')
    if not os.path.exists(parentname):
        flask.abort(404, 'Fork not found')

    repo_obj = pygit2.Repository(reponame)
    orig_repo = pygit2.Repository(parentname)

    branchname = flask.request.args.get('branch', 'master')
    branch = orig_repo.lookup_branch(branchname)
    if not branch:
        flask.flash('Branch %s does not exist' % branchname, 'error')
        branchname = 'master'

    if commitid is None:
        commitid = repo_obj.head.target
        if branchname:
            branch = repo_obj.lookup_branch(branchname)
            commitid = branch.get_object().hex

    diff_commits = []
    diffs = []
    if not repo_obj.is_empty and not orig_repo.is_empty:
        orig_commit = orig_repo[orig_repo.lookup_branch(
            branchname).get_object().hex]

        master_commits = [
            commit.oid.hex for commit in orig_repo.walk(
                orig_repo.lookup_branch(branchname).get_object().hex,
                pygit2.GIT_SORT_TIME)
        ]

        repo_commit = repo_obj[commitid]

        for commit in repo_obj.walk(repo_commit.oid.hex, pygit2.GIT_SORT_TIME):
            if commit.oid.hex in master_commits:
                break
            diff_commits.append(commit)
            diffs.append(
                repo_obj.diff(
                    repo_obj.revparse_single(commit.parents[0].oid.hex),
                    repo_obj.revparse_single(commit.oid.hex)))

    elif orig_repo.is_empty:
        orig_commit = None
        repo_commit = repo_obj[repo_obj.head.target]
        diff = repo_commit.tree.diff_to_tree(swap=True)
    else:
        flask.flash('Fork is empty, there are no commits to request pulling',
                    'error')
        return flask.redirect(
            flask.url_for('view_repo', username=username, repo=repo.name))

    html_diffs = []
    for diff in diffs:
        html_diffs.append(
            highlight(diff.patch, DiffLexer(),
                      HtmlFormatter(
                          noclasses=True,
                          style="tango",
                      )))

    form = spechub.ui.forms.RequestPullForm()
    if form.validate_on_submit():
        try:
            if orig_commit:
                orig_commit = orig_commit.oid.hex

            parent = repo

            message = spechub.lib.new_pull_request(
                SESSION,
                repo=parent,
                repo_from=repo,
                branch=branchname,
                title=form.title.data,
                start_id=orig_commit,
                stop_id=repo_commit.oid.hex,
                user=flask.g.fas_user.username,
            )
            SESSION.commit()
            flask.flash(message)

            return flask.redirect(
                flask.url_for('request_pulls', username=None, repo=parent))
        except spechub.exceptions.SpecHubException as err:
            flask.flash(str(err), 'error')
        except SQLAlchemyError as err:  # pragma: no cover
            SESSION.rollback()
            flask.flash(str(err), 'error')
Example No. 20
    def test_update(self):
        """Test basic update."""
        task_runner = worker.TaskRunner(ndb_client, None, self.tmp_dir.name,
                                        None, None)
        message = mock.Mock()
        message.attributes = {
            'source': 'source',
            'path': 'BLAH-123.yaml',
            'original_hash': 'hash',
        }
        task_runner._source_update(message)

        repo = pygit2.Repository(self.remote_source_repo_path)
        commit = repo.head.peel()

        self.assertEqual('*****@*****.**', commit.author.email)
        self.assertEqual('OSV', commit.author.name)
        self.assertEqual('Update BLAH-123', commit.message)
        diff = repo.diff(commit.parents[0], commit)
        self.assertEqual(self._load_test_data('expected.diff'), diff.patch)

        self.assertDictEqual(
            {
                'additional_commit_ranges': [{
                    'fixed_in': 'b9b3fd4732695b83c3068b7b6a14bb372ec31f98',
                    'introduced_in': 'eefe8ec3f1f90d0e684890e810f3f21e8500a4cd'
                }, {
                    'fixed_in': '',
                    'introduced_in': 'febfac1940086bc1f6d3dc33fda0a1d1ba336209'
                }],
                'affected': [],
                'affected_fuzzy': [],
                'confidence': None,
                'details': 'Blah blah blah\nBlah\n',
                'ecosystem': 'golang',
                'fixed': '8d8242f545e9cec3e6d0d2e3f5bde8be1c659735',
                'has_affected': False,
                'issue_id': None,
                'last_modified': datetime.datetime(2021, 1, 1, 0, 0),
                'project': 'blah.com/package',
                'public': None,
                'reference_urls': ['https://ref.com/ref'],
                'regressed': 'eefe8ec3f1f90d0e684890e810f3f21e8500a4cd',
                'repo_url': None,
                'search_indices': ['blah.com/package', 'BLAH-123', 'BLAH', '123'],
                'severity': 'HIGH',
                'sort_key': 'BLAH-0000123',
                'source_id': None,
                'status': None,
                'summary': 'A vulnerability',
                'timestamp': None
            },
            osv.Bug.get_by_id('BLAH-123')._to_dict())
Example No. 21
    def __init__(self, path):
        self._commit_cache = {}
        self.repo = pygit2.Repository(path)
        self._cinnabar_notes = None
Example No. 22
    def __init__(self, path: Path) -> None:
        self._repo = pygit2.Repository(str(path))
        self.path = path
Example No. 23
def link_pr_to_ticket(self, session, pr_uid):
    """ Link the specified pull-request against the ticket(s) mentioned in
    the commits of the pull-request

    """
    _log.info('LINK_PR_TO_TICKET: Linking ticket(s) to PR for: %s' % pr_uid)

    request = pagure.lib.get_request_by_uid(session, pr_uid)
    if not request:
        _log.info('LINK_PR_TO_TICKET: No PR found for: %s' % pr_uid)
        return

    if request.remote:
        repopath = pagure.utils.get_remote_repo_path(request.remote_git,
                                                     request.branch_from)
        parentpath = pagure.utils.get_repo_path(request.project)
    else:
        repo_from = request.project_from
        repopath = pagure.utils.get_repo_path(repo_from)
        parentpath = get_parent_repo_path(repo_from)

    repo_obj = pygit2.Repository(repopath)
    orig_repo = pygit2.Repository(parentpath)

    diff_commits = pagure.lib.git.diff_pull_request(
        session,
        request,
        repo_obj,
        orig_repo,
        requestfolder=pagure_config['REQUESTS_FOLDER'],
        with_diff=False)

    _log.info('LINK_PR_TO_TICKET: Found %s commits in that PR' %
              len(diff_commits))

    name = request.project.name
    namespace = request.project.namespace
    user = request.project.user.user \
        if request.project.is_fork else None

    for line in pagure.lib.git.read_git_lines(
        ['log', '--no-walk'] + [c.oid.hex for c in diff_commits] + ['--'],
            repopath):

        line = line.strip()
        for issue in pagure.lib.link.get_relation(session,
                                                  name,
                                                  user,
                                                  namespace,
                                                  line,
                                                  'fixes',
                                                  include_prs=False):
            _log.info('LINK_PR_TO_TICKET: Link ticket %s to PRs %s' %
                      (issue, request))
            pagure.lib.link_pr_issue(session, issue, request)

        for issue in pagure.lib.link.get_relation(session, name, user,
                                                  namespace, line, 'relates'):
            _log.info('LINK_PR_TO_TICKET: Link ticket %s to PRs %s' %
                      (issue, request))
            pagure.lib.link_pr_issue(session, issue, request)

    try:
        session.commit()
    except SQLAlchemyError:
        _log.exception('Could not link ticket to PR :(')
        session.rollback()
Example No. 24
    def test_dumping_reloading_ticket(self, mw, send_email):
        """ Test dumping a ticket into a JSON blob. """
        mw.side_effect = lambda result: result.get()
        send_email.return_value = True

        tests.create_projects(self.session)

        # Create repo
        self.gitrepo = os.path.join(self.path, 'tickets', 'test.git')
        repopath = os.path.join(self.path, 'tickets')
        os.makedirs(self.gitrepo)
        repo_obj = pygit2.init_repository(self.gitrepo, bare=True)

        repo = pagure.get_authorized_project(self.session, 'test')
        # Create an issue to play with
        msg = pagure.lib.new_issue(session=self.session,
                                   repo=repo,
                                   title='Test issue',
                                   content='We should work on this',
                                   user='******',
                                   ticketfolder=repopath)
        self.assertEqual(msg.title, 'Test issue')

        # Need another two issues to test the dependency chain
        msg = pagure.lib.new_issue(session=self.session,
                                   repo=repo,
                                   title='Test issue #2',
                                   content='Another bug',
                                   user='******',
                                   ticketfolder=repopath)
        self.assertEqual(msg.title, 'Test issue #2')
        msg = pagure.lib.new_issue(session=self.session,
                                   repo=repo,
                                   title='Test issue #3',
                                   content='That would be nice feature no?',
                                   user='******',
                                   ticketfolder=repopath)
        self.assertEqual(msg.title, 'Test issue #3')

        issue = pagure.lib.search_issues(self.session, repo, issueid=1)
        issue2 = pagure.lib.search_issues(self.session, repo, issueid=2)
        issue3 = pagure.lib.search_issues(self.session, repo, issueid=3)

        # Add a couple of comments on the ticket
        msg = pagure.lib.add_issue_comment(
            session=self.session,
            issue=issue,
            comment='Hey look a comment!',
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Comment added')
        msg = pagure.lib.add_issue_comment(
            session=self.session,
            issue=issue,
            comment='crazy right?',
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Comment added')
        # Assign the ticket to someone
        msg = pagure.lib.add_issue_assignee(
            session=self.session,
            issue=issue,
            assignee='pingou',
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Issue assigned to pingou')
        # Add a couple of tags on the ticket
        msg = pagure.lib.add_tag_obj(
            session=self.session,
            obj=issue,
            tags=[' feature ', 'future '],
            user='******',
            gitfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Issue tagged with: feature, future')
        # Add dependencies
        msg = pagure.lib.add_issue_dependency(
            session=self.session,
            issue=issue,
            issue_blocked=issue2,
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Issue marked as depending on: #2')
        msg = pagure.lib.add_issue_dependency(
            session=self.session,
            issue=issue3,
            issue_blocked=issue,
            user='******',
            ticketfolder=repopath,
        )
        self.session.commit()
        self.assertEqual(msg, 'Issue marked as depending on: #1')

        # Dump the JSON
        pagure.lib.git.update_git(issue, repo, repopath).wait()
        repo = pygit2.Repository(self.gitrepo)
        cnt = len([
            commit for commit in repo.walk(repo.head.target,
                                           pygit2.GIT_SORT_TOPOLOGICAL)
        ])
        self.assertIn(cnt, (9, 10))

        last_commit = repo.revparse_single('HEAD')
        patch = pagure.lib.git.commit_to_patch(repo, last_commit)
        for line in patch.split('\n'):
            if line.startswith('--- a/'):
                fileid = line.split('--- a/')[1]
                break

        newpath = tempfile.mkdtemp(prefix='pagure-dump-load')
        clone_repo = pygit2.clone_repository(self.gitrepo, newpath)

        self.assertEqual(len(os.listdir(newpath)), 4)

        ticket_json = os.path.join(self.path, 'test_ticket.json')
        self.assertFalse(os.path.exists(ticket_json))
        shutil.copyfile(os.path.join(newpath, fileid), ticket_json)
        self.assertTrue(os.path.exists(ticket_json))
        jsondata = None
        with open(ticket_json) as stream:
            jsondata = json.load(stream)
        self.assertNotEqual(jsondata, None)

        shutil.rmtree(newpath)

        # Test reloading the JSON
        self.tearDown()
        self.setUp()
        # Give the worker time to spawn
        time.sleep(2)
        tests.create_projects(self.session)

        # Create repo
        self.gitrepo = os.path.join(self.path, 'tickets', 'test.git')
        repopath = os.path.join(self.path, 'tickets')
        os.makedirs(self.gitrepo)
        pygit2.init_repository(self.gitrepo, bare=True)

        pagure.lib.git.update_ticket_from_git(
            self.session,
            reponame='test',
            namespace=None,
            username=None,
            issue_uid='foobar',
            json_data=jsondata,
            agent='pingou',
        )

        # Post loading
        repo = pagure.get_authorized_project(self.session, 'test')
        self.assertEqual(len(repo.issues), 1)
        issue = pagure.lib.search_issues(self.session, repo, issueid=1)

        # Check after re-loading
        self.assertEqual(len(issue.comments), 3)
        self.assertEqual(len(issue.tags), 2)
        self.assertEqual(issue.tags_text, ['future', 'feature'])
        self.assertEqual(issue.assignee.username, 'pingou')
        self.assertEqual(issue.children, [])
        self.assertEqual(issue.parents, [])
        self.assertEqual(issue.status, 'Open')
Example No. 25
    def set_up_git_repo(self, new_project=None, branch_from='feature'):
        """ Set up the git repo and create the corresponding PullRequest
        object.
        """

        # Create a git repo to play with
        gitrepo = os.path.join(self.path, 'repos', 'test.git')
        repo = pygit2.init_repository(gitrepo, bare=True)

        repopath = os.path.join(self.newpath, 'test')
        clone_repo = pygit2.clone_repository(gitrepo, repopath)

        # Create a file in that git repo
        with open(os.path.join(repopath, 'sources'), 'w') as stream:
            stream.write('foo\n bar')
        clone_repo.index.add('sources')
        clone_repo.index.write()

        try:
            com = repo.revparse_single('HEAD')
            prev_commit = [com.oid.hex]
        except:
            prev_commit = []

        # Commits the files added
        tree = clone_repo.index.write_tree()
        author = pygit2.Signature(
            'Alice Author', '*****@*****.**')
        committer = pygit2.Signature(
            'Cecil Committer', '*****@*****.**')
        clone_repo.create_commit(
            'refs/heads/master',  # the name of the reference to update
            author,
            committer,
            'Add sources file for testing',
            # binary string representing the tree object ID
            tree,
            # list of binary strings representing parents of the new commit
            prev_commit
        )
        time.sleep(1)
        refname = 'refs/heads/master:refs/heads/master'
        ori_remote = clone_repo.remotes[0]
        PagureRepo.push(ori_remote, refname)

        first_commit = repo.revparse_single('HEAD')

        with open(os.path.join(repopath, '.gitignore'), 'w') as stream:
            stream.write('*~')
        clone_repo.index.add('.gitignore')
        clone_repo.index.write()

        # Commits the files added
        tree = clone_repo.index.write_tree()
        author = pygit2.Signature(
            'Alice Äuthòr', 'alice@äuthòrs.tld')
        committer = pygit2.Signature(
            'Cecil Cõmmîttër', 'cecil@cõmmîttërs.tld')
        clone_repo.create_commit(
            'refs/heads/master',
            author,
            committer,
            'Add .gitignore file for testing',
            # binary string representing the tree object ID
            tree,
            # list of binary strings representing parents of the new commit
            [first_commit.oid.hex]
        )
        refname = 'refs/heads/master:refs/heads/master'
        ori_remote = clone_repo.remotes[0]
        PagureRepo.push(ori_remote, refname)

        # Set the second repo

        new_gitrepo = repopath
        if new_project:
            # Create a new git repo to play with
            new_gitrepo = os.path.join(self.newpath, new_project.fullname)
            if not os.path.exists(new_gitrepo):
                os.makedirs(new_gitrepo)
                new_repo = pygit2.clone_repository(gitrepo, new_gitrepo)

        repo = pygit2.Repository(new_gitrepo)

        # Edit the sources file again
        with open(os.path.join(new_gitrepo, 'sources'), 'w') as stream:
            stream.write('foo\n bar\nbaz\n boose')
        repo.index.add('sources')
        repo.index.write()

        # Commits the files added
        tree = repo.index.write_tree()
        author = pygit2.Signature(
            'Alice Author', '*****@*****.**')
        committer = pygit2.Signature(
            'Cecil Committer', '*****@*****.**')
        repo.create_commit(
            'refs/heads/%s' % branch_from,
            author,
            committer,
            'A commit on branch %s' % branch_from,
            tree,
            [first_commit.oid.hex]
        )
        refname = 'refs/heads/%s' % (branch_from)
        ori_remote = repo.remotes[0]
        PagureRepo.push(ori_remote, refname)
Example No. 26
def get_last_commits(count=5):
    repo = pygit2.Repository(".git")
    commits = list(
        itertools.islice(
            repo.walk(repo.head.target, pygit2.GIT_SORT_TOPOLOGICAL), count))
    return "\n".join(format_commit(c) for c in commits)
Example No. 27
    def test_unicode_string(self):
        # String is unicode because of unicode_literals
        repo_path = './test/data/testrepo.git/'
        pygit2.Repository(repo_path)
Example No. 28
    def setUp(self):
        super().setUp()
        self.repo_ctxtmgr = TemporaryRepository(self.repo_spec)
        self.repo_path = self.repo_ctxtmgr.__enter__()
        self.repo = pygit2.Repository(self.repo_path)
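Since this setUp() enters the TemporaryRepository context manager by hand, the matching tearDown() (not shown here) presumably has to exit it again; a hedged sketch:

    def tearDown(self):
        # Illustrative counterpart to setUp above: release the temporary repository.
        self.repo_ctxtmgr.__exit__(None, None, None)
        super().tearDown()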
Example No. 29
def test_restore(source, pathspec, data_working_copy, cli_runner, geopackage):
    with data_working_copy("points", force_new=True) as (repo_dir, wc):
        layer = H.POINTS.LAYER
        pk_field = H.POINTS.LAYER_PK
        rec = H.POINTS.RECORD
        sql = H.POINTS.INSERT
        del_pk = 5
        upd_field = "t50_fid"
        upd_field_value = 888_888
        upd_pk_range = (10, 15)
        id_chg_pk = 20

        db = geopackage(wc)
        repo = pygit2.Repository(str(repo_dir))

        # make some changes
        with db:
            cur = db.cursor()
            cur.execute(
                f"UPDATE {H.POINTS.LAYER} SET fid=30000 WHERE fid=300;")
            assert db.changes() == 1

        r = cli_runner.invoke(["commit", "-m", "test1"])
        assert r.exit_code == 0, r

        new_commit = repo.head.peel(pygit2.Commit).hex
        assert new_commit != H.POINTS.HEAD_SHA
        print(f"Original commit={H.POINTS.HEAD_SHA} New commit={new_commit}")

        with db:
            cur = db.cursor()
            try:
                cur.execute(sql, rec)
            except apsw.Error:
                print(sql, rec)
                raise
            assert db.changes() == 1

            cur.execute(f"DELETE FROM {layer} WHERE {pk_field} < {del_pk};")
            assert db.changes() == 4
            cur.execute(
                f"UPDATE {layer} SET {upd_field} = ? WHERE {pk_field}>=? AND {pk_field}<?;",
                [upd_field_value, upd_pk_range[0], upd_pk_range[1]],
            )
            assert db.changes() == 5
            cur.execute(
                f"UPDATE {layer} SET {pk_field}=? WHERE {pk_field}=?;",
                [9998, id_chg_pk],
            )
            assert db.changes() == 1

            changes_pre = [
                r[0] for r in cur.execute(
                    'SELECT pk FROM ".sno-track" ORDER BY CAST(pk AS INTEGER);'
                )
            ]
            # .sno-track stores pk as strings
            assert changes_pre == [
                "1",
                "2",
                "3",
                "4",
                "10",
                "11",
                "12",
                "13",
                "14",
                "20",
                "9998",
                "9999",
            ]

        # using `sno restore`
        r = cli_runner.invoke(["restore"] + source + pathspec)
        assert r.exit_code == 0, r

        changes_post = [
            r[0] for r in cur.execute(
                'SELECT pk FROM ".sno-track" ORDER BY CAST(pk AS INTEGER);')
        ]

        cur.execute(
            f"""SELECT value FROM ".sno-meta" WHERE key = 'tree' AND table_name='*';"""
        )
        head_sha = cur.fetchone()[0]

        if pathspec:
            # we restore'd paths other than our test dataset, so all the changes should still be there
            assert changes_post == changes_pre

            if head_sha != new_commit:
                print(f"E: Bad Tree? {head_sha}")

            return

        if source:
            assert changes_post == ["300", "30000"]

            if head_sha != H.POINTS.HEAD_SHA:
                print(f"E: Bad Tree? {head_sha}")

            cur.execute(
                f"SELECT {pk_field} FROM {layer} WHERE {pk_field} = 300;")
            if not cur.fetchone():
                print("E: Previous PK bad? ({pk_field}=300)")
            return

        assert changes_post == []

        if head_sha != new_commit:
            print(f"E: Bad Tree? {head_sha}")

        cur.execute(
            f"""SELECT value FROM ".sno-meta" WHERE key = 'tree' AND table_name='*';"""
        )
        head_sha = cur.fetchone()[0]
        if head_sha != new_commit:
            print(f"E: Bad Tree? {head_sha}")

        cur.execute(f"SELECT {pk_field} FROM {layer} WHERE {pk_field}=?;",
                    [rec[pk_field]])
        if cur.fetchone():
            print(
                f"E: Newly inserted row is still there ({pk_field}={rec[pk_field]})"
            )
        cur.execute(f"SELECT COUNT(*) FROM {layer} WHERE {pk_field} < ?;",
                    [del_pk])
        if cur.fetchone()[0] != 4:
            print("E: Deleted rows {pk_field}<{del_pk} still missing")
        cur.execute(
            f"SELECT COUNT(*) FROM {layer} WHERE {upd_field} = ?;",
            [upd_field_value],
        )
        if cur.fetchone()[0] != 0:
            print("E: Updated rows not reset")
        cur.execute(f"SELECT {pk_field} FROM {layer} WHERE {pk_field} = 9998;")
        if cur.fetchone():
            print(
                f"E: Updated pk row is still there ({pk_field}={id_chg_pk} -> 9998)"
            )
        cur.execute(f"SELECT {pk_field} FROM {layer} WHERE {pk_field} = ?;",
                    [id_chg_pk])
        if not cur.fetchone():
            print("E: Updated pk row is missing ({pk_field}={id_chg_pk})")

        cur.execute(f"SELECT {pk_field} FROM {layer} WHERE {pk_field} = 300;")
        if not cur.fetchone():
            print("E: Previous PK bad? ({pk_field}=300)")
Example No. 30
def process_tags(ctx, prl):
    """
    For each tag reference in the pre-receive-tuple list, add or remove the
    object from the Git Fusion mirror.

    Arguments:
        ctx - P4GF context with initialized pygit2 Repository.
        prl - list of PreReceiveTuple objects.

    Returns None if successful and an error string otherwise.
    """
    tags = [prt for prt in prl if prt.ref.startswith('refs/tags/')]
    if not tags:
        LOG.debug("process_tags() no incoming tags to process")
        return None

    # Screen the tags to ensure their names won't cause problems sometime
    # in the future (i.e. when we create Perforce labels). Several of these
    # characters are not allowed in Git tag names anyway, but better to
    # check in case that changes in the future.
    # In particular git disallows a leading '-', but we'll check for it anyway
    # Otherwise allow internal '-'
    regex = re.compile(r'[*@#,]|\.\.\.|%%')
    for prt in tags:
        tag = prt.ref[10:]
        if regex.search(tag) or tag.startswith('-'):
            return _(
                "illegal characters (@#*,...%%) in tag name: '{}'").format(tag)

    if not ctx.view_repo:
        # In some cases the Git repository object is not yet created.
        ctx.view_repo = pygit2.Repository(ctx.view_dirs.GIT_DIR)

    LOG.debug("process_tags() beginning...")
    tags_path = "objects/repos/{repo}/tags/...".format(
        repo=ctx.config.view_name)
    with ctx.p4gf.at_exception_level(P4.P4.RAISE_NONE):
        # Raises an exception when there are no files to sync?
        ctx.p4gfrun([
            'sync', '-q', "//{}/{}/...".format(ctx.config.p4client_gf,
                                               tags_path)
        ])

    # Decide what to do with the tag references.
    tags_to_delete = []
    tags_to_add = []
    tags_to_edit = []
    for prt in tags:
        tag = prt.ref[10:]
        if prt.old_sha1 == p4gf_const.NULL_COMMIT_SHA1:
            if prt.new_sha1 == p4gf_const.NULL_COMMIT_SHA1:
                # No idea how this happens, but it did, so guard against it.
                sys.stderr.write(
                    _('Ignoring double-zero pre-receive-tuple line'))
                continue
            # Adding a new tag; if it references a commit, check that it
            # exists; for other types, it is too costly to verify
            # reachability from a known commit, so just ignore them.
            obj = _get_tag_target(ctx.view_repo, prt.new_sha1)
            is_commit = obj.type == pygit2.GIT_OBJ_COMMIT
            if is_commit and not ObjectType.commits_for_sha1(ctx, obj.hex):
                return _("Tag '{}' references unknown objects."
                         " Push commits before tags.").format(tag)
            if obj.type == pygit2.GIT_OBJ_TREE:
                sys.stderr.write(
                    _("Tag '{}' of tree will not be stored in Perforce\n").
                    format(tag))
                continue
            if obj.type == pygit2.GIT_OBJ_BLOB:
                sys.stderr.write(
                    _("Tag '{}' of blob will not be stored in Perforce\n").
                    format(tag))
                continue
            _add_tag(ctx, tag, prt.new_sha1, tags_to_edit, tags_to_add)
        elif prt.new_sha1 == p4gf_const.NULL_COMMIT_SHA1:
            # Removing an existing tag
            _remove_tag(ctx, tag, prt.old_sha1, tags_to_edit, tags_to_delete)
        else:
            # Older versions of Git allowed moving a tag reference, while
            # newer ones seemingly do not. We will take the new behavior as
            # the correct one and reject such changes.
            return _(
                'Updates were rejected because the tag already exists in the remote.'
            )

    # Seemingly nothing to do.
    if not tags_to_add and not tags_to_edit and not tags_to_delete:
        LOG.debug("process_tags() mysteriously came up empty")
        return None

    # Add and remove tags as appropriate, doing so in batches.
    LOG.info("adding {} tags, removing {} tags from Git mirror".format(
        len(tags_to_add), len(tags_to_delete)))
    desc = _("Git Fusion '{repo}' tag changes").format(
        repo=ctx.config.view_name)
    with p4gf_util.NumberedChangelist(gfctx=ctx, description=desc) as nc:
        while len(tags_to_add):
            bite = tags_to_add[:_BITE_SIZE]
            tags_to_add = tags_to_add[_BITE_SIZE:]
            ctx.p4gfrun(["add", "-t", "binary+F", bite])
        while len(tags_to_edit):
            bite = tags_to_edit[:_BITE_SIZE]
            tags_to_edit = tags_to_edit[_BITE_SIZE:]
            ctx.p4gfrun(["edit", "-k", bite])
        while len(tags_to_delete):
            bite = tags_to_delete[:_BITE_SIZE]
            tags_to_delete = tags_to_delete[_BITE_SIZE:]
            ctx.p4gfrun(["delete", bite])
        nc.submit()
        if nc.submitted:
            _write_last_copied_tag(ctx, nc.change_num)
    LOG.debug("process_tags() complete")
    return None