Ejemplo n.º 1
0
def log(_, name, git_range, resolve=False):
    """
    Print the changelog (or the resolved commit range) for a git range.

    A range endpoint of the form ``v<N>`` names release N and is replaced
    by that release's commit hash before use.
    """
    repo = utils.git_repo()

    client = utils.s3_client()

    left, _, right = git_range.partition("..")

    def to_commit(ref):
        # "vN" endpoints name a release; swap in that release's commit.
        if ref.startswith("v") and ref[1:].isdigit():
            return get_release(client, name, int(ref[1:])).commit
        return ref

    start, end = to_commit(left), to_commit(right)

    if resolve:
        text = f"{start}...{end}"
    else:
        text = utils.changelog(repo, git.Oid(hex=end), git.Oid(hex=start)).text

    print(text)
Ejemplo n.º 2
0
def to_hg(git_oid):
    """
    Map a git Oid to its mercurial revision.

    Looks up the hg map file first (lazily indexed into the module-level
    ``_git_to_hg_map``), then falls back to ``git cinnabar git2hg`` when
    ``use_cinnabar`` is set. Returns None when no mapping is found.
    """
    global _git_to_hg_map

    # Lazily build the git -> hg index the first time a map file is set.
    if _git_to_hg_map is None and hg_map_file:
        _git_to_hg_map = {}
        print('Indexing mercurial map file...')
        with open(hg_map_file) as f:
            for line in f:
                (git_rev, hg_rev) = line.strip().split()
                _git_to_hg_map[pygit2.Oid(hex=git_rev)] = hg_rev

    if _git_to_hg_map is not None:
        ref = _git_to_hg_map.get(git_oid)
        if ref:
            return ref

    if use_cinnabar:
        # TODO: Maybe there's a way to not spawn processes here hooking into cinnabar's python code?
        # But it's not clear this is necessarily a perf issue...
        # Bug fix: check_output returns bytes, so the original comparison
        # ref != ("0" * len(ref)) compared bytes to str and was always True,
        # letting the all-zero "no mapping" sentinel through. Decode first.
        ref = subprocess.check_output(
            ["git", "cinnabar", "git2hg",
             str(git_oid)], cwd=old_path).decode().strip()
        if len(ref) != 0 and ref != ("0" * len(ref)):
            return ref

    return None
Ejemplo n.º 3
0
def archive_repository(archive_path, url, github_slug, time_series, token):
    '''Download previous versions of the chosen repository at the specified
    intervals and commit these to slave account for future analysis.

    Returns a status string: the exception class name on clone/commit
    failure, an error message when a commit SHA cannot be resolved, or a
    success message when every interval was archived.
    '''
    time_intervals = get_time_intervals(time_series)

    for timestamp in time_intervals:
        # Resolve the commit that was HEAD at this point in time.
        sha = get_sha(github_slug, timestamp, token)

        if not sha:
            # Bug fix: the original called .format(url) on a literal with no
            # placeholder, silently dropping the URL from the message.
            return 'Error: Could not access commit history for given time period ({})'.format(url)

        oid = pygit2.Oid(hex=sha)
        download_path = os.path.join(archive_path, str(timestamp))

        try:
            pygit2.clone_repository(url, download_path, bare=False)
            init_repo(oid, download_path)
            # e.g. "repo20200101" - repository name + compact timestamp.
            u_name = github_slug.split('/')[1] + str(timestamp).replace('-', '')
            bare_url = create_bare_repo(u_name)
            if bare_url:
                commit_files(bare_url, download_path)
        except Exception as ex:
            return str(type(ex).__name__)

    return 'Successfully archived {}'.format(url)
Ejemplo n.º 4
0
 def rev(self, node):
     """Wrap *node* (a pygit2.Oid or its raw bytes) in an OidProxy."""
     if isinstance(node, pygit2.Oid):
         return OidProxy(node)
     if isinstance(node, bytes):
         return OidProxy(pygit2.Oid(raw=node))
     raise TypeError(node)
Ejemplo n.º 5
0
    def compare_patches_in_range(self, patch_rev, times=1, target_commit=None):
        """Diff each of the last *times* commits against its parent and print
        the relevant differences, then hard-reset the repo to its upstream.

        When *target_commit* is given, *times* is replaced by the topological
        distance from HEAD to that commit, so every commit down to the target
        is compared.
        """
        curr_repo_path, _ = self.get_repo_paths()
        curr_repo = self.get_repo(curr_repo_path, patch_rev)

        # Count commits between HEAD and target_commit (inclusive of target).
        distance_to_target = 1
        if target_commit:
            target = curr_repo.revparse_single(target_commit)
            for commit in curr_repo.walk(curr_repo.head.target,
                                         pygit2.GIT_SORT_TOPOLOGICAL):
                if commit.hex == target.hex:
                    break
                distance_to_target += 1

        if target_commit:
            times = distance_to_target

        # Compare HEAD~i against HEAD~(i+1) for each step back in history.
        for i in range(times):
            head = 'HEAD~'
            prev = curr_repo.revparse_single(head + str(i + 1))
            curr = curr_repo.revparse_single(head + str(i))

            OutputManager.print("Comparing with previous commit: " + prev.hex)

            # context_lines=0: only the changed lines, no surrounding context.
            diff = curr_repo.diff(prev, curr, context_lines=0)
            diff_summary = self.compute_diffs(diff, curr.hex)
            OutputManager.print_relevant_diff(diff_summary, self.print_mode)

        # Restore repo to original state
        # NOTE(review): assumes a local 'master' branch with an upstream —
        # confirm this holds for all repos this runs against.
        master = curr_repo.branches['master']
        hash_oid = pygit2.Oid(hex=master.upstream.target.hex)
        curr_repo.reset(hash_oid, pygit2.GIT_RESET_HARD)
        OutputManager.print('Restored to %s' %
                            (curr_repo.revparse_single('HEAD').hex, ))
Ejemplo n.º 6
0
def test_set_head(barerepo):
    """HEAD can be set both to a detached Oid and to a branch ref name."""
    # Detached HEAD: point directly at the parent commit.
    barerepo.set_head(pygit2.Oid(hex=PARENT_SHA))
    assert PARENT_SHA == barerepo.head.target.hex
    # Normal HEAD: point at a branch reference.
    barerepo.set_head("refs/heads/master")
    assert "refs/heads/master" == barerepo.head.name
    assert HEAD_SHA == barerepo.head.target.hex
Ejemplo n.º 7
0
def index_mercurial(map_file):
    """Parse a "git-rev hg-rev" map file into a {pygit2.Oid: hg_rev} dict."""
    mapping = {}
    # Use a context manager so the file is closed even on parse errors
    # (the original leaked the handle), and iterate the file lazily
    # instead of materialising every line with readlines().
    with open(map_file) as f:
        for line in f:
            git_rev, hg_rev = line.strip().split()
            mapping[pygit2.Oid(hex=git_rev)] = hg_rev
    return mapping
Ejemplo n.º 8
0
def ensure_oid(something):
    '''Coerce *something* into a pygit2.Oid.

    Accepts an Oid, a hex string, raw bytes, or any object whose ``.id``
    attribute is an Oid.

    Raises a GitDictError, if conversion fails
    '''
    if isinstance(something, pygit2.Oid):
        return something
    try:
        if isinstance(something, str):
            return pygit2.Oid(hex=something)
        if isinstance(something, bytes):
            return pygit2.Oid(raw=something)
        # Fall back to an object carrying its oid on an .id attribute.
        oid_attr = something.id
        if isinstance(oid_attr, pygit2.Oid):
            return oid_attr
    except (ValueError, AttributeError, TypeError):
        # Any conversion failure falls through to the error below.
        pass
    raise GitDictError('Unconvertable Oid: ' + repr(something))
Ejemplo n.º 9
0
        def __init__(self):
            """Set up progress state for tracking a fetch/transfer."""
            self.progress = progressbar.ProgressBar()

            # Phase flags: object transfer first, then delta resolution.
            self.started_transfer = False
            self.transfer_done = False
            self.started_deltas = False
            self.deltas_done = False

            # The all-zero oid, used as a "no object" sentinel.
            self.null_oid = pygit2.Oid(hex='0' * 40)
Ejemplo n.º 10
0
def release_contains(repo: git.Repository, release: Release,
                     commit_oid: git.Oid, name: str):
    """Return whether *commit_oid* is in *release*'s history; "?" on git errors."""
    target = git.Oid(hex=release.commit)
    try:
        return utils.commit_contains(repo, target, commit_oid)
    except git.GitError as e:
        LOG.warning(
            f"Repo: [{repo.workdir}], Error: [{repr(e)}], Project: [{name}]")
        return "?"
Ejemplo n.º 11
0
 def merge_base(self, oid1, oid2):
     """Find the merge base of two oids by shelling out to `git merge-base`.

     Returns None when git exits non-zero (no common ancestor).
     """
     # FIXME: Overridden to work around https://github.com/koordinates/kart/issues/555
     # Caused by https://github.com/libgit2/libgit2/issues/6123
     cmd = ["git", "-C", self.path, "merge-base", str(oid1), str(oid2)]
     try:
         result = subprocess.check_output(cmd,
                                          encoding="utf8",
                                          env=tool_environment())
     except subprocess.CalledProcessError:
         return None
     return pygit2.Oid(hex=result.strip())
Ejemplo n.º 12
0
def index_existing():
    """Map each original commit Oid to its blame commit, walking from HEAD.

    Returns an empty dict when the repo has no commits yet (unborn HEAD).
    """
    try:
        head_target = new_repo.head.target
    except (KeyError, pygit2.GitError):
        # HEAD doesn't resolve yet (empty repo) - nothing to index.
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        return {}

    blame_map = {}
    for commit in new_repo.walk(head_target):
        # The original commit hash is the second word of the blame
        # commit's message.
        orig = pygit2.Oid(hex=commit.message.split()[1])
        blame_map[orig] = commit

    return blame_map
Ejemplo n.º 13
0
def git_log(repo, *, start=None, end=None):
    """Yield commits from *start* (default: HEAD) down to *end*, inclusive.

    Raises InvalidRange when *end* is given but never reached.
    """
    # pylint: disable=no-member
    begin = git.Oid(hex=start) if start else repo.head.target

    reached_end = False
    for commit in repo.walk(begin, git.GIT_SORT_TOPOLOGICAL):
        yield commit

        if commit.hex == end:
            reached_end = True
            break

    if end is not None and not reached_end:
        raise InvalidRange()
Ejemplo n.º 14
0
def index_existing():
    """Map each original commit Oid to its blame commit under blame_ref.

    Returns an empty dict when blame_ref does not exist yet.
    """
    try:
        ref = new_repo.lookup_reference(blame_ref).resolve()
    except (KeyError, pygit2.GitError):
        # ref doesn't exist yet, so nothing to index.
        # Narrowed from a bare `except:` so real errors (including
        # KeyboardInterrupt/SystemExit) still propagate.
        return {}

    blame_map = {}
    for commit in new_repo.walk(ref.target):
        # The original commit hash is the second word of the blame
        # commit's message.
        orig = pygit2.Oid(hex=commit.message.split()[1])
        blame_map[orig] = commit

    return blame_map
Ejemplo n.º 15
0
def release_contains(repo: git.Repository, release: Release,
                     commit_oid: git.Oid, name: str):
    """Return whether *commit_oid* is in *release*, or "?" when unknowable."""
    # Releases without a commit ref cannot be checked at all.
    if not release.commit:
        utils.warning(f"{name} has a null commit ref\n")
        return "?"

    target = git.Oid(hex=release.commit)
    try:
        return utils.commit_contains(repo, target, commit_oid)
    except utils.CommitNotFound as e:
        utils.warning(f"Error: [{repr(e)}], Project: [{name}]\n")
        return "?"
Ejemplo n.º 16
0
 def run(self):
     """
     Consume commit hashes from the queue and parse each commit.

     Uses the poison-pill pattern: the queue ends with one ``None`` per
     worker process, and pulling ``None`` terminates this worker.
     """
     while True:
         task = self.queue.get()
         if task is None:
             # Poison pill - acknowledge it and stop.
             self.queue.task_done()
             return
         commit = self.repository[pygit2.Oid(hex=task)]
         self.parse_commit(commit)
         self.queue.task_done()
Ejemplo n.º 17
0
    def versioned_file_at_path(self, path, version_hash=None):
        """return the contents of the file at path at the version of version_hash.
        if version_hash is None, the newest will be returned"""
        full_path = os.path.join(self.folder_path(), path)

        # No version requested: read straight from the working directory.
        if not version_hash:
            with open(full_path, encoding='utf-8') as file:
                return file.read()

        self.family.ensure_source_folder_exists()
        commit = self.family._repo().get(pygit2.Oid(hex=version_hash))

        # Walk the commit's tree down through each path segment.
        tree = commit.tree
        relative_path = full_path[len(self.family.source_folder_path()) + 1:]
        try:
            for segment in relative_path.split('/'):
                tree = self.family._repo().get(tree[segment].id)
        except KeyError:
            raise FileNotFoundError()

        return tree.data
Ejemplo n.º 18
0
def archive_repository(download_folder, url, github_slug, timestamp, token):
    '''Download previous versions of the chosen repository at the specified
    intervals and commit these to slave account for future analysis.

    Returns True on success; False when the commit SHA cannot be resolved
    or cloning/committing fails (the partially-downloaded folder is
    removed in that case).
    '''
    import shutil

    sha = get_sha(github_slug, timestamp, token)
    if not sha:
        return False

    oid = pygit2.Oid(hex=sha)
    try:
        pygit2.clone_repository(url, download_folder, bare=False)
        init_repo(oid, download_folder)
        # e.g. "repo20200101" - repository name + compact timestamp.
        u_name = github_slug.split('/')[1] + str(timestamp).replace('-', '')
        bare_url = create_bare_repo(u_name)
        if bare_url:
            commit_files(bare_url, download_folder)
    except Exception:
        # Bug fix: os.rmdir() only removes *empty* directories; a freshly
        # cloned repository is never empty, so cleanup always raised
        # OSError. shutil.rmtree removes the whole tree.
        shutil.rmtree(download_folder, ignore_errors=True)
        return False

    return True  # Successfully archived
Ejemplo n.º 19
0
    def read_from_repo(cls, repo):
        """Load the in-progress merge state from *repo*: the ancestor/ours/
        theirs commit ids, their short ids, and the branch names involved.
        """
        # HEAD is assumed to be our side of the merge. MERGE_HEAD (and MERGE_INDEX)
        # are not version controlled, but are simply files in the repo. For these
        # reasons, the user should not be able to change branch mid merge.

        head = CommitWithReference.resolve(repo, "HEAD")
        ours_commit_id = head.id
        # MERGE_HEAD holds the hex id of the commit being merged in.
        theirs_commit_id = pygit2.Oid(hex=read_repo_file(repo, MERGE_HEAD).strip())

        commit_ids3 = AncestorOursTheirs(
            # We find the ancestor by recalculating it fresh each time.
            repo.merge_base(ours_commit_id, theirs_commit_id),
            ours_commit_id,
            theirs_commit_id,
        )
        short_ids3 = commit_ids3.map(lambda c: repo[c].short_id)
        # The ancestor has no branch; "theirs" branch name is optional.
        branches3 = AncestorOursTheirs(
            None,
            head.branch_shorthand,
            read_repo_file(repo, MERGE_BRANCH, missing_ok=True, strip=True),
        )

        return cls._zip_together(repo, commit_ids3, short_ids3, branches3)
Ejemplo n.º 20
0
    def __init__(self, repository_path, commit_id=None, branch_name=None):
        """Open the repository at *repository_path* and choose the parent
        commit(s) for upcoming writes.

        At most one of *commit_id* (hex string) or *branch_name* may be
        given; with neither, a parentless (initial) commit is prepared.

        Raises GitError when no repository is found, ValueError when both
        commit_id and branch_name are set.
        """
        try:
            path = pygit2.discover_repository(repository_path)
        except KeyError:
            raise GitError(
                'no repository found in "{}"'.format(repository_path))
        self.repo = repo = pygit2.Repository(path)

        if branch_name is not None and commit_id is not None:
            raise ValueError(
                'only one of branch_name and commit_id should be set')

        self.branch = None
        if branch_name is None and commit_id is None:
            parents = []
        elif branch_name is not None:
            self.branch = branch_name
            branch_reference = get_branch_reference(self.branch)
            try:
                commit_oid = self.repo.lookup_reference(
                    branch_reference).target
            except KeyError:
                # Branch doesn't exist yet: first commit on it has no parent.
                parents = []
            else:
                parents = [commit_oid]
        else:
            commit_oid = pygit2.Oid(hex=commit_id)
            parents = [commit_oid]

        self.parents = parents
        # Seed the in-memory tree from the first parent's tree, if any.
        tree = []
        if parents:
            tree = repo[parents[0]].tree
        self.memory_tree = MemoryTree(tree, {}, {})

        self.has_changes = False
        self.messages = []
Ejemplo n.º 21
0
def new(
    ctx,
    name,
    commit=None,
    version=None,
    dry=False,
    yes=False,
    image_name=None,
    image_id=None,
    rollback=False,
    filter_files_path=None,
    profile=None,
):
    """
    Create a new release.

    Builds a Release document for *name* at *commit* (default HEAD) with a
    changelog computed since the latest stored release, then uploads it
    unless *dry* is set. A rollback release requires the rollback flag and,
    unless *yes*, interactive confirmation.
    """
    repo = utils.git_repo()

    client = utils.s3_client(profile)
    latest = next(get_releases(client, name), None)
    # Commit of the most recent release, if any - the changelog baseline.
    latest_oid = git.Oid(hex=latest.commit) if latest else None

    if commit is None:
        commit = "HEAD"

    commit_oid = utils.revparse(repo, commit)

    if version is None:
        # create next version
        version = 1 if latest is None else latest.version + 1

    else:
        version = int(version)

    if image_id is None:
        image_id = _get_image_id(ctx,
                                 commit_oid,
                                 name=name,
                                 image_name=image_name)

        if image_id is None:
            utils.fatal("Image not found")

    # Optionally restrict the changelog to commits touching listed files.
    keep_only_files = None
    if filter_files_path:
        with open(filter_files_path) as fp:
            keep_only_files = [line.strip() for line in fp]

    changelog = utils.changelog(repo,
                                commit_oid,
                                latest_oid,
                                keep_only_files=keep_only_files)

    action_type = ActionType.automated if config.IS_CONCOURSE else ActionType.manual

    release = Release(
        version=version,
        commit=commit_oid.hex,
        changelog=changelog.short_text,
        version_id="",
        image=image_id,
        timestamp=datetime.now(),
        author=utils.get_author(repo, commit_oid),
        rollback=changelog.rollback,
        action_type=action_type,
        commits=[commit.hex for commit in changelog.logs],
    )

    utils.printfmt(release)

    # Dry run: show the release document but never write anything.
    if dry:
        return

    if release.rollback:
        utils.warning("This is a rollback! :warning:\n")

        # Rollbacks must be requested explicitly via the flag.
        if not rollback:
            utils.warning("Missing flag --rollback\n")
            utils.fatal("Aborted!")

    if not yes:

        # Rollbacks get an extra confirmation before the standard one.
        if release.rollback:
            ok = utils.confirm(
                "Are you sure you want to create a rollback release?",
                style=utils.TextStyle.yellow,
            )

            if not ok:
                utils.fatal("Aborted!")

        ok = utils.confirm("Are you sure you want to create this release?")
        if not ok:
            utils.fatal("Aborted!")

    put_release(client, _get_bucket(), name, release)

    utils.success("Created new release :tada:\n")
def git_commit(req_handler):
    """Receive a commit over HTTP: branch off, commit, merge back to master.

    Expects a POST with a JSON payload carrying working_copy metadata
    (author, commit_msg, base_commit id) plus the file contents to commit.
    Creates a worktree/branch named after the author, commits there, merges
    it into master with a "theirs" strategy, then cleans the worktree up.
    """
    ### Security methods to implement:
    ###    (a)  Size of files limit
    ###    (b)  commit only once per second

    req = req_handler.request
    if (req.method != "POST"):
        raise HTTPError(400, "Expecting POST method with JSON payload")
    try:
        commit = tornado.escape.json_decode(req.body)
        author = commit["working_copy"]["author"]
        message = commit["working_copy"]["commit_msg"]
        base_commit_id = commit["working_copy"]["base_commit"]["id"]
        # Only accept files whose keys are known from the config.
        files = {
            k: commit["files"][k]
            for k in config.files.keys() if k in commit["files"]
        }
    except ValueError:
        # NOTE(review): missing keys raise KeyError, which is NOT caught
        # here and would surface as a 500 - confirm whether that's intended.
        raise HTTPError(400, "Invalid JSON input")

    if not author or not author_legal(author):
        raise HTTPError(400,
                        "Please specify a valid author (%s given)" % author)
    # Base commit must look like a (possibly abbreviated) git sha1.
    if not base_commit_id or not re.match(r"[a-z0-9]{4,40}",
                                          base_commit_id):  # need to be hex
        raise HTTPError(400,
                        "Malformed base commit id, it's not a git sha1 ref.")
    # One in-flight commit per author: an existing worktree means busy.
    if os.path.exists(workdir_path(author)):
        raise HTTPError(
            400,
            "Author '%s' worktree exists, please try again later" % author)
    wt = master.add_worktree(
        author,
        workdir_path(author))  # will also create branch called <author>
    # wt = master.lookup_worktree(author)
    work = pygit2.Repository(wt.path)

    # basically do git checkout <base_commit_id> a la libgit2
    oid = pygit2.Oid(hex=base_commit_id)
    try:
        work.set_head(oid)
    except KeyError:
        raise HTTPError(400, "Nonexisting base commit given")
    work.checkout_index()
    # now work is in detached state if oid is not HEAD.

    # Write each submitted file into the worktree.
    for filekey, filename in config.files.items():
        if not filekey in files: continue
        write_json_file(workdir_file(author, filename), files[filekey])

    ## the cumbersome "git add" from pygit2
    # treeBuilder = workrepo.TreeBuilder()
    # for fn in repo_files.values():
    #	blob = workrepo.create_blob_fromworkdir(fn)
    #	treeBuilder.insert(fn, blob, os.stat(workdir_file(author, fn)).st_mode)
    # tree = treeBuilder.write()

    # Alternatively by maintaining the index
    work.index.read()
    #map(work.index.add, config.files.values())
    work.index.add_all()
    work.index.write()
    tree = work.index.write_tree()

    # the actual commit
    sign = signature(author)
    message += "\n\nCommitted-via: t29-inventory-webserver"
    work_commit = work.create_commit("HEAD", sign, sign, message, tree,
                                     [work.head.target])

    # for whatever reason, since I'm detached now, have to move manually...
    work_branch = master.lookup_branch(author)
    work_branch.set_target(work_commit)

    ## This basically works, but leaves master in a state which says
    ##   "All conflicts fixed but you are still merging."
    if False:
        # after every edit, merge work to the master.
        master.merge(work.head.target)
        # do not use the created index but do our own one, with a "theirs" merge strategy
        # Doesn't work, however. Unfortunately.
        #index = master.merge_commits(master.head.target, work.head.target, favor="theirs")
        #tree = index.write_tree()
        tree = master.index.write_tree()
        master_commit = master.create_commit(
            "HEAD", sign, sign, "Merging\n" + message, tree,
            [master.head.target, work.head.target])

    # Fall back to good old shell in order to sync the branches
    ret = extgit("merge", "--no-edit", "-q", author, "-X", "theirs", "-m",
                 "Merging by overwriting from branch %s." % author)
    if ret:
        raise HTTPError(
            400, "Could not merge git branch %s to master (status %d)" %
            (author, ret))
    # okay, whyever now the merge leaves us in an ugly state...
    extgit("rebase", "HEAD", "master")

    # TODO: Instead of dumb overwriting, should implement the following strategy:
    #   1. Try to do a git merge, check if result is readable JSON. If so, nice.
    #   2. If not, fallback to git merge -X theirs.

    # Cleanup
    shutil.rmtree(workdir_path(author))
    wt.prune(True)  # force=True neccessary to delete it
    work_branch.delete()

    print("Committed from user %s" % author)
    return {"Result": "Success"}  # not really processed anyway
Ejemplo n.º 23
0
def upgrade(ctx, source, dest):
    """
    Upgrade a repository for an earlier version of Sno to be compatible with the latest version.
    The current repository structure of Sno is known as Datasets V2, which is used from Sno 0.5 onwards.

    SOURCE must be an existing bare repository at version 0 or 1; DEST must
    not exist. Commits are rewritten in topological order, references are
    re-created, and a working copy is made if the source configured one.
    """
    source = Path(source)
    dest = Path(dest)

    if dest.exists():
        raise click.BadParameter(f"'{dest}': already exists",
                                 param_hint="DEST")

    source_repo = pygit2.Repository(str(source))
    if not source_repo or not source_repo.is_bare:
        raise click.BadParameter(f"'{source}': not an existing repository",
                                 param_hint="SOURCE")

    source_version = get_repo_version(source_repo)
    if source_version == 2:
        raise InvalidOperation(
            "Cannot upgrade: source repository is already at latest version (Datasets V2)"
        )

    if source_version not in (0, 1):
        # Bug fix: this message was missing its f-prefix, so it printed the
        # literal text "{source_version}" instead of the actual value.
        raise InvalidOperation(
            f"Unrecognised source repository version: {source_version}")

    # action!
    click.secho(f"Initialising {dest} ...", bold=True)
    dest.mkdir()
    dest_repo = pygit2.init_repository(str(dest), bare=True)
    write_repo_version_config(dest_repo, 2)

    # walk _all_ references
    source_walker = source_repo.walk(
        source_repo.head.target,
        pygit2.GIT_SORT_TOPOLOGICAL | pygit2.GIT_SORT_REVERSE)
    for ref in source_repo.listall_reference_objects():
        source_walker.push(ref.resolve().target)

    # Maps source commit hex -> rewritten commit id, for parent rewriting.
    commit_map = {}

    click.secho("\nWriting new commits ...", bold=True)
    # Robustness: keep `i` defined even if the walker yields no commits,
    # so the summary below cannot raise NameError.
    i = -1
    for i, source_commit in enumerate(source_walker):
        dest_parents = []
        for parent_id in source_commit.parent_ids:
            try:
                dest_parents.append(commit_map[parent_id.hex])
            except KeyError:
                # GIT_SORT_REVERSE should have emitted parents first.
                raise ValueError(
                    f"Commit {i} ({source_commit.id}): Haven't seen parent ({parent_id})"
                )

        _upgrade_commit(
            i,
            source_repo,
            source_commit,
            source_version,
            dest_parents,
            dest_repo,
            commit_map,
        )

    click.echo(f"{i+1} commits processed.")

    click.secho("\nUpdating references ...", bold=True)
    for ref in source_repo.listall_reference_objects():
        if ref.type == pygit2.GIT_REF_OID:
            # real references
            target = commit_map[ref.target.hex]
            dest_repo.references.create(ref.name, target, True)  # overwrite
            click.echo(f"  {ref.name} ({ref.target.hex[:8]} → {target[:8]})")

    for ref in source_repo.listall_reference_objects():
        if ref.type == pygit2.GIT_REF_SYMBOLIC:
            dest_repo.references.create(ref.name, ref.target)
            click.echo(f"  {ref.name} → {ref.target}")

    # Reproduce the source's HEAD state (detached vs. symbolic).
    if source_repo.head_is_detached:
        dest_repo.set_head(
            pygit2.Oid(hex=commit_map[source_repo.head.target.hex]))
    else:
        dest_repo.set_head(source_repo.head.name)

    click.secho("\nCompacting repository ...", bold=True)
    subprocess.check_call(["git", "-C", str(dest), "gc"])

    if "sno.workingcopy.path" in source_repo.config:
        click.secho("\nCreating working copy ...", bold=True)
        subctx = click.Context(ctx.command, parent=ctx)
        subctx.ensure_object(context.Context)
        subctx.obj.user_repo_path = str(dest)
        subctx.invoke(checkout.create_workingcopy)

    click.secho("\nUpgrade complete", fg="green", bold=True)
Ejemplo n.º 24
0
def test_iterable(barerepo):
    """Iterating a repository yields its object ids, including BLOB_HEX."""
    contents = list(barerepo)
    assert pygit2.Oid(hex=BLOB_HEX) in contents
Ejemplo n.º 25
0
# Standard Library
import binascii
import os
import sys
import tempfile

import pygit2
import pytest

from . import utils

# Fixture object ids from the bare test repository.
HEAD_SHA = '784855caf26449a1914d2cf62d12b9374d76ae78'
PARENT_SHA = 'f5e5aa4e36ab0fe62ee1ccc6eb8f79b866863b87'  # HEAD^
BLOB_HEX = 'af431f20fc541ed6d5afede3e2dc7160f6f01f16'
BLOB_RAW = binascii.unhexlify(BLOB_HEX.encode('ascii'))  # raw 20-byte digest
BLOB_OID = pygit2.Oid(raw=BLOB_RAW)


def test_is_empty(barerepo):
    """A repository that has commits reports itself as non-empty."""
    assert not barerepo.is_empty


def test_is_bare(barerepo):
    """The bare fixture repository reports itself as bare."""
    assert barerepo.is_bare


def test_head(barerepo):
    """HEAD is a born pygit2.Reference pointing at HEAD_SHA."""
    head = barerepo.head
    assert head.target.hex == HEAD_SHA
    assert type(head) == pygit2.Reference
    assert not barerepo.head_is_unborn
Ejemplo n.º 26
0
class MergeIndex:
    """
    Like a pygit2.Index, but every conflict has a short key independent of its path,
    and the entire index including conflicts can be serialised to an index file.
    Resolutions to conflicts can also be stored, independently of entries of conflicts.
    Conflicts are easier to modify than in a pygit2.Index (where they are backed by C iterators).
    When serialised to an index file, conflicts will be added in a special .conflicts/ directory,
    and resolutions will be added in a special .resolves/ directory (resolutions are called
    "resolves" here for brevity and with consistency with the verb, ie "sno resolve").
    """

    # We could use pygit2.IndexEntry everywhere but it has unhelpful __eq__ and __repr__ behaviour.
    # So we have this equivalent struct.
    # TODO - fix pygit2.IndexEntry.
    Entry = namedtuple("Entry", ("path", "id", "mode"))

    # Note that MergeIndex only contains Entries, which are simple structs -
    # not RichConflicts, which refer to the entire RepositoryStructure to give extra functionality.

    def __init__(self, entries, conflicts, resolves):
        # entries: {path: Entry}; conflicts: {key: AncestorOursTheirs};
        # resolves: {key: [Entry]}.
        self.entries = entries
        self.conflicts = conflicts
        self.resolves = resolves

    @classmethod
    def from_pygit2_index(cls, index):
        """
        Converts a pygit2.Index to a MergeIndex, preserving both entries and conflicts.
        Conflicts are assigned arbitrary unique keys based on the iteration order.
        """
        entries = {e.path: cls._ensure_entry(e) for e in index}
        conflicts = {
            str(k): cls._ensure_conflict(c) for k, c in enumerate(index.conflicts)
        }
        resolves = {}
        return MergeIndex(entries, conflicts, resolves)

    def __eq__(self, other):
        """Equal iff entries, conflicts and resolves all match."""
        if not isinstance(other, MergeIndex):
            return False
        return (
            self.entries == other.entries
            and self.conflicts == other.conflicts
            and self.resolves == other.resolves
        )

    def __repr__(self):
        contents = json.dumps(
            {
                "entries": self.entries,
                "conflicts": self.conflicts,
                "resolves": self.resolves,
            },
            default=lambda o: str(o),
            indent=2,
        )
        return f'<MergeIndex {contents}>'

    def add(self, index_entry):
        """Add (or replace) a regular entry, keyed by its path."""
        index_entry = self._ensure_entry(index_entry)
        self.entries[index_entry.path] = index_entry

    def remove(self, path):
        """Remove the regular entry at *path* (KeyError if absent)."""
        del self.entries[path]

    def __iter__(self):
        return iter(self.entries.values())

    def __getitem__(self, path):
        return self.entries[path]

    def __setitem__(self, path, index_entry):
        assert path == index_entry.path
        self.entries[path] = index_entry

    def add_conflict(self, key, conflict):
        """Store *conflict* (an AncestorOursTheirs or 3-tuple) under str *key*."""
        if not isinstance(key, str):
            raise TypeError("conflict key must be str", type(key))
        self.conflicts[key] = self._ensure_conflict(conflict)

    def remove_conflict(self, key):
        del self.conflicts[key]

    @classmethod
    def _serialise_conflict(cls, key, conflict):
        """Yield index entries encoding *conflict* under .conflicts/<key>/..."""
        for version, entry in zip(AncestorOursTheirs.NAMES, conflict):
            if not entry:
                continue
            result_path = f".conflicts/{key}/{version}/{entry.path}"
            yield cls.Entry(result_path, entry.id, entry.mode)

    def _serialise_conflicts(self):
        for key, conflict3 in self.conflicts.items():
            yield from self._serialise_conflict(key, conflict3)

    _CONFLICT_PATTERN = re.compile(
        r"^.conflicts/(?P<key>[^/]+)/(?P<version>ancestor|ours|theirs)/(?P<path>.+)$"
    )

    @classmethod
    def _deserialise_conflict_part(cls, index_entry):
        """Inverse of _serialise_conflict for one entry: returns (key, partial
        AncestorOursTheirs) parsed from a .conflicts/ path."""
        match = cls._CONFLICT_PATTERN.match(index_entry.path)
        if not match:
            raise RuntimeError(f"Couldn't deserialise conflict: {index_entry.path}")

        key = match.group("key")
        version = match.group("version")
        result_path = match.group("path")
        result_entry = cls.Entry(result_path, index_entry.id, index_entry.mode)
        result = AncestorOursTheirs.partial(**{version: result_entry})
        return key, result

    @ungenerator(set)
    def _conflicts_paths(self):
        """All the paths in all the entries in all the conflicts, as a set."""
        for conflict in self.conflicts.values():
            for entry in conflict:
                if entry:
                    yield entry.path

    def add_resolve(self, key, resolve):
        """Store *resolve* (a list of entries) under str *key*."""
        if not isinstance(key, str):
            raise TypeError("resolve key must be str", type(key))
        self.resolves[key] = self._ensure_resolve(resolve)

    def remove_resolve(self, key):
        del self.resolves[key]

    # All-zero oid + blob mode: placeholder entry marking a resolve present.
    _EMPTY_OID = pygit2.Oid(hex="0" * 40)
    _EMPTY_MODE = pygit2.GIT_FILEMODE_BLOB

    @classmethod
    def _serialise_resolve(cls, key, resolve):
        # We always yield at least one entry per resolve, even when the resolve
        # has no features - otherwise it would appear to be unresolved.
        yield cls.Entry(f".resolves/{key}/resolved", cls._EMPTY_OID, cls._EMPTY_MODE)

        for i, entry in enumerate(resolve):
            result_path = f".resolves/{key}/{i}/{entry.path}"
            yield cls.Entry(result_path, entry.id, entry.mode)

    def _serialise_resolves(self):
        for key, resolve3 in self.resolves.items():
            yield from self._serialise_resolve(key, resolve3)

    _RESOLVED_PATTERN = re.compile(r"^.resolves/(?P<key>.+?)/resolved$")
    _RESOLVE_PART_PATTERN = re.compile(
        r"^.resolves/(?P<key>[^/]+)/(?P<i>[^/]+)/(?P<path>.+)$"
    )

    @classmethod
    def _deserialise_resolve_part(cls, index_entry):
        """Inverse of _serialise_resolve for one entry: returns (key, Entry)
        or (key, None) for the bare "resolved" marker."""
        match = cls._RESOLVED_PATTERN.match(index_entry.path)
        if match:
            return match.group("key"), None

        match = cls._RESOLVE_PART_PATTERN.match(index_entry.path)
        if not match:
            raise RuntimeError(f"Couldn't deserialise resolve: {index_entry.path}")

        key = match.group("key")
        result_path = match.group("path")
        result_entry = cls.Entry(result_path, index_entry.id, index_entry.mode)
        return key, result_entry

    def _resolves_entries(self):
        """All the entries in all the resolves."""
        for resolve in self.resolves.values():
            for entry in resolve:
                if entry:
                    yield entry

    @property
    def unresolved_conflicts(self):
        """Conflicts that have no resolve stored against their key."""
        return {k: v for k, v in self.conflicts.items() if k not in self.resolves}

    @classmethod
    def read(cls, path):
        """Deserialise a MergeIndex from the given path."""
        index = pygit2.Index(str(path))
        if index.conflicts:
            raise RuntimeError("pygit2.Index conflicts should be empty")
        entries = {}
        conflicts = {}
        resolves = {}
        # Route each entry by its path prefix: conflicts, resolves, or plain.
        for e in index:
            if e.path.startswith(".conflicts/"):
                key, conflict_part = cls._deserialise_conflict_part(e)
                conflicts.setdefault(key, AncestorOursTheirs.EMPTY)
                conflicts[key] |= conflict_part
            elif e.path.startswith(".resolves/"):
                key, resolve_part = cls._deserialise_resolve_part(e)
                resolves.setdefault(key, [])
                if resolve_part:
                    resolves[key] += [resolve_part]
            else:
                entries[e.path] = cls._ensure_entry(e)

        return MergeIndex(entries, conflicts, resolves)

    @classmethod
    def read_from_repo(cls, repo):
        """Deserialise a MergeIndex from the MERGE_INDEX file in the given repo."""
        return cls.read(repo_file_path(repo, MERGE_INDEX))

    def write(self, path):
        """
        Serialise this MergeIndex to the given path.
        Regular entries, conflicts, and resolves are each serialised separately,
        so that they can be roundtripped accurately.
        """
        index = pygit2.Index(str(path))
        index.clear()

        for e in self.entries.values():
            index.add(pygit2.IndexEntry(e.path, e.id, e.mode))
        for e in self._serialise_conflicts():
            index.add(pygit2.IndexEntry(e.path, e.id, e.mode))
        for e in self._serialise_resolves():
            index.add(pygit2.IndexEntry(e.path, e.id, e.mode))
        index.write()

    def write_to_repo(self, repo):
        """Serialise this MergeIndex to the MERGE_INDEX file in the given repo."""
        self.write(repo_file_path(repo, MERGE_INDEX))

    def write_resolved_tree(self, repo):
        """
        Write all the merged entries and the resolved conflicts to a tree in the given repo.
        Resolved conflicts will be written the same as merged entries in the resulting tree.
        Only works when all conflicts are resolved.
        """
        assert not self.unresolved_conflicts
        index = pygit2.Index()

        # Entries that were merged automatically by libgit2, often trivially:
        for e in self.entries.values():
            index.add(pygit2.IndexEntry(e.path, e.id, e.mode))

        # libgit2 leaves entries in the main part of the index, even if they are conflicts.
        # We make sure this index only contains merged entries and resolved conflicts.
        index.remove_all(list(self._conflicts_paths()))

        # Entries that have been explicitly selected to resolve conflicts:
        for e in self._resolves_entries():
            index.add(pygit2.IndexEntry(e.path, e.id, e.mode))

        return index.write_tree(repo)

    @classmethod
    def _ensure_entry(cls, entry):
        """Coerce a pygit2.IndexEntry (or pass through Entry/None) to Entry."""
        if entry is None or isinstance(entry, cls.Entry):
            return entry
        elif isinstance(entry, pygit2.IndexEntry):
            return cls.Entry(entry.path, entry.id, entry.mode)
        else:
            raise TypeError(
                "Expected entry to be type Entry or IndexEntry", type(entry)
            )

    @classmethod
    def _ensure_conflict(cls, conflict):
        """Coerce a 3-tuple of entries to AncestorOursTheirs (or pass through)."""
        if isinstance(conflict, AncestorOursTheirs):
            return conflict
        elif isinstance(conflict, tuple):
            return AncestorOursTheirs(
                cls._ensure_entry(conflict[0]),
                cls._ensure_entry(conflict[1]),
                cls._ensure_entry(conflict[2]),
            )
        else:
            raise TypeError(
                "Expected conflict to be type AncestorOursTheirs or tuple",
                type(conflict),
            )

    @classmethod
    def _ensure_resolve(cls, resolve):
        """Coerce every entry of a resolve list via _ensure_entry."""
        return [cls._ensure_entry(e) for e in resolve]
Ejemplo n.º 27
0
def get_oid(h):
    """Parse the hex digest string *h* into a ``pygit2.Oid``."""
    oid = pygit2.Oid(hex=h)
    return oid
Ejemplo n.º 28
0
def start(
    _,
    name,
    env,
    version=None,
    bucket=None,
    dry=False,
    yes=False,
    rollback=False,
    profile=None,
):
    """
    Deploy a release on an environment.

    Resolves the requested release (latest when ``version`` is None), builds
    a changelog against the last deploy found in the target bucket, prints
    the release document, and — unless ``dry`` — asks for confirmation and
    uploads the updated release document to start the deployment.

    Parameters:
        _: unused (CLI context placeholder).
        name: release/project name used to look up releases.
        env: target environment; used to resolve the bucket from config.
        version: release version to deploy; latest release when None.
        bucket: target S3 bucket; read from config for ``env`` when None.
        dry: print the release and return without deploying.
        yes: skip the interactive confirmation prompts.
        rollback: must be set explicitly to allow a rollback deployment.
        profile: AWS profile forwarded to the S3 client.
    """
    client = utils.s3_client(profile)
    repo = utils.git_repo()

    # Pick the requested release: the newest one when no version was given.
    if version is None:
        release = next(get_releases(client, name), None)

    else:
        release = get_release(client, name, int(version))

    if release is None:
        utils.fatal("Release not found")

    if bucket is None:
        bucket = utils.get_config()["deploy"][env]["s3_bucket"]

    # Most recent release already deployed to this bucket (None on first deploy).
    last_deploy = next(get_releases(client, name, bucket=bucket), None)

    last_deployed_version = int(last_deploy.version) if last_deploy else 0
    # On a rollback (target version < deployed version) the changelog should
    # still span the rolled-back releases, so start from the smaller version.
    if version is not None:
        since = min(int(version), last_deployed_version)
    else:
        since = last_deployed_version

    releases = list(get_releases(client, name, since=since))

    # the field `commits` is not present in all documents as it was introduced
    # in a later version. if any of the releases doesn't track them, we'll
    # skip the commit filtering to avoid not showing commits in the changelog.
    if any(rel.commits is None for rel in releases):
        commits = None

    else:
        commits = [
            commit for rel in releases if rel.commits for commit in rel.commits
        ]

    if last_deploy is None:
        # first deploy is always None
        # NOTE(review): release.commit is passed as-is here but wrapped in
        # git.Oid in the branch below — confirm utils.changelog accepts both.
        changelog = utils.changelog(repo,
                                    release.commit,
                                    None,
                                    keep_only_commits=commits)

        changelog_text = changelog.short_text
        is_rollback = release.rollback

    else:
        # create a changelog from the latest deploy commit
        changelog = utils.changelog(
            repo,
            git.Oid(hex=release.commit),
            git.Oid(hex=last_deploy.commit),
            keep_only_commits=commits,
        )

        changelog_text = changelog.short_text
        is_rollback = changelog.rollback

    action_type = ActionType.automated if config.IS_CONCOURSE else ActionType.manual

    # Stamp the release document with deploy-time metadata before upload.
    release = dataclasses.replace(
        release,
        changelog=changelog_text,
        timestamp=datetime.now(),
        author=utils.get_author(repo, git.Oid(hex=release.commit)),
        rollback=is_rollback,
        action_type=action_type,
        commits=commits,
    )

    utils.printfmt(release)

    if dry:
        return

    if release.rollback:
        commit_count = len(changelog.logs)
        utils.warning(":warning: This is a rollback! :warning:\n")
        utils.warning(
            f":warning: You are rolling back from {name} v{last_deployed_version} to v{version} :warning:\n"
        )
        utils.warning(
            f":warning: This will remove the above {commit_count} commits from {env} :warning:\n"
        )

        # Rollbacks additionally require the explicit --rollback flag.
        if not rollback:
            utils.error("Missing flag --rollback\n")
            utils.fatal("Aborted!")

    if not yes:

        # Rollbacks get an extra, highlighted confirmation prompt.
        if release.rollback:
            ok = utils.confirm(
                "Are you sure you want to start a rollback deployment?",
                style=utils.TextStyle.yellow,
            )

            if not ok:
                utils.fatal("Aborted!")

        ok = utils.confirm("Are you sure you want to start this deployment?")
        if not ok:
            utils.fatal("Aborted!")

    put_release(client, bucket, name, release)
    utils.success("Started new deployment :rocket:\n")
Ejemplo n.º 29
0
import sys
import pygit2
import tarfile
from datetime import date

# Collect the files touched by each commit named on the command line into a
# dated tarball ("taxbreak<year>-<month>.tar.gz"), storing each file as
# "<path>.<short-hash>" so several revisions of the same path can coexist.

try:
    repo = pygit2.Repository('.')
except pygit2.GitError:
    # Narrowed from a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; Repository() raises GitError outside a repository.
    print("No repository in current dir")
    sys.exit(1)  # sys.exit, not the site builtin exit()

print("Found repo at {}".format(repo.path))

files = []

# Remember where HEAD points so the work tree can be restored after each
# checkout below. (.id avoids the deprecated .hex -> Oid(hex=...) round-trip.)
master_oid = repo.head.peel().id
today = date.today()
with tarfile.open("taxbreak{}-{}.tar.gz".format(today.year, today.month),
                  "w:gz") as tar:
    for commit_hash in sys.argv[1:]:
        commit = repo.revparse_single(commit_hash)
        repo.checkout_tree(commit)

        # Diff against the first parent to see what this commit changed.
        # NOTE(review): '<hash>~1' fails for a root commit — confirm the
        # inputs always have a parent.
        diff = repo.diff(commit_hash + '~1', commit_hash)

        for delta in diff.deltas:
            path = delta.old_file.path
            tar.add(path, arcname=path + '.' + commit_hash[:9])
            files.append(path)
        # Restore the work tree to HEAD before processing the next commit.
        repo.reset(master_oid, pygit2.GIT_RESET_HARD)
# Deduplicate the collected paths.
files = set(files)
Ejemplo n.º 30
0
        line = line[len(parents):]

        # Presumably git raw-diff output: the number of mode/sha fields grows
        # with the number of parents, so skip 2 * (len(parents) + 1)
        # space-separated fields to reach the "<status>\t<path>" payload.
        # TODO(review): confirm against the producer of `line`.
        prefix = 2 * (len(parents) + 1)
        data = line.split(' ', prefix)
        data = data[prefix]
        data = data.split('\t')

        file_changes.append(data)

    # Python 2 print-to-file syntax: emit the change list as an HTML <ul>.
    print >> f, '<ul>'

    for change in file_changes:
        # change[0] is presumably the status letter and change[1] the path;
        # the path is linked into the /commit view and HTML-escaped.
        print >> f, '<li>%s <a href="/commit/%s/%s">%s</a>' % (
            change[0], commit.id, change[1], cgi.escape(change[1]))

    print >> f, '</ul>'

    if path:
        show_file(f, commit, path)

    print >> f, '</body>'
    print >> f, '</html>'


# Build a lookup from original commit oid -> blame-repo commit. Each commit in
# the blame repo stores the original oid as the second word of its message.
# NOTE(review): `map` shadows the builtin of the same name — rename if it is
# not referenced elsewhere in this script.
map = {}
for commit in blame_repo.walk(blame_repo.head.target):
    orig = pygit2.Oid(hex=commit.message.split()[1])
    map[orig] = commit

show_commit(sys.stdout, repo.get(sys.argv[3]), sys.argv[4])