Example #1
    def insertChangeset(db, parent, child, files):
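        # Note: changeset_type is not among the parameters; in the fuller
        # createChangeset examples below it is defined in the enclosing scope,
        # so this helper is presumably nested inside it in the original source.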
        while True:
            # Inserting new files will often clash when creating multiple
            # related changesets in parallel.  It's a simple operation, so if it
            # fails with an integrity error, just try again until it doesn't
            # fail.  (It will typically succeed the second time because then the
            # new files already exist, and it doesn't need to insert anything.)
            try:
                dbutils.find_files(db, files)
                db.commit()
                break
            except dbutils.IntegrityError:
                db.rollback()

        cursor = db.cursor()
        cursor.execute(
            "INSERT INTO changesets (type, parent, child) VALUES (%s, %s, %s) RETURNING id",
            (changeset_type, parent.getId(db) if parent else None,
             child.getId(db)))
        changeset_id = cursor.fetchone()[0]

        fileversions_values = []
        chunks_values = []

        file_ids = set()

        for file in files:
            if file.id in file_ids:
                raise Exception("duplicate:%d:%s" % (file.id, file.path))
            file_ids.add(file.id)

            fileversions_values.append(
                (changeset_id, file.id, file.old_sha1, file.new_sha1,
                 file.old_mode, file.new_mode))

            for index, chunk in enumerate(file.chunks):
                chunk.analyze(file, index == len(file.chunks) - 1)
                chunks_values.append(
                    (changeset_id, file.id, chunk.delete_offset,
                     chunk.delete_count, chunk.insert_offset,
                     chunk.insert_count, chunk.analysis,
                     1 if chunk.is_whitespace else 0))

            file.clean()

        if fileversions_values:
            cursor.executemany(
                """INSERT INTO fileversions (changeset, file, old_sha1, new_sha1, old_mode, new_mode)
                                       VALUES (%s, %s, %s, %s, %s, %s)""",
                fileversions_values)
        if chunks_values:
            cursor.executemany(
                """INSERT INTO chunks (changeset, file, deleteOffset, deleteCount, insertOffset, insertCount, analysis, whitespace)
                                       VALUES (%s, %s, %s, %s, %s, %s, %s, %s)""",
                chunks_values)

        return changeset_id
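
The comment at the top of this example describes a generic pattern: when concurrent workers may insert the same rows, retry the transaction until it stops failing with an integrity error. A minimal standalone sketch of that pattern, assuming a psycopg2-style connection and a caller-supplied operation (both assumptions, not taken from the example above):

    import psycopg2  # assumed driver; the example hides it behind dbutils

    def retry_on_integrity_error(db, operation, max_attempts=10):
        # Re-run `operation` and commit, retrying while it raises an
        # IntegrityError (e.g. two workers inserting the same rows at once).
        # Bounded so a persistent conflict cannot spin forever.
        for attempt in range(max_attempts):
            try:
                operation(db)
                db.commit()
                return
            except psycopg2.IntegrityError:
                db.rollback()
        raise RuntimeError("still conflicting after %d attempts" % max_attempts)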
Example #2
    def insertChangeset(db, parent, child, files):
        while True:
            # Inserting new files will often clash when creating multiple
            # related changesets in parallel.  It's a simple operation, so if it
            # fails with an integrity error, just try again until it doesn't
            # fail.  (It will typically succeed the second time because then the
            # new files already exist, and it doesn't need to insert anything.)
            try:
                dbutils.find_files(db, files)
                db.commit()
                break
            except dbutils.IntegrityError:
                if repository_name == "chromium":
                    raise Exception(repr((parent, child, files)))
                db.rollback()

        cursor = db.cursor()
        cursor.execute("INSERT INTO changesets (type, parent, child) VALUES (%s, %s, %s) RETURNING id",
                       (changeset_type, parent.getId(db) if parent else None, child.getId(db)))
        changeset_id = cursor.fetchone()[0]

        fileversions_values = []
        chunks_values = []

        file_ids = set()

        for file in files:
            if file.id in file_ids: raise Exception("duplicate:%d:%s" % (file.id, file.path))
            file_ids.add(file.id)

            fileversions_values.append((changeset_id, file.id, file.old_sha1, file.new_sha1, file.old_mode, file.new_mode))

            for index, chunk in enumerate(file.chunks):
                chunk.analyze(file, index == len(file.chunks) - 1)
                chunks_values.append((changeset_id, file.id, chunk.delete_offset, chunk.delete_count, chunk.insert_offset, chunk.insert_count, chunk.analysis, 1 if chunk.is_whitespace else 0))

            file.clean()

        if fileversions_values:
            cursor.executemany("""INSERT INTO fileversions (changeset, file, old_sha1, new_sha1, old_mode, new_mode)
                                       VALUES (%s, %s, %s, %s, %s, %s)""",
                               fileversions_values)
        if chunks_values:
            cursor.executemany("""INSERT INTO chunks (changeset, file, deleteOffset, deleteCount, insertOffset, insertCount, analysis, whitespace)
                                       VALUES (%s, %s, %s, %s, %s, %s, %s, %s)""",
                               chunks_values)

        return changeset_id
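
Both variants rely on PostgreSQL's RETURNING id to get the newly generated changeset key back from the INSERT, and on cursor.executemany to batch the per-file and per-chunk rows. A reduced sketch of those two techniques in isolation, assuming a psycopg2 connection and toy batches/items tables (names invented for illustration):

    import psycopg2  # assumed driver, matching the %s parameter style above

    conn = psycopg2.connect("dbname=example")  # placeholder connection string
    cursor = conn.cursor()

    # RETURNING id fetches the generated primary key in the same round trip as
    # the INSERT, which is how changeset_id is obtained in the examples.
    cursor.execute("INSERT INTO batches (label) VALUES (%s) RETURNING id",
                   ("nightly",))
    batch_id = cursor.fetchone()[0]

    # executemany sends all child rows as one batched statement instead of a
    # Python-level loop of single-row INSERTs.
    rows = [(batch_id, path) for path in ("a.txt", "b.txt", "c.txt")]
    cursor.executemany("INSERT INTO items (batch, path) VALUES (%s, %s)", rows)

    conn.commit()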
Example #3
def createChangeset(db, user, repository, commit=None, from_commit=None, to_commit=None, rescan=False, reanalyze=False, conflicts=False, filtered_file_ids=None, review=None, do_highlight=True, load_chunks=True):
    cursor = db.cursor()

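    # Determine the parent commit(s) to diff against and the changeset type:
    # 'conflicts' (diff against a merge replay), 'merge', 'direct' or 'custom'.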
    if conflicts:
        if commit:
            assert len(commit.parents) > 1

            cursor.execute("SELECT replay FROM mergereplays WHERE original=%s", (commit.getId(db),))
            row = cursor.fetchone()

            if row:
                replay = gitutils.Commit.fromId(db, repository, row[0])
            else:
                replay = repository.replaymerge(db, user, commit)
                if not replay: return None
                cursor.execute("INSERT INTO mergereplays (original, replay) VALUES (%s, %s)", (commit.getId(db), replay.getId(db)))

            from_commit = replay
            to_commit = commit

            parents = [replay]
        else:
            parents = [from_commit]
            commit = to_commit

        changeset_type = 'conflicts'
    elif commit:
        parents = [gitutils.Commit.fromSHA1(db, repository, sha1) for sha1 in commit.parents] or [None]
        changeset_type = 'merge' if len(parents) > 1 else 'direct'
    else:
        parents = [from_commit]
        commit = to_commit
        changeset_type = 'direct' if len(to_commit.parents) == 1 and from_commit == to_commit.parents[0] else 'custom'

    changes = None
    changesets = []
    fileversions_values = []
    chunks_values = []

    thin_diff = False

    changeset_ids = []

    for parent in parents:
        if parent:
            cursor.execute("SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s",
                           (parent.getId(db), commit.getId(db), changeset_type))
        else:
            cursor.execute("SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s",
                           (commit.getId(db), changeset_type))

        row = cursor.fetchone()

        if row: changeset_ids.append(row[0])
        else: break

    assert len(changeset_ids) in (0, len(parents))

    if changeset_ids:
        if rescan and user.hasRole(db, "developer"):
            cursor.executemany("DELETE FROM changesets WHERE id=%s", [(changeset_id,) for changeset_id in changeset_ids])
            db.commit()
            changeset_ids = []
        else:
            for changeset_id in changeset_ids:
                if changeset_type == 'custom':
                    cursor.execute("UPDATE customchangesets SET time=NOW() WHERE changeset=%s", (changeset_id,))

                changeset = load.loadChangeset(db, repository, changeset_id, filtered_file_ids=filtered_file_ids, load_chunks=load_chunks)
                changeset.conflicts = conflicts

                if reanalyze and user.hasRole(db, "developer"):
                    analysis_values = []

                    for file in changeset.files:
                        if not filtered_file_ids or file.id in filtered_file_ids:
                            for index, chunk in enumerate(file.chunks):
                                old_analysis = chunk.analysis
                                chunk.analyze(file, index == len(file.chunks) - 1, True)
                                if old_analysis != chunk.analysis:
                                    analysis_values.append((chunk.analysis, chunk.id))

                    if reanalyze == "commit" and analysis_values:
                        cursor.executemany("UPDATE chunks SET analysis=%s WHERE id=%s", analysis_values)

                changesets.append(changeset)

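    # No cached changesets were found: either request them via
    # client.requestChangesets and re-query, or (thin diff) parse the
    # differences inline with diff.parse.parseDifferences.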
    if not changesets:
        if len(parents) == 1 and from_commit and to_commit and filtered_file_ids:
            if from_commit.isAncestorOf(to_commit):
                iter_commit = to_commit
                while iter_commit != from_commit:
                    if len(iter_commit.parents) > 1:
                        thin_diff = True
                        break
                    iter_commit = gitutils.Commit.fromSHA1(db, repository, iter_commit.parents[0])
            else:
                thin_diff = True

        if not thin_diff:
            if changeset_type == "direct":
                request = { "changeset_type": "direct",
                            "child_sha1": commit.sha1 }
            elif changeset_type == "custom":
                request = { "changeset_type": "custom",
                            "parent_sha1": from_commit.sha1 if from_commit else "0" * 40,
                            "child_sha1": to_commit.sha1 }
            elif changeset_type == "merge":
                request = { "changeset_type": "merge",
                            "child_sha1": commit.sha1 }
            else:
                request = { "changeset_type": "conflicts",
                            "parent_sha1": from_commit.sha1,
                            "child_sha1": to_commit.sha1 }

            request["repository_name"] = repository.name

            client.requestChangesets([request])

            db.commit()

            for parent in parents:
                if parent:
                    cursor.execute("SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s",
                                   (parent.getId(db), commit.getId(db), changeset_type))
                else:
                    cursor.execute("SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s",
                                   (commit.getId(db), changeset_type))

                changeset_id = cursor.fetchone()[0]
                changeset = load.loadChangeset(db, repository, changeset_id, filtered_file_ids=filtered_file_ids, load_chunks=load_chunks)
                changeset.conflicts = conflicts

                changesets.append(changeset)
        else:
            changes = diff.parse.parseDifferences(repository, from_commit=from_commit, to_commit=to_commit, filter_paths=[describe_file(db, file_id) for file_id in filtered_file_ids])[from_commit.sha1]

            dbutils.find_files(db, changes)

            for file in changes:
                for index, chunk in enumerate(file.chunks):
                    chunk.analyze(file, index == len(file.chunks) - 1)

            changeset = diff.Changeset(None, from_commit, to_commit, changeset_type)
            changeset.conflicts = conflicts
            changeset.files = diff.File.sorted(changes)

            changesets.append(changeset)

    if do_highlight:
        highlights = {}

        for changeset in changesets:
            for file in changeset.files:
                if file.canHighlight():
                    if file.old_sha1 and file.old_sha1 != '0' * 40:
                        highlights[file.old_sha1] = (file.path, file.getLanguage())
                    if file.new_sha1 and file.new_sha1 != '0' * 40:
                        highlights[file.new_sha1] = (file.path, file.getLanguage())

        syntaxhighlight.request.requestHighlights(repository, highlights)

    return changesets
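
When only some files are requested (filtered_file_ids) and the range from_commit..to_commit crosses a merge, this example falls back to a "thin" diff computed inline rather than a stored changeset. The decision is a first-parent walk from to_commit back toward from_commit. A minimal sketch of that walk on its own, with a hypothetical lookup(sha1) loader standing in for gitutils.Commit.fromSHA1:

    def needs_thin_diff(from_commit, to_commit, lookup):
        # Walk first-parent links from to_commit back toward from_commit; any
        # merge commit on the way means a plain pairwise diff is insufficient.
        commit = to_commit
        while commit != from_commit:
            if len(commit.parents) > 1:
                return True
            commit = lookup(commit.parents[0])
        return False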
Example #4
def createChangeset(db,
                    user,
                    repository,
                    commit=None,
                    from_commit=None,
                    to_commit=None,
                    rescan=False,
                    reanalyze=False,
                    conflicts=False,
                    filtered_file_ids=None,
                    review=None,
                    do_highlight=True,
                    load_chunks=True):
    cursor = db.cursor()

    if conflicts:
        assert commit and len(commit.parents) > 1

        cursor.execute("SELECT replay FROM mergereplays WHERE original=%s",
                       (commit.getId(db), ))
        row = cursor.fetchone()

        if row:
            replay = gitutils.Commit.fromId(db, repository, row[0])
        else:
            replay = repository.replaymerge(db, user, commit)
            if not replay: return None
            cursor.execute(
                "INSERT INTO mergereplays (original, replay) VALUES (%s, %s)",
                (commit.getId(db), replay.getId(db)))

        from_commit = replay
        to_commit = commit

        parents = [replay]
        changeset_type = 'conflicts'
    elif commit:
        parents = [
            gitutils.Commit.fromSHA1(db, repository, sha1)
            for sha1 in commit.parents
        ] or [None]
        changeset_type = 'merge' if len(parents) > 1 else 'direct'
    else:
        parents = [from_commit]
        commit = to_commit
        changeset_type = ('direct'
                          if len(to_commit.parents) == 1
                          and from_commit == to_commit.parents[0]
                          else 'custom')

    changes = None
    changesets = []
    fileversions_values = []
    chunks_values = []

    thin_diff = False

    changeset_ids = []

    for parent in parents:
        if parent:
            cursor.execute(
                "SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s",
                (parent.getId(db), commit.getId(db), changeset_type))
        else:
            cursor.execute(
                "SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s",
                (commit.getId(db), changeset_type))

        row = cursor.fetchone()

        if row: changeset_ids.append(row[0])
        else: break

    assert len(changeset_ids) in (0, len(parents))

    if changeset_ids:
        if rescan and user.hasRole(db, "developer"):
            cursor.executemany("DELETE FROM changesets WHERE id=%s",
                               [(changeset_id, )
                                for changeset_id in changeset_ids])
            db.commit()
            changeset_ids = []
        else:
            for changeset_id in changeset_ids:
                if changeset_type == 'custom':
                    cursor.execute(
                        "UPDATE customchangesets SET time=NOW() WHERE changeset=%s",
                        (changeset_id, ))

                changeset = load.loadChangeset(
                    db,
                    repository,
                    changeset_id,
                    filtered_file_ids=filtered_file_ids,
                    load_chunks=load_chunks)
                changeset.conflicts = conflicts

                if reanalyze and user.hasRole(db, "developer"):
                    analysis_values = []

                    for file in changeset.files:
                        if not filtered_file_ids or file.id in filtered_file_ids:
                            for index, chunk in enumerate(file.chunks):
                                old_analysis = chunk.analysis
                                chunk.analyze(file,
                                              index == len(file.chunks) - 1,
                                              True)
                                if old_analysis != chunk.analysis:
                                    analysis_values.append(
                                        (chunk.analysis, chunk.id))

                    if reanalyze == "commit" and analysis_values:
                        cursor.executemany(
                            "UPDATE chunks SET analysis=%s WHERE id=%s",
                            analysis_values)

                changesets.append(changeset)

    if not changesets:
        if (len(parents) == 1 and from_commit and to_commit
                and filtered_file_ids):
            iter_commit = to_commit
            while iter_commit != from_commit:
                if len(iter_commit.parents) > 1:
                    thin_diff = True
                    break
                iter_commit = gitutils.Commit.fromSHA1(db, repository,
                                                       iter_commit.parents[0])

        if not thin_diff:
            if changeset_type == "direct":
                request = {
                    "changeset_type": "direct",
                    "child_sha1": commit.sha1
                }
            elif changeset_type == "custom":
                request = {
                    "changeset_type": "custom",
                    "parent_sha1": from_commit.sha1,
                    "child_sha1": to_commit.sha1
                }
            elif changeset_type == "merge":
                request = {
                    "changeset_type": "merge",
                    "child_sha1": commit.sha1
                }
            else:
                request = {
                    "changeset_type": "conflicts",
                    "parent_sha1": replay.sha1,
                    "child_sha1": commit.sha1
                }

            request["repository_name"] = repository.name

            client.requestChangesets([request])

            db.commit()

            for parent in parents:
                if parent:
                    cursor.execute(
                        "SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s",
                        (parent.getId(db), commit.getId(db), changeset_type))
                else:
                    cursor.execute(
                        "SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s",
                        (commit.getId(db), changeset_type))

                changeset_id = cursor.fetchone()[0]
                changeset = load.loadChangeset(
                    db,
                    repository,
                    changeset_id,
                    filtered_file_ids=filtered_file_ids,
                    load_chunks=load_chunks)
                changeset.conflicts = conflicts

                changesets.append(changeset)
        else:
            changes = diff.parse.parseDifferences(
                repository,
                from_commit=from_commit,
                to_commit=to_commit,
                filter_paths=[
                    describe_file(db, file_id) for file_id in filtered_file_ids
                ])[from_commit.sha1]

            dbutils.find_files(db, changes)

            for file in changes:
                for index, chunk in enumerate(file.chunks):
                    chunk.analyze(file, index == len(file.chunks) - 1)

            changeset = diff.Changeset(None, from_commit, to_commit,
                                       changeset_type)
            changeset.conflicts = conflicts
            changeset.files = diff.File.sorted(changes)

            changesets.append(changeset)

    if do_highlight:
        highlights = {}

        for changeset in changesets:
            for file in changeset.files:
                if file.canHighlight():
                    if file.old_sha1 and file.old_sha1 != '0' * 40:
                        highlights[file.old_sha1] = (file.path,
                                                     file.getLanguage())
                    if file.new_sha1 and file.new_sha1 != '0' * 40:
                        highlights[file.new_sha1] = (file.path,
                                                     file.getLanguage())

        syntaxhighlight.request.requestHighlights(repository, highlights)

    return changesets
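
The do_highlight block at the end collects every highlightable blob keyed by SHA-1, so each file version is requested at most once even if it appears in several changesets, and skips the all-zero SHA-1 Git uses for a missing blob (added or deleted files). That collection step, extracted as a small helper under the same assumptions about the file objects (canHighlight, old_sha1/new_sha1, path, getLanguage):

    NULL_SHA1 = "0" * 40  # Git's placeholder SHA-1 for a missing blob

    def collect_highlight_requests(changesets):
        # Key by blob SHA-1 so each version is highlighted at most once, and
        # skip the null SHA-1 used on the added/deleted side of a file.
        highlights = {}
        for changeset in changesets:
            for file in changeset.files:
                if not file.canHighlight():
                    continue
                for sha1 in (file.old_sha1, file.new_sha1):
                    if sha1 and sha1 != NULL_SHA1:
                        highlights[sha1] = (file.path, file.getLanguage())
        return highlights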