Example 1
def test_get_metrics_inner_first_commit(cloneable_with_commits):
    repo_parser = RepoParser(cloneable_with_commits.path)
    with repo_parser.repo_checked_out():
        metrics = _get_metrics_inner((
            None, cloneable_with_commits.commits[0],
            repo_parser, [LinesOfCodeParser],
        ))
        assert Metric(name='TotalLinesOfCode', value=0) in metrics
Example 2
def test_get_metrics_inner_first_commit(cloneable_with_commits):
    repo_parser = RepoParser(cloneable_with_commits.path)
    with repo_parser.repo_checked_out():
        metrics = _get_metrics_inner((
            None, cloneable_with_commits.commits[0],
            repo_parser, [LinesOfCodeParser], re.compile(b'^$'),
        ))
        assert Metric(name='TotalLinesOfCode', value=0) in metrics
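Note: _get_metrics_inner takes all of its arguments packed into a single tuple because the load_data examples below feed it to pool.imap / do_map, which call the worker with one element at a time. Its body is not shown on this page, but the sequential loop in Examples 11 and 13 suggests a shape like the following sketch; the unpacking order mirrors the call sites above, and passing exclude through to get_metrics is an assumption.

def _get_metrics_inner(mp_args):
    # Unpack the single-tuple argument produced by zip() in load_data.
    # (Example 1 shows an older variant without the exclude element.)
    compare_commit, commit, repo_parser, metric_parsers, exclude = mp_args
    if compare_commit is None:
        # First commit: the entire tree counts as the diff.
        diff = repo_parser.get_original_commit(commit.sha)
    else:
        diff = repo_parser.get_commit_diff(compare_commit.sha, commit.sha)
    # Hypothetical signature: get_metrics accepting exclude is assumed here.
    return get_metrics(diff, metric_parsers, exclude)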
Example 3
def test_get_metrics_inner_nth_commit(cloneable_with_commits):
    repo_parser = RepoParser(cloneable_with_commits.path)
    with repo_parser.repo_checked_out():
        metrics = _get_metrics_inner((
            cloneable_with_commits.commits[-2],
            cloneable_with_commits.commits[-1],
            repo_parser, [LinesOfCodeParser],
        ))
        assert Metric(name='TotalLinesOfCode', value=2) in metrics
Example 4
def test_get_metrics_inner_nth_commit(cloneable_with_commits):
    repo_parser = RepoParser(cloneable_with_commits.path)
    with repo_parser.repo_checked_out():
        metrics = _get_metrics_inner((
            cloneable_with_commits.commits[-2],
            cloneable_with_commits.commits[-1],
            repo_parser,
            [LinesOfCodeParser],
            re.compile(b'^$'),
        ))
        assert Metric(name='TotalLinesOfCode', value=2) in metrics
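Examples 2 and 4 pass re.compile(b'^$') as the exclude argument, presumably a pattern for file paths that should be skipped. Since ^$ matches only the empty byte string, these tests effectively exclude nothing:

import re

exclude = re.compile(b'^$')
assert exclude.search(b'') is not None          # only an empty path matches
assert exclude.search(b'some/file.py') is None  # real paths are kept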
Example 5
def load_data(
    database_file,
    repo,
    package_names,
    skip_defaults,
):
    metric_parsers = get_metric_parsers_from_args(package_names, skip_defaults)

    with sqlite3.connect(database_file) as db:
        metric_mapping = get_metric_mapping(db)

        repo_parser = RepoParser(repo)

        with repo_parser.repo_checked_out():
            previous_sha = get_previous_sha(db)
            commits = repo_parser.get_commits(since_sha=previous_sha)

            # If there is nothing to check, bail out early
            if len(commits) == 1 and previous_sha is not None:
                return

            # Maps metric_name to a running value
            metric_values = collections.defaultdict(int)

            # Seed the running totals from the last recorded commit
            compare_commit = None
            if previous_sha is not None:
                compare_commit = commits[0]
                metric_values.update(
                    get_metric_values(
                        db,
                        compare_commit.sha,
                    ))
                commits = commits[1:]

            mp_args = six.moves.zip(
                [compare_commit] + commits,
                commits,
                itertools.repeat(repo_parser),
                itertools.repeat(metric_parsers),
            )
            pool = multiprocessing.pool.Pool(15)
            for commit, metrics in six.moves.zip(
                    commits,
                    pool.imap(_get_metrics_inner, mp_args),
            ):
                increment_metric_values(metric_values, metrics)
                insert_metric_values(
                    db,
                    metric_values,
                    metric_mapping,
                    commit,
                )
                insert_metric_changes(db, metrics, metric_mapping, commit)
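The mp_args construction is the heart of the parallel version: zipping [compare_commit] + commits against commits pairs each commit with its predecessor (the first pair uses compare_commit, which may be None), while itertools.repeat supplies the shared arguments to every worker. A small demonstration of the offset zip:

commits = ['c1', 'c2', 'c3']
compare_commit = None
pairs = list(zip([compare_commit] + commits, commits))
assert pairs == [(None, 'c1'), ('c1', 'c2'), ('c2', 'c3')]

As written, the hard-coded 15-worker pool is never closed or joined; the later revisions replace it with a configurable mapper(jobs).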
Example 6
def load_data(
    database_file,
    repo,
    package_names,
    skip_defaults,
    exclude,
    jobs,
):
    metric_parsers = get_metric_parsers_from_args(package_names, skip_defaults)

    with WriteableDatabaseLogic.for_sqlite(database_file) as db_logic:
        metric_mapping = db_logic.get_metric_mapping()
        has_data = db_logic.get_metric_has_data()

        repo_parser = RepoParser(repo)

        with repo_parser.repo_checked_out():
            previous_sha = db_logic.get_previous_sha()
            commits = repo_parser.get_commits(since_sha=previous_sha)

            # If there is nothing to check, bail out early
            if len(commits) == 1 and previous_sha is not None:
                return

            # Maps metric_id to a running value
            metric_values = collections.Counter()

            # Seed the running totals from the last recorded commit
            compare_commit = None
            if previous_sha is not None:
                compare_commit = commits.pop(0)
                metric_values.update(
                    db_logic.get_metric_values(compare_commit.sha))

            mp_args = six.moves.zip(
                [compare_commit] + commits,
                commits,
                itertools.repeat(repo_parser),
                itertools.repeat(metric_parsers),
                itertools.repeat(exclude),
            )
            with mapper(jobs) as do_map:
                for commit, metrics in six.moves.zip(
                        commits,
                        do_map(_get_metrics_inner, mp_args),
                ):
                    db_logic.update_has_data(metrics, metric_mapping, has_data)
                    increment_metrics(metric_values, metric_mapping, metrics)
                    db_logic.insert_metric_values(metric_values, has_data,
                                                  commit)
                    db_logic.insert_metric_changes(metrics, metric_mapping,
                                                   commit)
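mapper(jobs) is not defined on this page; here it is used as a context manager that yields a map-like callable. A minimal sketch under that assumption follows; the serial fast path and the teardown strategy are guesses, not the project's actual implementation:

import contextlib
import multiprocessing.pool


@contextlib.contextmanager
def mapper(jobs):
    if jobs == 1:
        # Serial fall-back: the builtin map already has the right shape.
        yield map
    else:
        # Close the pool once the caller's with-block exits.
        with contextlib.closing(multiprocessing.pool.Pool(jobs)) as pool:
            yield pool.imap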
Example 7
def test_repo_checked_out(cloneable):
    repo_parser = RepoParser(cloneable)
    assert repo_parser.tempdir is None

    with repo_parser.repo_checked_out():
        assert repo_parser.tempdir is not None

        tempdir_path = repo_parser.tempdir
        assert os.path.exists(tempdir_path)
        assert os.path.exists(os.path.join(tempdir_path, '.git'))

    assert repo_parser.tempdir is None
    assert not os.path.exists(tempdir_path)
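This test pins down the contract of repo_checked_out(): tempdir is None outside the block, points at an existing clone (containing a .git directory) inside it, and the directory is gone afterwards. Below is a minimal sketch that satisfies those assertions; the clone command and the cleanup strategy are assumptions, not the project's actual implementation:

import contextlib
import shutil
import subprocess
import tempfile


class RepoParser(object):
    def __init__(self, git_repo):
        self.git_repo = git_repo
        self.tempdir = None

    @contextlib.contextmanager
    def repo_checked_out(self):
        self.tempdir = tempfile.mkdtemp()
        try:
            # Clone the target repository into the temporary directory.
            subprocess.check_call(('git', 'clone', self.git_repo, self.tempdir))
            yield
        finally:
            # Remove the clone so the post-block assertions hold.
            shutil.rmtree(self.tempdir)
            self.tempdir = None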
Example 8
def load_data(
    database_file,
    repo,
    package_names,
    skip_defaults,
    exclude,
    jobs,
):
    metric_parsers = get_metric_parsers_from_args(package_names, skip_defaults)

    with sqlite3.connect(database_file) as db:
        metric_mapping = get_metric_mapping(db)  # type: Dict[str, int]
        has_data = get_metric_has_data(db)  # type: Dict[int, bool]

        repo_parser = RepoParser(repo)

        with repo_parser.repo_checked_out():
            previous_sha = get_previous_sha(db)
            commits = repo_parser.get_commits(since_sha=previous_sha)

            # If there is nothing to check, bail out early
            if len(commits) == 1 and previous_sha is not None:
                return

            # Maps metric_id to a running value
            metric_values = collections.Counter()  # type: Counter[int]

            # Seed the running totals from the last recorded commit
            compare_commit = None
            if previous_sha is not None:
                compare_commit = commits.pop(0)
                metric_values.update(get_metric_values(db, compare_commit.sha))

            mp_args = six.moves.zip(
                [compare_commit] + commits,
                commits,
                itertools.repeat(repo_parser),
                itertools.repeat(metric_parsers),
                itertools.repeat(exclude),
            )
            do_map = mapper(jobs)
            for commit, metrics in six.moves.zip(
                    commits,
                    do_map(_get_metrics_inner, mp_args),
            ):
                update_has_data(db, metrics, metric_mapping, has_data)
                increment_metric_values(metric_values, metric_mapping, metrics)
                insert_metric_values(db, metric_values, has_data, commit)
                insert_metric_changes(db, metrics, metric_mapping, commit)
Example 9
def load_data(
        database_file: str,
        repo: str,
        package_names: List[str],
        skip_defaults: bool,
        exclude: Pattern[bytes],
        jobs: int,
) -> None:
    metric_parsers = get_metric_parsers_from_args(package_names, skip_defaults)

    with sqlite3.connect(database_file) as db:
        metric_mapping = get_metric_mapping(db)
        has_data = get_metric_has_data(db)

        repo_parser = RepoParser(repo)

        with repo_parser.repo_checked_out():
            previous_sha = get_previous_sha(db)
            commits = repo_parser.get_commits(since_sha=previous_sha)

            # If there is nothing to check, bail out early
            if len(commits) == 1 and previous_sha is not None:
                return

            # Maps metric_id to a running value
            metric_values: Counter[int] = collections.Counter()

            # Seed the running totals from the last recorded commit
            compare_commit = None
            if previous_sha is not None:
                compare_commit = commits.pop(0)
                metric_values.update(get_metric_values(db, compare_commit.sha))

            mp_args = zip(
                [compare_commit, *commits],
                commits,
                itertools.repeat(repo_parser),
                itertools.repeat(metric_parsers),
                itertools.repeat(exclude),
            )
            with mapper(jobs) as do_map:
                for commit, metrics in zip(
                        commits, do_map(_get_metrics_inner, mp_args),
                ):
                    update_has_data(db, metrics, metric_mapping, has_data)
                    increment_metrics(metric_values, metric_mapping, metrics)
                    insert_metric_values(db, metric_values, has_data, commit)
                    insert_metric_changes(db, metrics, metric_mapping, commit)
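Example 9 is the Python-3-only revision of the same function: six.moves.zip becomes the builtin zip, [compare_commit] + commits becomes [compare_commit, *commits], and the type comments from Example 8 become annotations. For the snippet to stand alone, the annotations presume roughly these imports:

import collections
import itertools
import sqlite3
from typing import Counter
from typing import List
from typing import Pattern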
Example 10
def load_data(
        database_file,
        repo,
        package_names,
        skip_defaults,
        exclude,
        jobs,
):
    metric_parsers = get_metric_parsers_from_args(package_names, skip_defaults)

    with sqlite3.connect(database_file) as db:
        metric_mapping = get_metric_mapping(db)
        has_data = get_metric_has_data(db)

        repo_parser = RepoParser(repo)

        with repo_parser.repo_checked_out():
            previous_sha = get_previous_sha(db)
            commits = repo_parser.get_commits(since_sha=previous_sha)

            # If there is nothing to check, bail out early
            if len(commits) == 1 and previous_sha is not None:
                return

            # Maps metric_id to a running value
            metric_values = collections.Counter()

            # Seed the running totals from the last recorded commit
            compare_commit = None
            if previous_sha is not None:
                compare_commit = commits.pop(0)
                metric_values.update(get_metric_values(db, compare_commit.sha))

            mp_args = six.moves.zip(
                [compare_commit] + commits,
                commits,
                itertools.repeat(repo_parser),
                itertools.repeat(metric_parsers),
                itertools.repeat(exclude),
            )
            with mapper(jobs) as do_map:
                for commit, metrics in six.moves.zip(
                        commits, do_map(_get_metrics_inner, mp_args),
                ):
                    update_has_data(db, metrics, metric_mapping, has_data)
                    increment_metrics(metric_values, metric_mapping, metrics)
                    insert_metric_values(db, metric_values, has_data, commit)
                    insert_metric_changes(db, metrics, metric_mapping, commit)
Example 11
def load_data(
        database_file,
        repo,
        package_names,
        skip_defaults,
        tempdir_location,
):
    metric_parsers = get_metric_parsers_from_args(package_names, skip_defaults)

    with sqlite3.connect(database_file) as db:
        metric_mapping = get_metric_mapping(db)

        repo_parser = RepoParser(repo, tempdir_location=tempdir_location)

        with repo_parser.repo_checked_out():
            previous_sha = get_previous_sha(db)
            commits = repo_parser.get_commits(since_sha=previous_sha)

            # If there is nothing to check, bail out early
            if len(commits) == 1 and previous_sha is not None:
                return

            # Maps metric_name to a running value
            metric_values = collections.defaultdict(int)

            # Seed the running totals from the last recorded commit
            compare_commit = None
            if previous_sha is not None:
                compare_commit = commits[0]
                metric_values.update(get_metric_values(
                    db, compare_commit.sha,
                ))
                commits = commits[1:]

            for commit in commits:
                if compare_commit is None:
                    diff = repo_parser.get_original_commit(commit.sha)
                else:
                    diff = repo_parser.get_commit_diff(
                        compare_commit.sha, commit.sha,
                    )

                metrics = get_metrics(diff, metric_parsers)
                increment_metric_values(metric_values, metrics)
                insert_metric_values(db, metric_values, metric_mapping, commit)
                insert_metric_changes(db, metrics, metric_mapping, commit)

                compare_commit = commit
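Examples 11 and 13 are the serial predecessors of the multiprocessing versions: each commit is diffed against compare_commit, the diff is turned into metrics, and the per-commit deltas are folded into the running totals before being persisted. Given the Metric(name=..., value=...) tuples from Example 1 and a defaultdict(int) keyed by metric name, increment_metric_values plausibly reduces to this hypothetical sketch:

def increment_metric_values(metric_values, metrics):
    # Fold each per-commit delta into the running per-metric totals.
    for metric in metrics:
        metric_values[metric.name] += metric.value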
Example 12
def load_data(
        database_file,
        repo,
        package_names,
        skip_defaults,
):
    metric_parsers = get_metric_parsers_from_args(package_names, skip_defaults)

    with sqlite3.connect(database_file) as db:
        metric_mapping = get_metric_mapping(db)

        repo_parser = RepoParser(repo)

        with repo_parser.repo_checked_out():
            previous_sha = get_previous_sha(db)
            commits = repo_parser.get_commits(since_sha=previous_sha)

            # If there is nothing to check, bail out early
            if len(commits) == 1 and previous_sha is not None:
                return

            # Maps metric_name to a running value
            metric_values = collections.defaultdict(int)

            # Seed the running totals from the last recorded commit
            compare_commit = None
            if previous_sha is not None:
                compare_commit = commits[0]
                metric_values.update(get_metric_values(
                    db, compare_commit.sha,
                ))
                commits = commits[1:]

            mp_args = six.moves.zip(
                [compare_commit] + commits,
                commits,
                itertools.repeat(repo_parser),
                itertools.repeat(metric_parsers),
            )
            pool = multiprocessing.pool.Pool(15)
            for commit, metrics in six.moves.zip(
                    commits, pool.imap(_get_metrics_inner, mp_args),
            ):
                increment_metric_values(metric_values, metrics)
                insert_metric_values(
                    db, metric_values, metric_mapping, commit,
                )
                insert_metric_changes(db, metrics, metric_mapping, commit)
Example 13
def load_data(
        database_file,
        repo,
        package_names,
        skip_defaults,
        tempdir_location,
):
    metric_parsers = get_metric_parsers_from_args(package_names, skip_defaults)

    with sqlite3.connect(database_file) as db:
        metric_mapping = get_metric_mapping(db)

        repo_parser = RepoParser(repo, tempdir_location=tempdir_location)

        with repo_parser.repo_checked_out():
            previous_sha = get_previous_sha(db)
            commits = repo_parser.get_commits(since_sha=previous_sha)

            # If there is nothing to check, bail out early
            if len(commits) == 1 and previous_sha is not None:
                return

            # Maps metric_name to a running value
            metric_values = collections.defaultdict(int)

            # Seed the running totals from the last recorded commit
            compare_commit = None
            if previous_sha is not None:
                compare_commit = commits[0]
                metric_values.update(get_metric_values(
                    db, compare_commit.sha,
                ))
                commits = commits[1:]

            for commit in commits:
                if compare_commit is None:
                    diff = repo_parser.get_original_commit(commit.sha)
                else:
                    diff = repo_parser.get_commit_diff(
                        compare_commit.sha, commit.sha,
                    )

                metrics = get_metrics(diff, metric_parsers)
                increment_metric_values(metric_values, metrics)
                insert_metric_values(db, metric_values, metric_mapping, commit)
                insert_metric_changes(db, metrics, metric_mapping, commit)

                compare_commit = commit
Example 14
@pytest.fixture
def cloneable():
    repo_parser = RepoParser('git://github.com/asottile/git-code-debt')
    with repo_parser.repo_checked_out():
        yield repo_parser.tempdir
Example 15
@pytest.fixture
def checked_out_repo(cloneable):
    repo_parser = RepoParser(cloneable)
    with repo_parser.repo_checked_out():
        yield repo_parser
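Examples 14 and 15 are pytest fixtures: each checks out a repository for the duration of a test, yields a resource (the clone's path, or the parser itself), and cleans up when the test finishes. A hypothetical test consuming checked_out_repo:

def test_clone_is_live(checked_out_repo):
    # While the fixture is active, the temporary clone exists on disk.
    assert checked_out_repo.tempdir is not None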