Example #1
    def schedule_run(self, commit: str, branch: str, job_name: str = "test"):
        self.project_config.git.main_repo.set_fake_head(commit=commit,
                                                        branch=branch)

        context = dict(**self.project_config.context)

        init_job = self.project_config.jobs[0]

        if init_job.db_broken_count() >= init_job.retries:
            logger.warning(
                f"Skipping job '{init_job}' since it already has {init_job.db_broken_count()} broken builds"
            )
            yield 0
        else:
            test_job = self.project_config.get(job_name)
            for schedule_document, schedule_index in self._process_job(
                    test_job, context):
                scheduled_already = DBStats.get_schedule_repetitions(
                    schedule_index)
                schedule_document.details.repetitions -= scheduled_already
                if schedule_document.details.repetitions > 0:
                    Mongo().col_scheduler.insert(schedule_document)
                    logger.debug(
                        f"Inserted {schedule_document.details.repetitions} requests for the job:\n{test_job.pretty_index}"
                    )
                    yield schedule_document.details.repetitions
                else:
                    logger.debug(
                        f"Already scheduled {scheduled_already} runs, which is more than enough:\n{test_job.pretty_index}"
                    )
Example #2
def save_index_info(job, context: Dict):
    """
    :type job: cihpc.config.types.project_config_job.ProjectConfigJob
    """
    index = get_index(job, context)
    index_info = create_index_info(index, job)

    try:
        Mongo().col_index_info.insert(index_info)
    except Exception as e:
        logger.error(f"Could not save info to db: {e}")
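A hypothetical call site, assuming the job and its context come from a loaded project configuration as in the other examples (the variable names here are illustrative):

config = get_project_config(args)
context = dict(**config.context)

# persist index metadata for the first configured job; failures are only logged
save_index_info(config.jobs[0], context)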
Example #3
def test():
    _current_dir = Path(__file__).absolute().parent
    args = parse_worker_args([
        "--cfg",
        str(_current_dir), "--vars",
        str(_current_dir / "user" / "desired_variables.yaml"), "--rnd=true"
    ])

    # force a specific commit (only the last assignment takes effect)
    # args.commit = "e88a8335648b08843aa58062c2488120c458d737"
    # args.commit = "ec5aa26c2c53802e4f5c1d73c0baa1b756640715"
    # args.commit = "25bf49f2c16f5cc3d167c5022398942dc2371c4d"
    args.commit = "61dd0ec01b450342a1eb2037a63f6f47d3071f65"

    # read yaml
    G.init(args)
    logger.info(f"Using workdir: {G.project_work_dir}")
    config = get_project_config(args)
    Mongo.set_default_project(config.name)

    config.execute()
Example #4
    def execute(self, context: Dict):
        logger.info(f"Collecting artifacts...")
        extra = configure_recursive(self.extra, context)

        files = list()
        for file in self.files:
            path = Path(configure(file, context)).absolute()

            match = glob.glob(str(path), recursive=True)
            logger.info(f"searching path {str(path)}, found: {match}")
            files.extend(match)

        collector_type = _collectors[self.module]
        collector = collector_type(context, extra)

        for file in files:
            try:
                reports = collector.process_file(Path(file))
            except Exception as e:
                logger.info(
                    f"Failed to obtain timers from {file} using {collector_type}: {e}"
                )
                logger.debug(Path(file).read_text())
                continue

            # convert to ColTimer
            timers = [ColTimer(**report) for report in reports]

            if not timers:
                logger.info(f"Did not found any timers in {file}")
                continue

            if self.save_to_db:
                # save to db
                try:
                    Mongo().col_timers.insert_many(timers)
                except Exception as e:
                    logger.error(f"Failed to save data to db: {e}")

            for timer in timers:
                yield timer.index
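The collector contract implied by this method is small: the class looked up in _collectors is constructed with (context, extra), and its process_file(path) returns an iterable of dicts that ColTimer accepts as keyword arguments. A hypothetical minimal collector written against those assumptions (the class name and the JSON file format are illustrative, not taken from the source):

import json
from pathlib import Path
from typing import Dict, List


class JsonTimerCollector:
    """Illustrative collector: expects the file to contain a JSON list of timer dicts."""

    def __init__(self, context: Dict, extra: Dict):
        self.context = context
        self.extra = extra

    def process_file(self, path: Path) -> List[Dict]:
        # each returned dict is later expanded into ColTimer(**report)
        return json.loads(path.read_text())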
Example #5
from cihpc.shared.utils import data_util

ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""

if __name__ == '__main__':
    args = parse_scheduler_args()

    # read yaml
    G.init(args)
    logger.info(f"Using workdir: {G.project_work_dir}")

    project_config = get_project_config(args)
    Mongo.set_default_project(project_config.name)

    cursor = Mongo().col_scheduler.find(
        TimerIndex(
            project=project_config.name,
            status=ColScheduleStatus.NotProcessed,
        ),
        alter_cursor=lambda x: x.sort("_id", DESCENDING))

    scheduler_items = list(cursor)
    total = len(scheduler_items)

    for i, schedule in enumerate(scheduler_items):
        logger.info(f"Schedule {i+1:d}/{total:2d} starting")
        _id = schedule.id
        index = schedule.index
Example #6
import sys
MIN_PYTHON = 3, 6

if sys.version_info < MIN_PYTHON:
    sys.exit("Error: Python %s.%s or later is required.\n" % MIN_PYTHON)

from loguru import logger

from cihpc.config import get_project_config
from cihpc.parsers.main import parse_scheduler_args
from cihpc.repo.repo_util import RepoUtil
from cihpc.shared.db.mongo_db import Mongo
from cihpc.shared.g import G

if __name__ == '__main__':
    args = parse_scheduler_args()

    # read yaml
    G.init(args)
    logger.info(f"Using workdir: {G.project_work_dir}")

    project_config = get_project_config(args)
    Mongo.set_default_project(project_config.name)

    project_config.initialize(with_git=False)
    repo = RepoUtil(dir=G.project_work_dir / '.cihpc' / '.repo',
                    url=project_config.git.main_repo.url,
                    project_config=project_config)
    repo.to_latest()
    repo.schedule_runs(max_per_branch=args.per_branch)
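Example #7 below appears to belong to the same RepoUtil class, so this entry point could also refresh commit metadata; a hedged sketch, where the max_age value and its unit are assumptions rather than something taken from the source:

# collect commit details for recently active branches and store them in col_repo_info
repo.extract_info(per_branch=args.per_branch, max_age=30)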
Example #7
    def extract_info(self, per_branch, max_age, single_branch=None):
        logger.info("obtaining commit details")
        if isinstance(single_branch, str):
            # a bare branch name would otherwise be iterated character by character
            single_branch = [single_branch]
        branches = single_branch if single_branch else get_active_branches(
            self.repo, max_age)

        info: Dict[Commit, List[str]] = defaultdict(list)
        documents = list()

        for branch in branches:
            branch_head = None if isinstance(branch, str) else branch.head
            branch_name = branch if not branch_head else str(branch.head)
            branch_full = f"origin/{branch_name}" if not branch_name.startswith(
                "origin/") else branch_name
            branch_short = branch_full[7:]

            for commit in iter_revision(self.repo,
                                        branch_head or branch_full,
                                        limit=per_branch,
                                        first_parent=False):
                info[commit].append(branch_short)

        for commit, branches in info.items():
            doc = ColRepoInfo()
            doc.author = commit.author.name
            doc.email = commit.author.email
            doc.commit = commit.hexsha
            doc.branches = branches
            doc.branch = None if len(branches) > 1 else branches[0]
            doc.authored_datetime = commit.authored_datetime
            doc.committed_datetime = commit.committed_datetime
            doc.message = commit.message
            doc.distance = -1
            doc.parents = [c.hexsha for c in commit.parents]
            documents.append(doc)

        logger.info("comparing changes in db")
        # to_be_updated = [doc.commit for doc in documents]
        # rexisting_cmts = [x.commit for x in Mongo().col_repo_info.find({}, {"commit": 1})]
        results = Mongo().col_repo_info.find(
            {"commit": in_list([doc.commit for doc in documents])}, ["commit"],
            raw=True)

        logger.info("traversing parents")
        documents = self._update_edges(documents)

        existing = [r.commit for r in results]
        filtered = [d for d in documents if d.commit not in existing]
        logger.info(
            f"inspected total of {len(documents)} commits, {len(filtered)} new ones"
        )

        if filtered:
            Mongo().col_repo_info.insert_many(filtered)
        else:
            logger.info(f"no new commits to add...")

        logger.info("updating commit parents and children")
        changes = list(Mongo().col_repo_info.batch_update(
            documents,
            lambda x: dict(commit=x.commit),
            lambda x: dict(parents=x.parents, children=x.children),
        ))
        logger.info(f"updated {len(changes)} parents and children")
Example #8
def _try_execute(func, default):
    try:
        return func(Mongo())
    except Exception as e:
        logger.warning(f"Mongo command failed: {e}")
        return default
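A usage sketch for this helper, reusing the query shape from Example #7; the empty-list default is an illustrative choice, not taken from the source:

# returns [] instead of raising when the database is unavailable
existing = _try_execute(
    lambda db: db.col_repo_info.find({}, ["commit"], raw=True),
    default=[],
)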