Example #1
0
    def _handle(self, *args, **options):
        """Ingest pushes for a project from the hg pushlog, then their jobs.

        Requires either a positional ``changeset`` argument (ingest that
        single push and its builds/pending/running jobs) or
        ``--last-n-pushes`` (ingest the N newest pushes, without the extra
        job processing).

        Raises:
            CommandError: if neither a changeset nor --last-n-pushes is given.
        """
        project = options['project']
        changeset = options['changeset']

        if not options['last_n_pushes'] and not changeset:
            raise CommandError('must specify --last-n-pushes or a positional '
                               'changeset argument')

        # get reference to repo
        repo = Repository.objects.get(name=project, active_status='active')

        if options['last_n_pushes']:
            last_push_id = last_push_id_from_server(repo)
            # Never try to fetch below push id 1.
            fetch_push_id = max(1, last_push_id - options['last_n_pushes'])
            # Lazy %-style logger args (not eager '%' formatting): the string
            # is only built if the record is actually emitted.
            logger.info('last server push id: %d; fetching push %d and newer',
                        last_push_id, fetch_push_id)
        else:
            fetch_push_id = None

        # make sure all tasks are run synchronously / immediately
        settings.CELERY_ALWAYS_EAGER = True

        # get hg pushlog
        pushlog_url = '%s/json-pushes/?full=1&version=2' % repo.url

        # ingest this particular revision for this project
        process = HgPushlogProcess()
        # Use the actual push SHA, in case the changeset specified was a tag
        # or branch name (eg tip). HgPushlogProcess returns the full SHA.
        push_sha = process.run(pushlog_url,
                               project,
                               changeset=changeset,
                               last_push_id=fetch_push_id)

        # Only perform additional processing if fetching a single changeset
        # because we only have the sha1 if the tip-most push in "last N pushes"
        # mode and can't filter appropriately.
        if not fetch_push_id:
            group_filter = options['filter_job_group']
            Builds4hJobsProcess().run(project_filter=project,
                                      revision_filter=push_sha,
                                      job_group_filter=group_filter)
            PendingJobsProcess().run(project_filter=project,
                                     revision_filter=push_sha,
                                     job_group_filter=group_filter)
            RunningJobsProcess().run(project_filter=project,
                                     revision_filter=push_sha,
                                     job_group_filter=group_filter)
Example #2
0
    def _handle(self, *args, **options):
        """Ingest pushes for a project from the hg pushlog, then their jobs.

        Either a positional changeset (single push plus job processing) or
        --last-n-pushes (N newest pushes, no job processing) must be given.

        Raises:
            CommandError: if neither a changeset nor --last-n-pushes is given.
        """
        project = options['project']
        changeset = options['changeset']

        if not options['last_n_pushes'] and not changeset:
            raise CommandError('must specify --last-n-pushes or a positional '
                               'changeset argument')

        # get reference to repo
        repo = Repository.objects.get(name=project, active_status='active')

        if options['last_n_pushes']:
            last_push_id = last_push_id_from_server(repo)
            # Clamp to push id 1 so we never request a non-existent push.
            fetch_push_id = max(1, last_push_id - options['last_n_pushes'])
            # Pass lazy %-args to the logger instead of pre-formatting with
            # '%' so formatting only happens when the record is emitted.
            logger.info('last server push id: %d; fetching push %d and newer',
                        last_push_id, fetch_push_id)
        else:
            fetch_push_id = None

        # make sure all tasks are run synchronously / immediately
        settings.CELERY_ALWAYS_EAGER = True

        # get hg pushlog
        pushlog_url = '%s/json-pushes/?full=1&version=2' % repo.url

        # ingest this particular revision for this project
        process = HgPushlogProcess()
        # Use the actual push SHA, in case the changeset specified was a tag
        # or branch name (eg tip). HgPushlogProcess returns the full SHA.
        push_sha = process.run(pushlog_url, project, changeset=changeset,
                               last_push_id=fetch_push_id)

        # Only perform additional processing if fetching a single changeset
        # because we only have the sha1 if the tip-most push in "last N pushes"
        # mode and can't filter appropriately.
        if not fetch_push_id:
            group_filter = options['filter_job_group']
            Builds4hJobsProcess().run(project_filter=project,
                                      revision_filter=push_sha,
                                      job_group_filter=group_filter)
            PendingJobsProcess().run(project_filter=project,
                                     revision_filter=push_sha,
                                     job_group_filter=group_filter)
            RunningJobsProcess().run(project_filter=project,
                                     revision_filter=push_sha,
                                     job_group_filter=group_filter)
Example #3
0
def ingest_hg_push(options):
    """Ingest hg pushes for a project, optionally with all their tasks.

    Validates the mutually-exclusive option combinations, resolves the
    active repository, then either computes the starting push id for
    "last N pushes" mode or (with --ingest-all-tasks) ingests every task
    of the commit's decision task graph, before ingesting the push itself.
    """
    if options["enable_eager_celery"]:
        # Make sure all tasks are run synchronously / immediately
        settings.CELERY_TASK_ALWAYS_EAGER = True
    else:
        logger.info(
            "If you want all logs to be parsed use --enable-eager-celery")

    # get reference to repo and ingest this particular revision for this project
    project = options["project"]
    commit = options["commit"]
    last_n = options["last_n_pushes"]

    # Option validation: exactly one ingestion mode may be selected.
    if not last_n and not commit:
        raise CommandError(
            "must specify --last_n_pushes or a positional commit argument")
    if last_n and options["ingest_all_tasks"]:
        raise CommandError(
            "Can't specify last_n_pushes and ingest_all_tasks at same time")
    if last_n and commit:
        raise CommandError(
            "Can't specify last_n_pushes and commit/revision at the same time")

    repo = Repository.objects.get(name=project, active_status="active")
    fetch_push_id = None

    if last_n:
        last_push_id = last_push_id_from_server(repo)
        # Never go below push id 1.
        fetch_push_id = max(1, last_push_id - last_n)
        logger.info(
            'last server push id: %d; fetching push %d and newer',
            last_push_id,
            fetch_push_id,
        )
    elif options["ingest_all_tasks"]:
        decision_task = get_decision_task_id(project, commit, repo.tc_root_url)
        logger.info("## START ##")
        asyncio.get_event_loop().run_until_complete(
            processTasks(decision_task, repo.tc_root_url))
        logger.info("## END ##")
    else:
        logger.info(
            "You can ingest all tasks for a push with -a/--ingest-all-tasks.")

    _ingest_hg_push(project, commit)
Example #4
0
    def _handle(self, *args, **options):
        """Ingest a push for a project from the hg pushlog.

        Accepts either a positional changeset or --last-n-pushes; job
        ingestion for a single changeset is not supported and raises.
        """
        project = options['project']
        changeset = options['changeset']
        push_count = options['last_n_pushes']

        if not push_count and not changeset:
            raise CommandError('must specify --last-n-pushes or a positional '
                               'changeset argument')

        # Look up the active repository for this project.
        repo = Repository.objects.get(name=project, active_status='active')

        fetch_push_id = None
        if push_count:
            last_push_id = last_push_id_from_server(repo)
            fetch_push_id = max(1, last_push_id - push_count)
            logger.info('last server push id: %d; fetching push %d and newer',
                        last_push_id, fetch_push_id)

        # Force every Celery task to run synchronously / immediately.
        settings.CELERY_ALWAYS_EAGER = True

        # Ingest from the hg pushlog. The changeset may be a tag or branch
        # name (eg tip); HgPushlogProcess resolves it to the full push SHA.
        pushlog_url = '%s/json-pushes/?full=1&version=2' % repo.url
        HgPushlogProcess().run(
            pushlog_url, project, changeset=changeset,
            last_push_id=fetch_push_id)

        # Single-changeset mode would need per-revision job ingestion, which
        # this command cannot do yet.
        if fetch_push_id is None:
            raise CommandError(
                'This command is not yet able to ingest Taskcluster jobs automatically. '
                'Please manually configure pulse job ingestion using this guide: '
                'https://treeherder.readthedocs.io/pulseload.html')
Example #5
0
    def _handle(self, *args, **options):
        """Ingest a push for a project from the hg pushlog.

        Requires either a positional changeset or --last-n-pushes.
        Automatic Taskcluster job ingestion for a single changeset is not
        implemented, so that mode ends with a CommandError.
        """
        project = options['project']
        changeset = options['changeset']
        n_pushes = options['last_n_pushes']

        if not (n_pushes or changeset):
            raise CommandError('must specify --last-n-pushes or a positional '
                               'changeset argument')

        # Resolve the active repository for the requested project.
        repo = Repository.objects.get(name=project, active_status='active')

        if n_pushes:
            last_push_id = last_push_id_from_server(repo)
            fetch_push_id = max(1, last_push_id - n_pushes)
            logger.info('last server push id: %d; fetching push %d and newer',
                        last_push_id, fetch_push_id)
        else:
            fetch_push_id = None

        # Run all Celery tasks synchronously / immediately.
        settings.CELERY_TASK_ALWAYS_EAGER = True

        # Ingest from the hg pushlog; the changeset may be a tag or branch
        # name (eg tip), which HgPushlogProcess resolves to the full SHA.
        pushlog_url = '%s/json-pushes/?full=1&version=2' % repo.url
        process = HgPushlogProcess()
        process.run(
            pushlog_url,
            project,
            changeset=changeset,
            last_push_id=fetch_push_id,
        )

        # Single-changeset mode would require job ingestion, which this
        # command cannot perform automatically.
        if fetch_push_id is None:
            raise CommandError(
                'This command is not yet able to ingest Taskcluster jobs automatically. '
                'Please manually configure pulse job ingestion using this guide: '
                'https://treeherder.readthedocs.io/pulseload.html'
            )
Example #6
0
    def handle(self, *args, **options):
        """Dispatch ingestion based on options["ingestion_type"].

        Supported types: "task" (a single Taskcluster task), "pr" (a GitHub
        pull request URL, replayed as a synthetic pulse message), "git-push" /
        "git-pushes" (GitHub pushes), and "push" (an hg push, optionally with
        all of its tasks).

        Raises:
            CommandError: for invalid option combinations in "push" mode.
        """
        typeOfIngestion = options["ingestion_type"][0]
        root_url = options["root_url"]

        if typeOfIngestion == "task":
            # NOTE(review): `loop` is a module-level event loop defined
            # elsewhere in this file — confirm it is not already running.
            assert options["taskId"]
            loop.run_until_complete(handleTaskId(options["taskId"], root_url))
        elif typeOfIngestion == "pr":
            assert options["prUrl"]
            pr_url = options["prUrl"]
            # Expected URL shape: https://github.com/<org>/<repo>/pull/<number>
            # so index 3 is the org, 4 the repo, and 6 the PR number.
            splitUrl = pr_url.split("/")
            org = splitUrl[3]
            repo = splitUrl[4]
            # Build a synthetic pulse message mimicking the
            # taskcluster-github "synchronize" event for this PR.
            pulse = {
                "exchange": "exchange/taskcluster-github/v1/pull-request",
                "routingKey": "primary.{}.{}.synchronize".format(org, repo),
                "payload": {
                    "repository": repo,
                    "organization": org,
                    "action": "synchronize",
                    "details": {
                        "event.pullNumber":
                        splitUrl[6],
                        "event.base.repo.url":
                        "https://github.com/{}/{}.git".format(org, repo),
                        "event.head.repo.url":
                        "https://github.com/{}/{}.git".format(org, repo),
                    },
                }
            }
            PushLoader().process(pulse["payload"], pulse["exchange"], root_url)
        elif typeOfIngestion.find("git") > -1:
            # Covers both "git-push" and "git-pushes".
            if not os.environ.get("GITHUB_TOKEN"):
                logger.warning(
                    "If you don't set up GITHUB_TOKEN you might hit Github's rate limiting. See docs for info."
                )

            if typeOfIngestion == "git-push":
                ingest_git_push(options["project"], options["commit"])
            elif typeOfIngestion == "git-pushes":
                ingest_git_pushes(options["project"], options["dryRun"])
        elif typeOfIngestion == "push":
            if not options["enable_eager_celery"]:
                logger.info(
                    "If you want all logs to be parsed use --enable-eager-celery"
                )
            else:
                # Make sure all tasks are run synchronously / immediately
                settings.CELERY_TASK_ALWAYS_EAGER = True

            # get reference to repo and ingest this particular revision for this project
            project = options["project"]
            commit = options["commit"]

            # Validate mutually-exclusive option combinations up front.
            if not options['last_n_pushes'] and not commit:
                raise CommandError(
                    'must specify --last_n_pushes or a positional commit argument'
                )
            elif options['last_n_pushes'] and options['ingest_all_tasks']:
                raise CommandError(
                    'Can\'t specify last_n_pushes and ingest_all_tasks at same time'
                )
            elif options['last_n_pushes'] and options['commit']:
                raise CommandError(
                    'Can\'t specify last_n_pushes and commit/revision at the same time'
                )
            # get reference to repo
            repo = Repository.objects.get(name=project, active_status="active")
            fetch_push_id = None

            if options['last_n_pushes']:
                last_push_id = last_push_id_from_server(repo)
                # Clamp to push id 1 so we never request a non-existent push.
                fetch_push_id = max(1, last_push_id - options['last_n_pushes'])
                logger.info(
                    'last server push id: %d; fetching push %d and newer',
                    last_push_id, fetch_push_id)
            elif options["ingest_all_tasks"]:
                # Ingest the full task graph rooted at the commit's Gecko
                # decision task.
                gecko_decision_task = get_decision_task_id(
                    project, commit, repo.tc_root_url)
                logger.info("## START ##")
                loop.run_until_complete(
                    processTasks(gecko_decision_task, repo.tc_root_url))
                logger.info("## END ##")
            else:
                logger.info(
                    "You can ingest all tasks for a push with -a/--ingest-all-tasks."
                )

            # get hg pushlog
            pushlog_url = "%s/json-pushes/?full=1&version=2" % repo.url
            # ingest this particular revision for this project
            process = HgPushlogProcess()
            # Use the actual push SHA, in case the changeset specified was a tag
            # or branch name (eg tip). HgPushlogProcess returns the full SHA.
            process.run(pushlog_url,
                        project,
                        changeset=commit,
                        last_push_id=fetch_push_id)