def _handle(self, *args, **options):
    """Ingest a single changeset (and its jobs) for one project.

    Expects exactly two positional args: the project (branch) name and
    the changeset to ingest.  Raises CommandError on bad arguments or an
    unknown project.
    """
    if len(args) != 2:
        raise CommandError("Need to specify (only) branch and changeset")
    (project, changeset) = args

    # Get a reference to the repo.  A list comprehension is used instead
    # of filter(): under Python 3 filter() returns a lazy iterator, which
    # is always truthy and not subscriptable, so both the `not repos`
    # check and `repos[0]` below would silently misbehave.
    rdm = RefDataManager()
    try:
        repos = [repo for repo in rdm.get_all_repository_info()
                 if repo['name'] == project]
    finally:
        # Disconnect like the other ingestion helpers in this file do,
        # so the refdata connection is not leaked.
        rdm.disconnect()
    if not repos:
        raise CommandError("No project found named '%s'" % project)
    repo = repos[0]

    # make sure all tasks are run synchronously / immediately
    settings.CELERY_ALWAYS_EAGER = True

    # get hg pushlog
    pushlog_url = '%s/json-pushes/?full=1&version=2' % repo['url']

    # ingest this particular revision for this project
    process = HgPushlogProcess()
    # Use the actual push SHA, in case the changeset specified was a tag
    # or branch name (eg tip). HgPushlogProcess returns the full SHA.
    push_sha = process.run(pushlog_url, project, changeset=changeset)[:12]

    Builds4hJobsProcess().run(project_filter=project,
                              revision_filter=push_sha,
                              job_group_filter=options['filter_job_group'])
    PendingJobsProcess().run(project_filter=project,
                             revision_filter=push_sha,
                             job_group_filter=options['filter_job_group'])
    RunningJobsProcess().run(project_filter=project,
                             revision_filter=push_sha,
                             job_group_filter=options['filter_job_group'])
def _handle(self, *args, **options):
    """Ingest a single changeset (and its jobs) for one project.

    Older variant: fetches the v1 json-pushes feed, then processes all
    pending objects for the project before and after the job ingestion
    passes.  Raises CommandError on bad arguments or an unknown project.
    """
    if len(args) != 2:
        raise CommandError("Need to specify (only) branch and changeset")
    (project, changeset) = args

    # Get a reference to the repo.  A list comprehension is used instead
    # of filter(): under Python 3 filter() returns a lazy iterator, which
    # is always truthy and not subscriptable, so both the `not repos`
    # check and `repos[0]` below would silently misbehave.
    rdm = RefDataManager()
    try:
        repos = [repo for repo in rdm.get_all_repository_info()
                 if repo['name'] == project]
    finally:
        # Disconnect like the other ingestion helpers in this file do,
        # so the refdata connection is not leaked.
        rdm.disconnect()
    if not repos:
        raise CommandError("No project found named '%s'" % project)
    repo = repos[0]

    # make sure all tasks are run synchronously / immediately
    settings.CELERY_ALWAYS_EAGER = True

    # get hg pushlog
    pushlog_url = '%s/json-pushes/?full=1' % repo['url']

    # ingest this particular revision for this project
    process = HgPushlogProcess()
    process.run(pushlog_url, project, changeset=changeset)
    self._process_all_objects_for_project(project)

    Builds4hJobsProcess().run(filter_to_project=project,
                              filter_to_revision=changeset)
    PendingJobsProcess().run(filter_to_project=project,
                             filter_to_revision=changeset)
    RunningJobsProcess().run(filter_to_project=project,
                             filter_to_revision=changeset)
    self._process_all_objects_for_project(project)
def fetch_push_logs():
    """Run several fetch_hg_push_log subtasks, one per repository"""
    rdm = RefDataManager()
    try:
        # Only hg repositories with a configured URL get a pushlog fetch.
        hg_repos = [repo for repo in rdm.get_all_repository_info()
                    if repo['url'] and repo['dvcs_type'] == 'hg']
        # create a group of subtasks and apply them
        tasks = group(fetch_hg_push_log.si(repo['name'], repo['url'])
                      for repo in hg_repos)
        tasks()
    finally:
        rdm.disconnect()
def fetch_push_logs():
    """Run several fetch_hg_push_log subtasks, one per repository"""
    rdm = RefDataManager()
    try:
        for repo in rdm.get_all_repository_info():
            # Skip repositories without a URL or not backed by hg.
            if not repo['url']:
                continue
            if repo['dvcs_type'] != 'hg':
                continue
            fetch_hg_push_log.apply_async(
                args=(repo['name'], repo['url']),
                routing_key='pushlog')
    finally:
        rdm.disconnect()
def fetch_push_logs():
    """Run several fetch_hg_push_log subtasks, one per repository"""
    rdm = RefDataManager()
    try:
        # Queue one pushlog fetch per hg repository that has a URL set.
        hg_repos = (repo for repo in rdm.get_all_repository_info()
                    if repo['url'] and repo['dvcs_type'] == 'hg')
        for repo in hg_repos:
            fetch_hg_push_log.apply_async(
                args=(repo['name'], repo['url']),
                routing_key='pushlog'
            )
    finally:
        rdm.disconnect()
def fetch_missing_push_logs(missing_pushlogs):
    """Run several fetch_hg_push_log subtasks, one per repository"""
    rdm = RefDataManager()
    try:
        for repo in rdm.get_all_repository_info():
            # Only hg repos with a URL that actually have missing
            # pushlogs recorded are of interest here.
            if not repo['url'] or repo['dvcs_type'] != 'hg':
                continue
            if repo['name'] not in missing_pushlogs:
                continue
            # we must get them one at a time, because if ANY are missing
            # from json-pushes, it'll return a 404 for the group.
            for resultset in missing_pushlogs[repo['name']]:
                fetch_missing_hg_push_logs.apply_async(
                    args=(repo['name'], repo['url'], resultset),
                    routing_key='pushlog')
    finally:
        rdm.disconnect()
def fetch_missing_push_logs(missing_pushlogs):
    """Run several fetch_hg_push_log subtasks, one per repository"""
    rdm = RefDataManager()
    try:
        candidates = [repo for repo in rdm.get_all_repository_info()
                      if repo['url']]
        for repo in candidates:
            if repo['dvcs_type'] == 'hg' and repo['name'] in missing_pushlogs:
                # we must get them one at a time, because if ANY are missing
                # from json-pushes, it'll return a 404 for the group.
                for resultset in missing_pushlogs[repo['name']]:
                    fetch_missing_hg_push_logs.apply_async(
                        args=(repo['name'], repo['url'], resultset),
                        routing_key='pushlog'
                    )
    finally:
        rdm.disconnect()