Example #1
    def canMerge(self, change, allow_needs, event=None):
        log = get_annotated_logger(self.log, event)
        pagure = self.get_project_api_client(change.project.name)
        pr = pagure.get_pr(change.number)

        mergeable = False
        if pr.get('cached_merge_status') in ('FFORWARD', 'MERGE'):
            mergeable = True

        ci_flag = False
        if self._hasRequiredStatusChecks(change):
            ci_flag = True

        threshold = pr.get('threshold_reached')
        if threshold is None:
            self.log.debug("No threshold_reached attribute found")

        log.debug(
            'PR %s#%s mergeability details mergeable: %s '
            'flag: %s threshold: %s', change.project.name, change.number,
            mergeable, ci_flag, threshold)

        can_merge = mergeable and ci_flag and threshold

        log.info('Check PR %s#%s mergeability can_merge: %s',
                 change.project.name, change.number, can_merge)
        return can_merge
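Every example on this page funnels its logging through get_annotated_logger. A minimal sketch of such a helper, assuming it is a thin logging.LoggerAdapter that prefixes each message with the event id; the real Zuul helper also accepts event objects and an optional build id, so this is an approximation rather than Zuul's implementation:

    import logging

    class EventLogAdapter(logging.LoggerAdapter):
        """Prefix every message with the zuul event id, if one is known."""

        def process(self, msg, kwargs):
            event_id = self.extra.get('event_id')
            if event_id is not None:
                msg = '[e: %s] %s' % (event_id, msg)
            return msg, kwargs

    def get_annotated_logger(logger, event, build=None):
        # 'event' may be a plain id string or an object carrying a
        # zuul_event_id attribute, as in the examples on this page.
        event_id = getattr(event, 'zuul_event_id', event)
        return EventLogAdapter(logger, {'event_id': event_id, 'build': build})

    # Usage:
    log = get_annotated_logger(logging.getLogger('zuul.Repo'), 'abc123')
    log.debug('Resetting repository %s', '/tmp/repo')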
Example #2
 def getFiles(self,
              files,
              dirs=None,
              branch=None,
              commit=None,
              zuul_event_id=None):
     log = get_annotated_logger(self.log, zuul_event_id)
     ret = {}
     repo = self.createRepoObject(zuul_event_id)
     if branch:
         tree = repo.heads[branch].commit.tree
     else:
         tree = repo.commit(commit).tree
     for fn in files:
         if fn in tree:
             if tree[fn].type != 'blob':
                 log.warning("%s: object %s is not a blob", self.local_path,
                             fn)
             ret[fn] = tree[fn].data_stream.read().decode('utf8')
         else:
             ret[fn] = None
     if dirs:
         for dn in dirs:
             if dn not in tree:
                 continue
             for blob in tree[dn].traverse():
                 if blob.path.endswith(".yaml"):
                     ret[blob.path] = blob.data_stream.read().decode(
                         'utf-8')
     return ret
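A hedged usage sketch for getFiles, assuming a merger Repo instance named repo; the file and directory names here are hypothetical:

    # Unknown files map to None; .yaml files under the listed dirs are
    # collected keyed by their path in the tree.
    files = repo.getFiles(['zuul.yaml', '.zuul.yaml'],
                          dirs=['zuul.d', '.zuul.d'],
                          branch='master')
    missing = [fn for fn, body in files.items() if body is None]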
Example #3
    def updateRepo(self,
                   connection_name,
                   project_name,
                   repo_state=None,
                   zuul_event_id=None,
                   build=None,
                   process_worker=None):
        log = get_annotated_logger(self.log, zuul_event_id, build=build)
        repo = self.getRepo(connection_name,
                            project_name,
                            zuul_event_id=zuul_event_id)
        try:
            # If we got a repo_state, check whether an update is needed.
            if repo_state and not repo.isUpdateNeeded(
                    repo_state, zuul_event_id=zuul_event_id):
                log.info("Skipping updating local repository %s/%s",
                         connection_name, project_name)
            else:
                log.info("Updating local repository %s/%s", connection_name,
                         project_name)
                repo.reset(zuul_event_id=zuul_event_id,
                           build=build,
                           process_worker=process_worker)
        except Exception:
            log.exception("Unable to update %s/%s", connection_name,
                          project_name)
            raise
Example #4
    def _onTrigger(self, tenant, pipeline_name, timespec):
        self.log.debug(
            'Got trigger for tenant %s and pipeline %s with '
            'timespec %s', tenant.name, pipeline_name, timespec)
        for project_name, pcs in tenant.layout.project_configs.items():
            # The timer trigger operates on branch heads, so it doesn't
            # need speculative layouts to decide whether an event should
            # be enqueued; cached data is enough.
            pcst = tenant.layout.getAllProjectConfigs(project_name)
            if not any(pipeline_name in pc.pipelines for pc in pcst):
                continue

            (trusted, project) = tenant.getProject(project_name)
            for branch in project.source.getProjectBranches(project, tenant):
                event = TimerTriggerEvent()
                event.type = 'timer'
                event.timespec = timespec
                event.forced_pipeline = pipeline_name
                event.project_hostname = project.canonical_hostname
                event.project_name = project.name
                event.ref = 'refs/heads/%s' % branch
                event.branch = branch
                event.zuul_event_id = str(uuid4().hex)
                event.timestamp = time.time()
                log = get_annotated_logger(self.log, event)
                log.debug("Adding event")
                self.sched.addEvent(event)
Example #5
 def setRef(self, path, hexsha, repo=None, zuul_event_id=None):
     log = get_annotated_logger(self.log, zuul_event_id)
     log.debug("Create reference %s at %s in %s", path, hexsha,
               self.local_path)
     if repo is None:
         repo = self.createRepoObject(zuul_event_id)
     self._setRef(path, hexsha, repo)
Example #6
    def enqueueChangesAhead(self,
                            change,
                            event,
                            quiet,
                            ignore_requirements,
                            change_queue,
                            history=None):
        log = get_annotated_logger(self.log, event)

        if hasattr(change, 'number'):
            history = history or []
            history = history + [change]
        else:
            # Don't enqueue dependencies ahead of a non-change ref.
            return True

        ret = self.checkForChangesNeededBy(change, change_queue, event)
        if ret in [True, False]:
            return ret
        log.debug("  Changes %s must be merged ahead of %s", ret, change)
        for needed_change in ret:
            r = self.addChange(needed_change,
                               event,
                               quiet=quiet,
                               ignore_requirements=ignore_requirements,
                               change_queue=change_queue,
                               history=history)
            if not r:
                return False
        return True
Example #7
    def cancel(self, build):
        log = get_annotated_logger(self.log, build.zuul_event_id,
                                   build=build.uuid)
        # Returns whether a running build was canceled
        log.info("Cancel build %s for job %s", build, build.job)

        build.canceled = True
        try:
            job = build.__gearman_job  # noqa
        except AttributeError:
            log.debug("Build has no associated gearman job")
            return False

        if build.__gearman_worker is not None:
            log.debug("Build has already started")
            self.cancelRunningBuild(build)
            log.debug("Canceled running build")
            return True
        else:
            log.debug("Build has not started yet")

        log.debug("Looking for build in queue")
        if self.cancelJobInQueue(build):
            log.debug("Removed build from queue")
            return False

        time.sleep(1)

        log.debug("Still unable to find build to cancel")
        if build.__gearman_worker is not None:
            log.debug("Build has just started")
            self.cancelRunningBuild(build)
            log.debug("Canceled running build")
            return True
        log.error("Unable to cancel build")
Example #8
 def approve_mr(self,
                project_name,
                number,
                patchset,
                approve=True,
                zuul_event_id=None):
     # Compute params before 'approve' is rebound to the endpoint name;
     # only the approve call sends the expected head sha.
     params = {'sha': patchset} if approve else {}
     approve = 'approve' if approve else 'unapprove'
     path = "/projects/%s/merge_requests/%s/%s" % (
         quote_plus(project_name), number, approve)
     resp = self.post(self.baseurl + path,
                      params=params,
                      zuul_event_id=zuul_event_id)
     try:
         self._manage_error(*resp, zuul_event_id=zuul_event_id)
     except GitlabAPIClientException:
         # The approve and unapprove endpoints may return 401 regardless
         # of the actual approval state of the merge request; calling the
         # approve endpoint twice makes the second call return 401.
         # 409 is returned when the current HEAD of the merge request
         # doesn't match the 'sha' parameter.
         if resp[1] not in (401, 409):
             raise
         elif approve == 'approve' and resp[1] == 409:
             log = get_annotated_logger(self.log, zuul_event_id)
             log.error('Failed to approve the merge request: %s' % resp[0])
             return
     return resp[0]
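A usage sketch showing why the 401 is tolerated above; the client instance and project path are hypothetical:

    # GitLab answers 401 when approving an already-approved merge
    # request, so calling approve_mr twice is treated as success.
    client.approve_mr('group/project', 42, patchset='deadbeef')
    client.approve_mr('group/project', 42, patchset='deadbeef')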
Example #9
 def _getChange(self,
                project,
                number,
                patch_number=None,
                refresh=False,
                url=None,
                event=None):
     log = get_annotated_logger(self.log, event)
     key = (project.name, str(number), str(patch_number))
     change = self._change_cache.get(key)
     if change and not refresh:
         log.debug("Getting change from cache %s" % str(key))
         return change
     if not change:
         change = MergeRequest(project.name)
         change.project = project
         change.number = number
         # patch_number is the tip commit SHA of the MR
         change.patchset = patch_number
         change.url = url or self.getMRUrl(project.name, number)
         change.uris = [change.url.split('://', 1)[-1]]  # remove scheme
     self._change_cache[key] = change
     try:
         log.debug("Getting change mr#%s from project %s" %
                   (number, project.name))
         self._updateChange(change, event)
     except Exception:
         if key in self._change_cache:
             del self._change_cache[key]
         raise
     return change
Example #10
    def reviseRequest(self, request, relative_priority=None):
        '''Attempt to update the node request, if it is not currently being
        processed.

        :param NodeRequest request: The request to update.
        :param int relative_priority: If supplied, the new relative
            priority to set on the request.

        '''
        log = get_annotated_logger(self.log, request.event_id)
        if relative_priority is None:
            return
        try:
            self.sched.zk.lockNodeRequest(request, blocking=False)
        except LockException:
            # It may be locked by nodepool, which is fine.
            log.debug("Unable to revise locked node request %s", request)
            return False
        try:
            old_priority = request.relative_priority
            request.relative_priority = relative_priority
            self.sched.zk.storeNodeRequest(request)
            log.debug("Revised relative priority of "
                      "node request %s from %s to %s",
                      request, old_priority, relative_priority)
        except Exception:
            log.exception("Unable to update node request %s", request)
        finally:
            try:
                self.sched.zk.unlockNodeRequest(request)
            except Exception:
                log.exception("Unable to unlock node request %s", request)
Example #11
    def _updateNodeRequest(self, request, deleted):
        log = get_annotated_logger(self.log, request.event_id)
        # Return False to indicate that we should stop watching the
        # node.
        log.debug("Updating node request %s", request)

        if request.uid not in self.requests:
            log.debug("Request %s is unknown", request.uid)
            return False

        if request.canceled:
            del self.requests[request.uid]
            self.emitStats(request)
            return False

        # TODOv3(jeblair): handle allocation failure
        if deleted:
            log.debug("Resubmitting lost node request %s", request)
            request.id = None
            self.sched.zk.submitNodeRequest(request, self._updateNodeRequest)
        elif request.state in (model.STATE_FULFILLED, model.STATE_FAILED):
            log.info("Node request %s %s", request, request.state)

            # Give our results to the scheduler.
            self.sched.onNodesProvisioned(request)
            del self.requests[request.uid]

            self.emitStats(request)

            # Stop watching this request node.
            return False

        return True
Example #12
 def reset(self, zuul_event_id=None, build=None):
     log = get_annotated_logger(self.log, zuul_event_id, build=build)
     log.debug("Resetting repository %s", self.local_path)
     self.update(zuul_event_id=zuul_event_id, build=build)
     repo = self.createRepoObject(zuul_event_id, build=build)
     origin = repo.remotes.origin
     seen = set()
     head = None
     stale_refs = origin.stale_refs
     # Update our local heads to match the remote, and pick one to
     # reset the repo to.  We don't delete anything at this point
     # because we want to make sure the repo is in a state stable
     # enough for git to operate.
     for ref in origin.refs:
         if ref.remote_head == 'HEAD':
             continue
         if ref in stale_refs:
             continue
         repo.create_head(ref.remote_head, ref, force=True)
         seen.add(ref.remote_head)
         if head is None:
             head = ref.remote_head
     log.debug("Reset to %s", head)
     repo.head.reference = head
     for ref in stale_refs:
         log.debug("Delete stale ref %s", ref.remote_head)
         # A stale ref means the upstream branch (e.g. foobar) was deleted
         # so we need to delete both our local head (if existing) and the
         # remote tracking head. Both repo.heads and ref.remote_head
         # contain the pure branch name so they can be compared easily.
         for head in repo.heads:
             if head.name == ref.remote_head:
                 repo.delete_head(ref.remote_head, force=True)
                 break
         git.refs.RemoteReference.delete(repo, ref, force=True)
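For reference, stale_refs is GitPython's list of remote-tracking refs whose upstream branch was deleted. A quick way to inspect them, assuming a clone at a hypothetical path:

    import git

    repo = git.Repo('/path/to/clone')  # hypothetical path
    origin = repo.remotes.origin
    # Remote-tracking refs whose upstream branch no longer exists:
    for ref in origin.stale_refs:
        print(ref.remote_head)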
Example #13
    def setCommitStatus(self, item):
        log = get_annotated_logger(self.log, item.event)

        project = item.change.project.name
        if hasattr(item.change, 'patchset'):
            sha = item.change.patchset
        elif hasattr(item.change, 'newrev'):
            sha = item.change.newrev
        state = self._commit_status

        url = item.formatStatusUrl()

        description = '%s status: %s' % (item.pipeline.name,
                                         self._commit_status)

        if len(description) >= 140:
            # This pipeline has a long name, so the description would
            # overflow the GitHub limit of 1024 bytes. Truncate it; in
            # practice, anything over 140 characters seems to trip the
            # limit.
            description = 'status: %s' % self._commit_status

        log.debug(
            'Reporting change %s, params %s, '
            'context: %s, state: %s, description: %s, url: %s', item.change,
            self.config, self.context, state, description, url)

        self.connection.setCommitStatus(project,
                                        sha,
                                        state,
                                        url,
                                        description,
                                        self.context,
                                        zuul_event_id=item.event)
Example #14
    def getChangeQueue(self, change, event, existing=None):
        log = get_annotated_logger(self.log, event)

        # Create a new change queue for every project-ref combination.
        if existing:
            return DynamicChangeQueueContextManager(existing)

        # Don't use Pipeline.getQueue to find an existing queue
        # because we're matching project and (branch or ref).
        for queue in self.pipeline.queues:
            if (queue.queue[-1].change.project == change.project and
                ((hasattr(change, 'branch') and
                  hasattr(queue.queue[-1].change, 'branch') and
                  queue.queue[-1].change.branch == change.branch) or
                queue.queue[-1].change.ref == change.ref)):
                log.debug("Found existing queue %s", queue)
                return DynamicChangeQueueContextManager(queue)
        change_queue = model.ChangeQueue(
            self.pipeline,
            window=1,
            window_floor=1,
            window_increase_type='none',
            window_decrease_type='none')
        change_queue.addProject(change.project)
        self.pipeline.addQueue(change_queue)
        log.debug("Dynamically created queue %s", change_queue)
        return DynamicChangeQueueContextManager(change_queue)
Example #15
 def _getChange(self, project, number, patchset=None,
                refresh=False, url=None, event=None):
     log = get_annotated_logger(self.log, event)
     key = (project.name, number, patchset)
     change = self._change_cache.get(key)
     if change and not refresh:
         log.debug("Getting change from cache %s" % str(key))
         return change
     if not change:
         change = MergeRequest(project.name)
         change.project = project
         change.number = number
         # patchset is the tip commit SHA of the PR
         change.patchset = patchset
         change.url = url
         # change.uris must be a list of URIs; list(url) would split
         # the URL string into characters.
         change.uris = [url]
     self._change_cache[key] = change
     try:
         log.debug("Getting change mr#%s from project %s" % (
             number, project.name))
         self._updateChange(change, event)
     except Exception:
         if key in self._change_cache:
             del self._change_cache[key]
         raise
     return change
Example #16
    def _updateChange(self, change, event):
        log = get_annotated_logger(self.log, event)
        log.info("Updating change from Gitlab %s" % change)
        change.mr = self.getMR(change.project.name, change.number, event=event)
        change.ref = "refs/merge-requests/%s/head" % change.number
        change.branch = change.mr['target_branch']
        change.patchset = change.mr['sha']
        change.commit_id = change.mr['diff_refs'].get('head_sha')
        change.owner = change.mr['author'].get('username')
        # File changes are not part of the merge request data.
        # See api/merge_requests.html#get-single-mr-changes;
        # that endpoint includes file change information.
        change.files = None
        change.title = change.mr['title']
        change.open = change.mr['state'] == 'opened'
        change.is_merged = change.mr['state'] == 'merged'
        # Can be "can_be_merged"
        change.merge_status = change.mr['merge_status']
        change.approved = change.mr['approved']
        change.message = change.mr['description']
        change.labels = change.mr['labels']
        change.updated_at = int(
            dateutil.parser.parse(change.mr['updated_at']).timestamp())
        log.info("Updated change from Gitlab %s" % change)

        if self.sched:
            self.sched.onChangeUpdated(change, event)

        return change
Example #17
    def _updateChange(self, change, event):
        log = get_annotated_logger(self.log, event)
        log.info("Updating change from Gitlab %s" % change)
        change.mr = self.getPull(
            change.project.name, change.number, event=event)
        change.ref = "refs/merge-requests/%s/head" % change.number
        change.branch = change.mr['target_branch']
        change.patchset = change.mr['sha']
        # File changes are not part of the merge request data.
        # See api/merge_requests.html#get-single-mr-changes;
        # that endpoint includes file change information.
        change.files = None
        change.title = change.mr['title']
        change.open = change.mr['state'] == 'opened'
        change.is_merged = change.mr['merged_at'] is not None
        # Can be "can_be_merged"
        change.merge_status = change.mr['merge_status']
        change.message = change.mr['description']
        change.labels = change.mr['labels']
        # 'Z' marks UTC; avoid the non-portable strftime('%s').
        change.updated_at = int(datetime.strptime(
            change.mr['updated_at'], '%Y-%m-%dT%H:%M:%S.%fZ'
        ).replace(tzinfo=timezone.utc).timestamp())
        log.info("Updated change from Gitlab %s" % change)

        if self.sched:
            self.sched.onChangeUpdated(change, event)

        return change
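The trailing 'Z' in updated_at marks UTC, which is why the parse above attaches timezone.utc before taking the epoch; the dateutil variant in Example #16 gets the same result automatically. A minimal check of the arithmetic:

    from datetime import datetime, timezone

    ts = datetime.strptime('2020-01-01T00:00:00.000Z',
                           '%Y-%m-%dT%H:%M:%S.%fZ')
    # strptime yields a naive datetime; attach the UTC zone the 'Z'
    # implies, then take the epoch (2020-01-01T00:00:00Z is 1577836800).
    assert int(ts.replace(tzinfo=timezone.utc).timestamp()) == 1577836800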
Example #18
 def commentMR(self, project_name, number, message, event=None):
     log = get_annotated_logger(self.log, event)
     self.gl_client.comment_mr(project_name,
                               number,
                               message,
                               zuul_event_id=event)
     log.info("Commented on MR %s#%s", project_name, number)
Example #19
    def __init__(self, remote, local, email, username, speed_limit, speed_time,
                 sshkey=None, cache_path=None, logger=None, git_timeout=300,
                 retry_attempts=3, retry_interval=30, zuul_event_id=None):
        if logger is None:
            self.log = logging.getLogger("zuul.Repo")
        else:
            self.log = logger
        log = get_annotated_logger(self.log, zuul_event_id)
        self.env = {
            'GIT_HTTP_LOW_SPEED_LIMIT': speed_limit,
            'GIT_HTTP_LOW_SPEED_TIME': speed_time,
        }
        self.git_timeout = git_timeout
        self.sshkey = sshkey
        if sshkey:
            self.env['GIT_SSH_COMMAND'] = 'ssh -i %s' % (sshkey,)

        self.remote_url = remote
        self.local_path = local
        self.email = email
        self.username = username
        self.cache_path = cache_path
        self._initialized = False
        self.retry_attempts = retry_attempts
        self.retry_interval = retry_interval
        try:
            self._setup_known_hosts()
        except Exception:
            log.exception("Unable to set up known_hosts for %s", remote)
        try:
            self._ensure_cloned(zuul_event_id)
            self._git_set_remote_url(
                git.Repo(self.local_path), self.remote_url)
        except Exception:
            log.exception("Unable to initialize repo for %s", remote)
Example #20
    def updateCheck(self, item):
        log = get_annotated_logger(self.log, item.event)
        message = self._formatItemReport(item)
        project = item.change.project.name
        pr_number = item.change.number
        sha = item.change.patchset

        # Check whether the buildset is finished. If it is, we must
        # provide additional parameters when updating the check_run via
        # the Github API later on.
        completed = item.current_build_set.result is not None
        status = self._check

        log.debug(
            "Updating check for change %s, params %s, context %s, message: %s",
            item.change, self.config, self.context, message)

        details_url = item.formatStatusUrl()

        # Check for inline comments that can be reported via checks API
        file_comments = self.getFileComments(item)

        return self.connection.updateCheck(
            project,
            pr_number,
            sha,
            status,
            completed,
            self.context,
            details_url,
            message,
            file_comments,
            zuul_event_id=item.event,
        )
Example #21
    def checkForChangesNeededBy(self, change, change_queue, event):
        log = get_annotated_logger(self.log, event)

        if self.pipeline.ignore_dependencies:
            return True
        log.debug("Checking for changes needed by %s:" % change)
        # Return True if it is okay to proceed enqueuing this change,
        # False if the change should not be enqueued.
        if (hasattr(change, 'commit_needs_changes') and
            (change.refresh_deps or change.commit_needs_changes is None)):
            self.updateCommitDependencies(change, None, event)
        if not hasattr(change, 'needs_changes'):
            log.debug("  %s does not support dependencies" % type(change))
            return True
        if not change.needs_changes:
            log.debug("  No changes needed")
            return True
        changes_needed = []
        for needed_change in change.needs_changes:
            log.debug("  Change %s needs change %s:" % (change, needed_change))
            if needed_change.is_merged:
                log.debug("  Needed change is merged")
                continue
            if self.isChangeAlreadyInQueue(needed_change, change_queue):
                log.debug("  Needed change is already ahead in the queue")
                continue
            log.debug("  Change %s is needed" % needed_change)
            if needed_change not in changes_needed:
                changes_needed.append(needed_change)
                continue
            # This differs from the dependent pipeline check in not
            # verifying that the dependent change is mergeable.
        if changes_needed:
            return changes_needed
        return True
Example #22
    def report(self, item):
        """Send a message to gerrit."""
        log = get_annotated_logger(self.log, item.event)

        # If the source is not a GerritSource we cannot report anything here.
        if not isinstance(item.change.project.source, GerritSource):
            return

        # To support several Gerrit connections we must also filter by
        # the canonical hostname.
        if item.change.project.source.connection.canonical_hostname != \
                self.connection.canonical_hostname:
            return

        comments = self._getFileComments(item)
        self.filterComments(item, comments)
        message = self._formatItemReport(item)

        log.debug("Report change %s, params %s, message: %s, comments: %s",
                  item.change, self.config, message, comments)
        item.change._ref_sha = item.change.project.source.getRefSha(
            item.change.project, 'refs/heads/' + item.change.branch)

        return self.connection.review(item.change,
                                      message,
                                      self.config,
                                      comments,
                                      zuul_event_id=item.event)
Example #23
    def canMerge(self, change, allow_needs, event=None):
        log = get_annotated_logger(self.log, event)
        pagure = self.get_project_api_client(change.project.name)
        pr = pagure.get_pr(change.number)

        mergeable = False
        if pr.get('cached_merge_status') in ('FFORWARD', 'MERGE'):
            mergeable = True

        ci_flag = False
        if self._hasRequiredStatusChecks(change):
            ci_flag = True

        # By default a project gets -1 for "Minimum score to merge
        # pull-request", which makes the API return None for
        # threshold_reached. Treat that as threshold_reached: True,
        # since it means no minimal score is configured.
        threshold = pr.get('threshold_reached')
        if threshold is None:
            threshold = True

        log.debug(
            'PR %s#%s mergeability details mergeable: %s '
            'flag: %s threshold: %s', change.project.name, change.number,
            mergeable, ci_flag, threshold)

        can_merge = mergeable and ci_flag and threshold

        log.info('Check PR %s#%s mergeability can_merge: %s',
                 change.project.name, change.number, can_merge)
        return can_merge
Example #24
    def _ssh(self, command, stdin_data=None, zuul_event_id=None):
        log = get_annotated_logger(self.log, zuul_event_id)
        if not self.client:
            self._open()

        try:
            log.debug("SSH command:\n%s", command)
            stdin, stdout, stderr = self.client.exec_command(command)
        except Exception:
            self._open()
            stdin, stdout, stderr = self.client.exec_command(command)

        if stdin_data:
            stdin.write(stdin_data)

        out = stdout.read().decode('utf-8')
        self.iolog.debug("SSH received stdout:\n%s" % out)

        ret = stdout.channel.recv_exit_status()
        log.debug("SSH exit status: %s", ret)

        err = stderr.read().decode('utf-8')
        if err.strip():
            log.debug("SSH received stderr:\n%s", err)

        if ret:
            log.debug("SSH received stdout:\n%s", out)
            raise Exception("Gerrit error executing %s" % command)
        return (out, err)
Example #25
 def cherryPick(self, ref, zuul_event_id=None):
     log = get_annotated_logger(self.log, zuul_event_id)
     repo = self.createRepoObject(zuul_event_id)
     log.debug("Cherry-picking %s", ref)
     self.fetch(ref, zuul_event_id=zuul_event_id)
     repo.git.cherry_pick("FETCH_HEAD")
     return repo.head.commit
Example #26
        def _query_chunk(query, event):
            args = '--commit-message --current-patch-set'

            cmd = 'gerrit query --format json %s %s' % (args, query)
            out, err = self._ssh(cmd)
            if not out:
                return False, None
            lines = out.split('\n')
            if not lines:
                return False, None

            # filter out blank lines
            data = [json.loads(line) for line in lines if line.startswith('{')]

            # check last entry for more changes
            more_changes = None
            if 'moreChanges' in data[-1]:
                more_changes = data[-1]['moreChanges']

            # we have to remove the statistics line
            del data[-1]

            if not data:
                return False, more_changes
            iolog = get_annotated_logger(self.iolog, event)
            iolog.debug("Received data from Gerrit query: \n%s",
                        pprint.pformat(data))
            return data, more_changes
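more_changes exists to drive pagination. A hedged sketch of a consumer loop, assuming the Gerrit query supports a --start offset; this driver function is hypothetical, not the surrounding class's actual caller:

    def _query_all(query, event):
        # Keep requesting chunks until Gerrit stops signalling
        # moreChanges; the offset resumes where the last chunk ended.
        alldata = []
        chunk, more_changes = _query_chunk(query, event)
        while chunk:
            alldata.extend(chunk)
            if not more_changes:
                break
            chunk, more_changes = _query_chunk(
                '%s --start %s' % (query, len(alldata)), event)
        return alldata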
Example #27
    def _addProject(self, hostname, project_name, url, sshkey, zuul_event_id):
        repo = None
        key = '/'.join([hostname, project_name])
        try:
            path = os.path.join(self.working_root, hostname, project_name)
            if self.cache_root:
                cache_path = os.path.join(self.cache_root, hostname,
                                          project_name)
            else:
                cache_path = None
            repo = Repo(url,
                        path,
                        self.email,
                        self.username,
                        self.speed_limit,
                        self.speed_time,
                        sshkey=sshkey,
                        cache_path=cache_path,
                        logger=self.logger,
                        git_timeout=self.git_timeout,
                        zuul_event_id=zuul_event_id)

            self.repos[key] = repo
        except Exception:
            log = get_annotated_logger(self.log, zuul_event_id)
            log.exception("Unable to add project %s/%s", hostname,
                          project_name)
        return repo
Example #28
 def get(self, url, zuul_event_id=None):
     log = get_annotated_logger(self.log, zuul_event_id)
     log.debug("Getting resource %s ..." % url)
     ret = self.session.get(url, headers=self.headers)
     log.debug("GET returned (code: %s): %s" % (
         ret.status_code, ret.text))
     return ret.json(), ret.status_code, ret.url, 'GET'
Example #29
 def _restoreRepoState(self,
                       connection_name,
                       project_name,
                       repo,
                       repo_state,
                       zuul_event_id,
                       process_worker=None):
     log = get_annotated_logger(self.log, zuul_event_id)
     projects = repo_state.get(connection_name, {})
     project = projects.get(project_name, {})
     if not project:
         # We don't have a state for this project.
         return
     log.debug("Restore repo state for project %s/%s", connection_name,
               project_name)
     if process_worker is None:
         repo.setRefs(project,
                      keep_remotes=self.execution_context,
                      zuul_event_id=zuul_event_id)
     else:
         job = process_worker.submit(Repo.setRefsAsync,
                                     repo.local_path,
                                     project,
                                     keep_remotes=self.execution_context)
         messages = job.result()
         for message in messages:
             log.debug(message)
Example #30
    def onBuildCompleted(self, job):
        data = getJobData(job)
        zuul_event_id = data.get('zuul_event_id')
        log = get_annotated_logger(self.log, zuul_event_id)

        merged = data.get('merged', False)
        job.updated = data.get('updated', False)
        commit = data.get('commit')
        files = data.get('files', {})
        repo_state = data.get('repo_state', {})
        item_in_branches = data.get('item_in_branches', [])
        job.files = files
        log.info(
            "Merge %s complete, merged: %s, updated: %s, "
            "commit: %s, branches: %s", job, merged, job.updated, commit,
            item_in_branches)
        job.setComplete()
        if job.build_set:
            if job.name == 'merger:fileschanges':
                self.sched.onFilesChangesCompleted(job.build_set, files)
            else:
                self.sched.onMergeCompleted(job.build_set, merged, job.updated,
                                            commit, files, repo_state,
                                            item_in_branches)
        # The test suite expects the job to be removed from the
        # internal accounting after the wake flag is set.
        self.jobs.remove(job)