Пример #1
0
  def UpdateLatestMarkers(self, manifest_branch, debug, upload_urls=None):
    """Update the LATEST markers in GS archive area.

    Writes one local LATEST-<branch> and one LATEST-<version> file containing
    self.version, then uploads each to every upload URL's parent directory.

    Args:
      manifest_branch: The name of the branch in the manifest for this run.
      debug: Boolean debug value for this run.
      upload_urls: Google storage urls to upload the Latest Markers to.
        Defaults to [self.upload_url] when empty or None.
    """
    if not upload_urls:
      upload_urls = [self.upload_url]
    # self.version will be one of these forms, shown through examples:
    # R35-1234.5.6 or R35-1234.5.6-b123.  In either case, we want "1234.5.6".
    version_marker = self.version.split('-')[1]

    filenames = ('LATEST-%s' % manifest_branch,
                 'LATEST-%s' % version_marker)
    base_archive_path = os.path.dirname(self.archive_path)
    base_upload_urls = [os.path.dirname(url) for url in upload_urls]
    # Write each marker file exactly once, then fan out the uploads.  The
    # previous loop nesting rewrote the identical file once per upload URL.
    for filename in filenames:
      latest_path = os.path.join(base_archive_path, filename)
      osutils.WriteFile(latest_path, self.version, mode='w')
      for base_upload_url in base_upload_urls:
        commands.UploadArchivedFile(
            base_archive_path, [base_upload_url], filename,
            debug, acl=self.upload_acl)
Пример #2
0
    def UploadArtifact(self, path, archive=True, strict=True):
        """Upload generated artifact to Google Storage.

        Args:
          path: Path of local file to upload to Google Storage
            if |archive| is True. Otherwise, this is the name of the file
            in self.archive_path.
          archive: Whether to automatically copy files to the archive dir.
          strict: Whether to treat upload errors as fatal.
        """
        # Copy into the archive dir first when requested; the returned name
        # is relative to self.archive_path either way.
        if archive:
            filename = commands.ArchiveFile(path, self.archive_path)
        else:
            filename = path
        upload_urls = self._GetUploadUrls(filename)
        try:
            commands.UploadArchivedFile(self.archive_path,
                                        upload_urls,
                                        filename,
                                        self._run.debug,
                                        update_list=True,
                                        acl=self.acl)
        except failures_lib.GSUploadFailure as e:
            logging.PrintBuildbotStepText('Upload failed')
            fatal = e.HasFatalFailure(
                whitelist=[gs.GSContextException, timeout_util.TimeoutError])
            if fatal or strict:
                raise
            # Treat gsutil flake as a warning if it's the only problem.
            self._HandleExceptionAsWarning(sys.exc_info())
Пример #3
0
    def _UploadArchiveIndex(self, builder_run):
        """Upload an HTML index for the artifacts at remote archive location.

        If there are no artifacts in the archive then do nothing.

        Args:
          builder_run: BuilderRun object for this run.

        Returns:
          If an index file is uploaded then a dict is returned where each value
            is the same (the URL for the uploaded HTML index) and the keys are
            the boards it applies to, including None if applicable.  If no index
            file is uploaded then this returns None.
        """
        archive = builder_run.GetArchive()
        archive_path = archive.archive_path

        config = builder_run.config
        boards = config.boards
        if not boards:
            boards = [None]
            board_names = '<no board>'
        else:
            board_names = ' '.join(boards)

        # The UPLOADED list is only present when artifacts were archived.
        uploaded = os.path.join(archive_path, commands.UPLOADED_LIST_FILENAME)
        if not os.path.exists(uploaded):
            # UPLOADED doesn't exist.  Normal if Archive stage never ran, which
            # is possibly normal.  Regardless, no archive index is needed.
            logging.info('No archived artifacts found for %s run (%s)',
                         builder_run.config.name, board_names)
            return None

        # Prepare html head.
        head = self._HTML_HEAD % {
            'board': board_names,
            'config': config.name,
            'version': builder_run.GetVersion(),
        }

        files = osutils.ReadFile(uploaded).splitlines()
        files.extend(['.|Google Storage Index', '..|'])
        index = os.path.join(archive_path, 'index.html')
        # TODO (sbasi) crbug.com/362776: Rework the way we do uploading to
        # multiple buckets. Currently this can only be done in the Archive Stage
        # therefore index.html will only end up in the normal Chrome OS bucket.
        commands.GenerateHtmlIndex(index,
                                   files,
                                   url_base=archive.download_url,
                                   head=head)
        commands.UploadArchivedFile(archive_path, [archive.upload_url],
                                    os.path.basename(index),
                                    debug=self._run.debug,
                                    acl=self.acl)
        return {board: archive.download_url for board in boards}
Пример #4
0
    def _UploadBuildStagesTimeline(self, builder_run, buildbucket_id):
        """Upload an HTML timeline for the build stages at remote archive location.

        Args:
          builder_run: BuilderRun object for this run.
          buildbucket_id: Buildbucket id for the current build.

        Returns:
          If an index file is uploaded then a dict is returned where each value
            is the same (the URL for the uploaded HTML index) and the keys are
            the boards it applies to, including None if applicable.  If no index
            file is uploaded then this returns None.
        """
        archive = builder_run.GetArchive()
        archive_path = archive.archive_path

        config = builder_run.config
        boards = config.boards
        if not boards:
            boards = [None]
            board_names = '<no board>'
        else:
            board_names = ' '.join(boards)

        timeline_file = 'timeline-stages.html'
        timeline = os.path.join(archive_path, timeline_file)

        # Gather information about this build from CIDB.
        stages = self.buildstore.GetBuildsStages(
            buildbucket_ids=[buildbucket_id])

        # Many stages are started in parallel after the build finishes. Stages
        # are sorted by start_time first because it shows that progression most
        # clearly. Sort by finish_time secondarily to display those parallel
        # stages cleanly.  Missing timestamps sort first via the epoch fallback.
        epoch = datetime.datetime.fromtimestamp(0)

        def _sort_key(stage):
            return (stage['start_time'] or epoch, stage['finish_time'] or epoch)

        stages.sort(key=_sort_key)
        rows = [(s['name'], s['start_time'], s['finish_time']) for s in stages]

        # Prepare html head.
        title = ('Build Stages Timeline: %s / %s (%s config)' %
                 (board_names, builder_run.GetVersion(), config.name))

        commands.GenerateHtmlTimeline(timeline, rows, title=title)
        commands.UploadArchivedFile(archive_path, [archive.upload_url],
                                    os.path.basename(timeline),
                                    debug=self._run.options.debug_forced,
                                    update_list=True,
                                    acl=self.acl)
        return os.path.join(archive.download_url_file, timeline_file)
Пример #5
0
    def _UploadSlavesTimeline(self, builder_run, build_id, db):
        """Upload an HTML timeline for the slaves at remote archive location.

        Args:
          builder_run: BuilderRun object for this run.
          build_id: CIDB id for the master build.
          db: CIDBConnection instance.

        Returns:
          The URL of the timeline is returned if slave builds exists.  If no
            slave builds exists then this returns None.
        """
        archive = builder_run.GetArchive()
        archive_path = archive.archive_path

        config = builder_run.config
        boards = config.boards
        if not boards:
            boards = [None]
            board_names = '<no board>'
        else:
            board_names = ' '.join(boards)

        timeline_file = 'timeline-slaves.html'
        timeline = os.path.join(archive_path, timeline_file)

        # Gather information about this build from CIDB.
        statuses = db.GetSlaveStatuses(build_id)
        if not statuses:
            # No slave builds means there is no timeline to publish.
            return None

        # Slaves may be started at slightly different times, but what matters
        # most is which slave is the bottleneck - namely, which slave finishes
        # last.  Therefore, sort primarily by finish_time.  Missing timestamps
        # sort first via the epoch fallback.
        epoch = datetime.datetime.fromtimestamp(0)

        def _sort_key(status):
            return (status['finish_time'] or epoch,
                    status['start_time'] or epoch)

        statuses.sort(key=_sort_key)
        rows = [('%s - %s' % (s['build_config'], s['build_number']),
                 s['start_time'], s['finish_time']) for s in statuses]

        # Prepare html head.
        title = ('Slave Builds Timeline: %s / %s (%s config)' %
                 (board_names, builder_run.GetVersion(), config.name))

        commands.GenerateHtmlTimeline(timeline, rows, title=title)
        commands.UploadArchivedFile(archive_path, [archive.upload_url],
                                    os.path.basename(timeline),
                                    debug=self._run.debug,
                                    acl=self.acl)
        return os.path.join(archive.download_url_file, timeline_file)
Пример #6
0
    def _UploadBuildStagesTimeline(self, builder_run, build_id, db):
        """Upload an HTML timeline for the build stages at remote archive location.

        Args:
          builder_run: BuilderRun object for this run.
          build_id: CIDB id for the current build.
          db: CIDBConnection instance.

        Returns:
          If an index file is uploaded then a dict is returned where each value
            is the same (the URL for the uploaded HTML index) and the keys are
            the boards it applies to, including None if applicable.  If no index
            file is uploaded then this returns None.
        """
        archive = builder_run.GetArchive()
        archive_path = archive.archive_path

        config = builder_run.config
        boards = config.boards
        if not boards:
            boards = [None]
            board_names = '<no board>'
        else:
            board_names = ' '.join(boards)

        timeline_file = 'timeline-stages.html'
        timeline = os.path.join(archive_path, timeline_file)

        # Gather information about this build from CIDB.
        stages = db.GetBuildStages(build_id)
        rows = [(s['name'], s['start_time'], s['finish_time']) for s in stages]

        # Prepare html head.
        title = ('Build Stages Timeline: %s / %s (%s config)' %
                 (board_names, builder_run.GetVersion(), config.name))

        commands.GenerateHtmlTimeline(timeline, rows, title=title)
        commands.UploadArchivedFile(archive_path, [archive.upload_url],
                                    os.path.basename(timeline),
                                    debug=self._run.debug,
                                    acl=self.acl)
        return os.path.join(archive.download_url_file, timeline_file)
Пример #7
0
 def _uploader(gs_url, file_path, *args, **kwargs):
     """Upload the single local file |file_path| to |gs_url|.

     Extra positional and keyword arguments are forwarded unchanged to
     commands.UploadArchivedFile.
     """
     src_dir, name = os.path.split(file_path)
     logging.info('Uploading %s to %s', file_path, gs_url)
     commands.UploadArchivedFile(src_dir, [gs_url], name, *args, **kwargs)
Пример #8
0
    def _LinkArtifacts(self, builder_run):
        """Upload an HTML index for the artifacts at remote archive location.

    If there are no artifacts in the archive then do nothing.

    Args:
      builder_run: BuilderRun object for this run.
    """
        archive = builder_run.GetArchive()
        archive_path = archive.archive_path

        boards = builder_run.config.boards
        if boards:
            board_names = ' '.join(boards)
        else:
            boards = [None]
            board_names = '<no board>'

        # See if there are any artifacts found for this run.
        uploaded = os.path.join(archive_path, commands.UPLOADED_LIST_FILENAME)
        if not os.path.exists(uploaded):
            # UPLOADED doesn't exist.  Normal if Archive stage never ran, which
            # is possibly normal.  Regardless, no archive index is needed.
            logging.info('No archived artifacts found for %s run (%s)',
                         builder_run.config.name, board_names)
            return

        if builder_run.config.internal:
            # Internal builds simply link to pantheon directories, which require
            # authenticated access that most Googlers should have.
            artifacts_url = archive.download_url

        else:
            # External builds must allow unauthenticated access to build artifacts.
            # GS doesn't let unauthenticated users browse selected locations without
            # being able to browse everything (which would expose secret stuff).
            # So, we upload an index.html file and link to it instead of the
            # directory.
            title = 'Artifacts Index: %(board)s / %(version)s (%(config)s config)' % {
                'board': board_names,
                'config': builder_run.config.name,
                'version': builder_run.GetVersion(),
            }

            files = osutils.ReadFile(uploaded).splitlines() + [
                '.|Google Storage Index',
                '..|',
            ]

            index = os.path.join(archive_path, 'index.html')

            # TODO (sbasi) crbug.com/362776: Rework the way we do uploading to
            # multiple buckets. Currently this can only be done in the Archive Stage
            # therefore index.html will only end up in the normal Chrome OS bucket.
            commands.GenerateHtmlIndex(index, files, title=title)
            commands.UploadArchivedFile(archive_path, [archive.upload_url],
                                        os.path.basename(index),
                                        debug=self._run.debug,
                                        acl=self.acl)

            artifacts_url = os.path.join(archive.download_url_file,
                                         'index.html')

        links_build_description = '%s/%s' % (builder_run.config.name,
                                             archive.version)
        logging.PrintBuildbotLink('Artifacts[%s]' % links_build_description,
                                  artifacts_url)