Example #1
  def testExpandWildcards(self):
    path_list = TEMP_FILES_WITH_WILDCARDS[:]
    expected_path_list = TEMP_FILES[:]
    expected_path_list.sort()

    BuildTestFilesTree(self.temp_dir)

    expanded_path_list = archive_utils.ExpandWildcards(self.temp_dir, path_list)
    expanded_path_list.sort()
    self.assertEquals(expected_path_list, expanded_path_list)
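
Example #1 exercises archive_utils.ExpandWildcards, which is handed a base directory plus a list of relative paths that may contain shell-style wildcards and is expected to return the matching relative paths. The real helper is not shown in these snippets; the sketch below is only a rough stand-in built on the standard glob module, assuming wildcard entries are resolved against base_dir and literal entries pass through unchanged.

import glob
import os


def ExpandWildcardsSketch(base_dir, path_list):
  """Rough stand-in for archive_utils.ExpandWildcards (an assumption, not the
  real implementation): glob wildcard entries against base_dir, return their
  matches as paths relative to base_dir, and pass literal entries through."""
  expanded = []
  for path in path_list:
    if any(c in path for c in '*?['):
      matches = glob.glob(os.path.join(base_dir, path))
      expanded.extend(os.path.relpath(m, base_dir) for m in matches)
    else:
      expanded.append(path)
  return expanded
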
Example #2
    def BuildOldFilesList(self, source_file_name):
        """Build list of files from the old "file of paths" style input.

        Combine any source_file_name inputs found in the default tools dir and
        in any dirs given with --extra-archive-paths.
        """
        default_source = os.path.join(self._tool_dir, source_file_name)
        if os.path.exists(default_source):
            file_list = open(default_source).readlines()
        else:
            print 'WARNING: No default %s list found at %s' % (
                source_file_name, default_source)
            file_list = []
        file_list = [f.strip() for f in file_list]
        file_list.extend(self.GetExtraFiles(
            self.options.extra_archive_paths, source_file_name))
        file_list = archive_utils.ExpandWildcards(self._build_dir, file_list)
        return file_list

    def GetExtraFiles(self, extra_archive_paths, source_file_name):
        """Returns a list of extra files to package in the build output directory.

        For each of the paths in the extra_archive_paths list, this function
        checks to see if path/source_file_name exists. If so, it expects these
        files to contain a list of newline-separated filenames that it returns
        in a list. The paths in extra_archive_paths are relative to the
        directory specified by --src-dir.
        """
        extra_files_list = []
        extra_path_list = extra_archive_paths.split(',')
        for path in extra_path_list:
            path = path.strip()
            source_file = os.path.join(self._src_dir, path, source_file_name)
            if os.path.exists(source_file):
                new_files_list = open(source_file).readlines()
                extra_files_list.extend(new_files_list)

        extra_files_list = [e.strip() for e in extra_files_list]
        extra_files_list = archive_utils.ExpandWildcards(
            self._build_dir, extra_files_list)
        return extra_files_list
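
    # Illustration (hypothetical values, not from the source): with
    # --extra-archive-paths=cef,courgette and source_file_name='FILES',
    # GetExtraFiles above reads <src-dir>/cef/FILES and
    # <src-dir>/courgette/FILES when they exist, strips each line, and
    # expands any wildcard entries against the build directory.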

    def ArchiveBuild(self):
        """Zips build files and uploads them, their symbols, and a change log."""
        result = 0
        if self._build_revision is None:
            raise archive_utils.StagingError('No build revision was provided')
        print 'Staging in %s' % self._staging_dir

        fparser = archive_utils.FilesCfgParser(self._files_file,
                                               self.options.mode,
                                               self.options.arch)
        files_list = fparser.ParseLegacyList()
        self._archive_files = archive_utils.ExpandWildcards(
            self._build_dir, files_list)
        archives_list = fparser.ParseArchiveLists()
        # Check files and revision numbers.
        all_files_list = self._archive_files + [
            item['filename'] for sublist in archives_list.values()
            for item in sublist
        ]
        all_files_list.append(self._version_file)
        not_found = archive_utils.VerifyFiles(all_files_list, self._build_dir,
                                              self.options.ignore)
        not_found_optional = []
        for bad_fn in not_found[:]:
            if fparser.IsOptional(bad_fn):
                not_found_optional.append(bad_fn)
                not_found.remove(bad_fn)
                # Remove it from all file lists so we don't try to process it.
                if bad_fn in self._archive_files:
                    self._archive_files.remove(bad_fn)
                for archive_list in archives_list.values():
                    archive_list[:] = [
                        x for x in archive_list if bad_fn != x['filename']
                    ]
        # TODO(mmoss): Now that we can declare files optional in FILES.cfg, should
        # we only allow not_found_optional, and fail on any leftover not_found
        # files?

        print 'last change: %s' % self._build_revision
        previous_revision = self.GetLastBuildRevision()
        # TODO(agable): This conditional only works for svn because git can't easily
        # compare revisions.
        if (slave_utils.GitOrSubversion(self._src_dir) == 'svn'
                and self._build_revision <= previous_revision):
            # If there have been no changes, report it but don't raise an exception.
            # Someone might have pushed the "force build" button.
            print 'No changes since last build (r%s <= r%s)' % (
                self._build_revision, previous_revision)
            return 0

        print 'build name: %s' % self._build_name

        archive_name = 'chrome-%s.zip' % self.TargetPlatformName()
        archive_file = self.CreateArchiveFile(archive_name,
                                              self._archive_files)[1]

        # Handle any custom archives.
        # TODO(mmoss): Largely copied from stage_build.py. Maybe refactor more of
        # this into archive_utils.py.
        archive_files = [archive_file]
        for archive_name in archives_list:
            # The list might be empty if it was all 'not_found' optional files.
            if not archives_list[archive_name]:
                continue
            if fparser.IsDirectArchive(archives_list[archive_name]):
                fileobj = archives_list[archive_name][0]
                # Copy the file to the path specified in archive_name, which
                # might be different from the dirname or basename in 'filename'
                # (allowed by 'direct_archive').
                stage_subdir = os.path.dirname(archive_name)
                stage_fn = os.path.basename(archive_name)
                chromium_utils.MaybeMakeDirectory(
                    os.path.join(self._staging_dir, stage_subdir))
                print 'chromium_utils.CopyFileToDir(%s, %s, dest_fn=%s)' % (
                    os.path.join(self._build_dir, fileobj['filename']),
                    os.path.join(self._staging_dir, stage_subdir), stage_fn)
                if not self.options.dry_run:
                    chromium_utils.CopyFileToDir(
                        os.path.join(self._build_dir, fileobj['filename']),
                        os.path.join(self._staging_dir, stage_subdir),
                        dest_fn=stage_fn)
                archive_files.append(
                    os.path.join(self._staging_dir, archive_name))
            else:
                custom_archive = self.CreateArchiveFile(
                    archive_name,
                    [f['filename'] for f in archives_list[archive_name]])[1]
                print 'Adding %s to be archived.' % (custom_archive)
                archive_files.append(custom_archive)

        # Generate a revisions file containing the Chromium, WebKit, and V8
        # revision information.
        self.GenerateRevisionFile()

        www_dir = os.path.join(self._www_dir_base, self._build_path_component)
        gs_bucket = self.options.factory_properties.get('gs_bucket', None)
        gs_acl = self.options.factory_properties.get('gs_acl', None)
        gs_base = None
        if gs_bucket:
            gs_base = '/'.join(
                [gs_bucket, self._build_name, self._build_path_component])
        self._UploadBuild(www_dir, self.revisions_path, archive_files, gs_base,
                          gs_acl)

        # Archive Linux packages (if any -- only created for Chrome builds).
        if chromium_utils.IsLinux():
            linux_packages = (glob.glob(
                os.path.join(self._build_dir,
                             '*-r%s_*.deb' % self._chromium_revision)))
            linux_packages.extend(
                glob.glob(
                    os.path.join(self._build_dir,
                                 '*-%s.*.rpm' % self._chromium_revision)))
            for package_file in linux_packages:
                print 'SshCopyFiles(%s, %s, %s)' % (
                    package_file, self.options.archive_host, www_dir)
            if not self.options.dry_run:
                print 'SshMakeDirectory(%s, %s)' % (self.options.archive_host,
                                                    www_dir)
                self.MySshMakeDirectory(self.options.archive_host, www_dir,
                                        gs_base)

                for package_file in linux_packages:
                    self.MyMakeWorldReadable(package_file, gs_base)
                    self.MySshCopyFiles(package_file,
                                        self.options.archive_host,
                                        www_dir,
                                        gs_base,
                                        gs_acl=gs_acl)
                    # Clean up archived packages, otherwise they keep
                    # accumulating since they have different filenames with
                    # each build.
                    os.unlink(package_file)

        self.UploadTests(www_dir, gs_base, gs_acl)

        if not self.options.dry_run:
            # Save the current build revision locally so we can compute a
            # changelog next time.
            self.SaveBuildRevisionToSpecifiedFile(self.last_change_file)

            # Record the latest revision in the developer archive directory.
            latest_file_path = os.path.join(self._www_dir_base, 'LATEST')
            if chromium_utils.IsWindows():
                print 'Saving revision to %s' % latest_file_path
                if gs_base:
                    self.CopyFileToGS(self.last_change_file,
                                      gs_base,
                                      '..',
                                      mimetype='text/plain',
                                      gs_acl=gs_acl)
                if not gs_base or self._dual_upload:
                    self.SaveBuildRevisionToSpecifiedFile(latest_file_path)
            elif chromium_utils.IsLinux() or chromium_utils.IsMac():
                # Files are created with umask 077 by default, so make it
                # world-readable before pushing to the web server.
                self.MyMakeWorldReadable(self.last_change_file, gs_base)
                print 'Saving revision to %s:%s' % (self.options.archive_host,
                                                    latest_file_path)
                self.MySshCopyFiles(self.last_change_file,
                                    self.options.archive_host,
                                    latest_file_path,
                                    gs_base,
                                    '..',
                                    mimetype='text/plain',
                                    gs_acl=gs_acl)
            else:
                raise NotImplementedError(
                    'Platform "%s" is not currently supported.' % sys.platform)

        if not_found_optional:
            sys.stderr.write('\n\nINFO: Optional File(s) not found: %s\n' %
                             ', '.join(not_found_optional))
        if not_found:
            sys.stderr.write('\n\nWARNING: File(s) not found: %s\n' %
                             ', '.join(not_found))
        return result
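
In both examples, ExpandWildcards receives a base directory and a list of relative names that may contain shell-style wildcards, and the expanded list is what later gets verified, zipped, and uploaded. A minimal, purely hypothetical invocation for reference (the directory layout and file names are made up, and ExpandWildcardsSketch from the note after Example #1 stands in for the real helper):

build_dir = '/tmp/chrome-build/out/Release'   # hypothetical build output dir
files_list = ['chrome', 'resources.pak', 'locales/*.pak']
archive_files = ExpandWildcardsSketch(build_dir, files_list)
# With en-US.pak and de.pak present under locales/, archive_files would be
# something like:
#   ['chrome', 'resources.pak', 'locales/en-US.pak', 'locales/de.pak']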