Example #1
  def testParseArchiveLists(self):
    STATIC_ARCHIVE = 'static_archive.zip'
    ARCHIVE_1 = 'archive_1.zip'
    ARCHIVE_2 = 'archive_2.zip'
    ARCHIVENAMES = [STATIC_ARCHIVE, ARCHIVE_1, ARCHIVE_2]
    files_cfg = CreateTestFilesCfg(self.temp_dir)
    arch = '64bit'
    buildtype = 'official'
    fparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    archives = fparser.ParseArchiveLists()
    self.assertEqual(archives.keys(), [STATIC_ARCHIVE])
    self.assertItemsEqual(
        [x['filename'] for x in archives[STATIC_ARCHIVE]],
        ['archive_allany.txt', 'subdirectory/archive_allany.txt'])

    # 32bit dev has additional files under the same archive name.
    arch = '32bit'
    buildtype = 'dev'
    fparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    archives = fparser.ParseArchiveLists()
    self.assertItemsEqual(archives.keys(), ARCHIVENAMES)
    self.assertItemsEqual(
        [x['filename'] for x in archives[STATIC_ARCHIVE]],
        ['archive_allany.txt', 'subdirectory/archive_allany.txt',
         'subdirectory/archive_dev32.txt'])
    self.assertItemsEqual(
        [x['filename'] for x in archives[ARCHIVE_1]],
        ['multiple_archive.txt', 'archive_1.txt'])
    self.assertItemsEqual(
        [x['filename'] for x in archives[ARCHIVE_2]],
        ['multiple_archive.txt', 'archive_2.txt'])
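For context, FILES.cfg is itself Python: it defines a FILES list of dicts that the parser filters by arch and buildtype. A minimal sketch of entries that would satisfy the assertions above (inferred from this test, not the actual CreateTestFilesCfg fixture):

# Sketch only: inferred from the assertions, not the real TEST_FILES_CFG.
FILES = [
  {'filename': 'archive_allany.txt',
   'arch': ['32bit', '64bit'], 'buildtype': ['dev', 'official'],
   'archive': 'static_archive.zip'},
  {'filename': 'subdirectory/archive_allany.txt',
   'arch': ['32bit', '64bit'], 'buildtype': ['dev', 'official'],
   'archive': 'static_archive.zip'},
  {'filename': 'subdirectory/archive_dev32.txt',
   'arch': ['32bit'], 'buildtype': ['dev'],
   'archive': 'static_archive.zip'},
  # A file can land in two archives via two entries with the same filename.
  {'filename': 'multiple_archive.txt',
   'arch': ['32bit'], 'buildtype': ['dev'], 'archive': 'archive_1.zip'},
  {'filename': 'archive_1.txt',
   'arch': ['32bit'], 'buildtype': ['dev'], 'archive': 'archive_1.zip'},
  {'filename': 'multiple_archive.txt',
   'arch': ['32bit'], 'buildtype': ['dev'], 'archive': 'archive_2.zip'},
  {'filename': 'archive_2.txt',
   'arch': ['32bit'], 'buildtype': ['dev'], 'archive': 'archive_2.zip'},
]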
Example #2
  def testWinParseSymbols(self):
    files_cfg = options.src_base + RealFilesCfgTest.WIN_PATH

    # There should be some official build symbols.
    fparser = archive_utils.FilesCfgParser(files_cfg, 'official', '32bit')
    official_list = fparser.ParseGroup('symsrc')
    self.assertTrue(official_list)

    # Windows symbols should be the same regardless of arch.
    fparser = archive_utils.FilesCfgParser(files_cfg, 'official', '64bit')
    official64_list = fparser.ParseGroup('symsrc')
    self.assertEqual(official64_list, official_list)
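The cross-arch equality holds if the Windows symsrc entries list both arches on the same rows; an illustrative sketch of one such entry (the filename is a plausible example, not quoted from the real FILES.cfg):

# Illustrative symsrc entry: identical for 32bit and 64bit parsers, which is
# why the two ParseGroup('symsrc') results compare equal above.
{'filename': 'chrome.dll.pdb',
 'arch': ['32bit', '64bit'],
 'buildtype': ['official'],
 'filegroup': ['symsrc']},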
Example #3
  def testOptionalFiles(self):
    files_cfg = CreateTestFilesCfg(self.temp_dir)
    optional_fn = 'allany_dev_optional.txt'
    arch = '64bit'
    buildtype = 'dev'
    fparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    self.assertTrue(fparser.IsOptional(optional_fn))

    # It's only optional for 'dev' builds.
    buildtype = 'official'
    fparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    self.assertFalse(fparser.IsOptional(optional_fn))
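This behavior suggests FILES.cfg entries carry an 'optional' attribute listing the buildtypes in which the file may legitimately be missing. A sketch of such an entry (inferred from the test, not quoted from the fixture):

# Inferred sketch: optional in 'dev' builds only, so IsOptional() returns
# True for a 'dev' parser and False for an 'official' one.
{'filename': 'allany_dev_optional.txt',
 'arch': ['32bit', '64bit'],
 'buildtype': ['dev', 'official'],
 'optional': ['dev']},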
Example #4
  def ParseFilesCfg(self, cfg_path):
    if cfg_path.startswith('svn://'):
      # Store the svn file so it will be automatically cleaned up in tearDown().
      self.files_cfg = FetchSvn(cfg_path, self.svn)
      cfg_path = self.files_cfg

    # There should always be some 32bit, official and dev files (otherwise
    # there's nothing to archive).
    arch = '32bit'
    buildtype = 'official'
    fparser = archive_utils.FilesCfgParser(cfg_path, buildtype, arch)
    files_list = fparser.ParseLegacyList()
    self.assertTrue(files_list)
    fparser.buildtype = 'dev'
    files_list = fparser.ParseLegacyList()
    self.assertTrue(files_list)

    # An arbitrary buildtype shouldn't return anything.
    fparser.buildtype = 'bogus'
    files_list = fparser.ParseLegacyList()
    self.assertFalse(files_list)

    # Check for incomplete/incorrect settings.
    # buildtype must exist and be in ['dev', 'official']
    self.assertFalse([f for f in fparser._files_cfg  # pylint: disable=W0212
                      if not f.get('buildtype')
                      or set(f['buildtype']) - set(['dev', 'official'])])
Example #5
  def testCreateZipExtArchive(self):
    files_cfg = CreateTestFilesCfg(self.tool_dir)
    CreateFileSetInDir(self.build_dir, [i['filename'] for i in TEST_FILES_CFG])
    archive_name = 'test_with_ext.zip'
    arch = '64bit'
    buildtype = 'official'
    fparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    files_list = fparser.ParseLegacyList()
    zip_dir, zip_file_path = archive_utils.CreateArchive(
        self.build_dir, self.temp_dir, files_list, archive_name)
    self.assertTrue(zip_dir)
    self.assertTrue(zip_file_path)
    self.assertTrue(os.path.exists(zip_file_path))
    self.assertEqual(os.path.basename(zip_file_path), archive_name)
    self.verifyZipFile(zip_dir, zip_file_path, os.path.basename(zip_dir),
                       files_list)

    # Creating the archive twice is wasteful, but shouldn't fail (e.g. due to
    # conflicts with existing zip_dir or zip_file_path). This also tests the
    # condition on the bots where they don't clean up their staging directory
    # between runs.
    zip_dir, zip_file_path = archive_utils.CreateArchive(
        self.build_dir, self.temp_dir, files_list, archive_name)
    self.assertTrue(zip_dir)
    self.assertTrue(zip_file_path)
    self.assertTrue(os.path.exists(zip_file_path))
    self.verifyZipFile(zip_dir, zip_file_path, os.path.basename(zip_dir),
                       files_list)
Example #6
  def testParserChange(self):
    """Changing parser criteria should be the same as creating a new one."""
    files_cfg = CreateTestFilesCfg(self.temp_dir)
    arch = '64bit'
    buildtype = 'dev'
    oldfparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    old_dev_list = oldfparser.ParseLegacyList()
    buildtype = 'official'
    oldfparser.buildtype = buildtype
    old_official_list = oldfparser.ParseLegacyList()
    # The changed parser should return different ParseLegacyList results.
    self.assertNotEqual(sorted(old_dev_list), sorted(old_official_list))

    newfparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    new_official_list = newfparser.ParseLegacyList()
    # The new parser and changed parser should return the same data.
    self.assertEqual(sorted(old_official_list), sorted(new_official_list))
    old_allany_list = oldfparser.ParseGroup('allany')
    new_allany_list = newfparser.ParseGroup('allany')
    self.assertEqual(sorted(old_allany_list), sorted(new_allany_list))
Example #7
  def testDirectArchive(self):
    files_cfg = CreateTestFilesCfg(self.temp_dir)
    arch = '64bit'
    buildtype = 'dev'
    fparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    archives = fparser.ParseArchiveLists()
    self.assertTrue(fparser.IsDirectArchive(
        archives['renamed_direct_archive.txt']))
    self.assertTrue(fparser.IsDirectArchive(
        archives['dev64_implied_direct_archive.txt']))
    self.assertFalse(fparser.IsDirectArchive(archives['static_archive.zip']))
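The two flavors exercised here suggest entries like the following sketch (inferred from the test names and from ArchiveBuild's handling below, where a direct archive is copied under the archive name instead of being zipped; the first filename is hypothetical):

# Explicit direct archive: the source file is staged under a new name.
{'filename': 'some_source_file.txt',          # hypothetical name
 'arch': ['64bit'], 'buildtype': ['dev'],
 'archive': 'renamed_direct_archive.txt',
 'direct_archive': 1},
# Implied direct archive: presumably inferred from a single-file, non-zip
# archive name.
{'filename': 'dev64_implied_direct_archive.txt',
 'arch': ['64bit'], 'buildtype': ['dev'],
 'archive': 'dev64_implied_direct_archive.txt'},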
Example #8
  def testCreateEmptyArchive(self):
    files_cfg = CreateTestFilesCfg(self.tool_dir)
    archive_name = 'test_empty'
    arch = '64bit'
    buildtype = 'nosuchtype'
    fparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    files_list = fparser.ParseLegacyList()
    zip_dir, zip_file_path = archive_utils.CreateArchive(
        self.build_dir, self.temp_dir, files_list, archive_name)
    self.assertFalse(zip_dir)
    self.assertFalse(zip_file_path)
    self.assertFalse(os.path.exists(zip_file_path))
Example #9
  def testParseLegacyList(self):
    files_cfg = CreateTestFilesCfg(self.temp_dir)
    arch = '64bit'
    buildtype = 'official'
    fparser = archive_utils.FilesCfgParser(files_cfg, buildtype, arch)
    files_list = fparser.ParseLegacyList()
    # Verify FILES.cfg was parsed correctly.
    for i in TEST_FILES_CFG:
      if arch in i['arch'] and buildtype in i['buildtype']:
        # 'archive' flagged files shouldn't be included in the default parse.
        if i.get('archive'):
          self.assertFalse(i['filename'] in files_list)
        else:
          self.assertTrue(i['filename'] in files_list)
          files_list.remove(i['filename'])
          # No duplicate files.
          self.assertEqual(files_list.count(i['filename']), 0)
    # No unexpected files.
    self.assertEqual(len(files_list), 0)
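Based on these assertions, ParseLegacyList appears to select entries whose arch and buildtype match the parser and that carry no 'archive' attribute. A standalone sketch of that rule (an assumption about the parser's internals, not its actual code):

def legacy_selects(entry, arch, buildtype):
  # Sketch of the selection rule implied by the test above.
  return (arch in entry.get('arch', [])
          and buildtype in entry.get('buildtype', [])
          and not entry.get('archive'))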
Example #10
    def ArchiveBuild(self):
        """Zips build files and uploads them, their symbols, and a change log."""
        result = 0
        if self._build_revision is None:
            raise archive_utils.StagingError('No build revision was provided')
        print 'Staging in %s' % self._staging_dir

        fparser = archive_utils.FilesCfgParser(self._files_file,
                                               self.options.mode,
                                               self.options.arch)
        files_list = fparser.ParseLegacyList()
        self._archive_files = archive_utils.ExpandWildcards(
            self._build_dir, files_list)
        archives_list = fparser.ParseArchiveLists()
        # Check files and revision numbers.
        all_files_list = self._archive_files + [
            item['filename'] for sublist in archives_list.values()
            for item in sublist
        ]
        all_files_list.append(self._version_file)
        not_found = archive_utils.VerifyFiles(all_files_list, self._build_dir,
                                              self.options.ignore)
        not_found_optional = []
        for bad_fn in not_found[:]:
            if fparser.IsOptional(bad_fn):
                not_found_optional.append(bad_fn)
                not_found.remove(bad_fn)
                # Remove it from all file lists so we don't try to process it.
                if bad_fn in self._archive_files:
                    self._archive_files.remove(bad_fn)
                for archive_list in archives_list.values():
                    archive_list[:] = [
                        x for x in archive_list if bad_fn != x['filename']
                    ]
        # TODO(mmoss): Now that we can declare files optional in FILES.cfg, should
        # we only allow not_found_optional, and fail on any leftover not_found
        # files?

        print 'last change: %s' % self._build_revision
        previous_revision = self.GetLastBuildRevision()
        # TODO(agable): This conditional only works for svn because git can't easily
        # compare revisions.
        if (slave_utils.GitOrSubversion(self._src_dir) == 'svn'
                and self._build_revision <= previous_revision):
            # If there have been no changes, report it but don't raise an exception.
            # Someone might have pushed the "force build" button.
            print 'No changes since last build (r%s <= r%s)' % (
                self._build_revision, previous_revision)
            return 0

        print 'build name: %s' % self._build_name

        archive_name = 'chrome-%s.zip' % self.TargetPlatformName()
        archive_file = self.CreateArchiveFile(archive_name,
                                              self._archive_files)[1]

        # Handle any custom archives.
        # TODO(mmoss): Largely copied from stage_build.py. Maybe refactor more of
        # this into archive_utils.py.
        archive_files = [archive_file]
        for archive_name in archives_list:
            # The list might be empty if it was all 'not_found' optional files.
            if not archives_list[archive_name]:
                continue
            if fparser.IsDirectArchive(archives_list[archive_name]):
                fileobj = archives_list[archive_name][0]
                # Copy the file to the path specified in archive_name, which
                # might be different from the dirname or basename in
                # 'filename' (allowed by 'direct_archive').
                stage_subdir = os.path.dirname(archive_name)
                stage_fn = os.path.basename(archive_name)
                chromium_utils.MaybeMakeDirectory(
                    os.path.join(self._staging_dir, stage_subdir))
                print 'chromium_utils.CopyFileToDir(%s, %s, dest_fn=%s)' % (
                    os.path.join(self._build_dir, fileobj['filename']),
                    os.path.join(self._staging_dir, stage_subdir), stage_fn)
                if not self.options.dry_run:
                    chromium_utils.CopyFileToDir(
                        os.path.join(self._build_dir, fileobj['filename']),
                        os.path.join(self._staging_dir, stage_subdir),
                        dest_fn=stage_fn)
                archive_files.append(
                    os.path.join(self._staging_dir, archive_name))
            else:
                custom_archive = self.CreateArchiveFile(
                    archive_name,
                    [f['filename'] for f in archives_list[archive_name]])[1]
                print 'Adding %s to be archived.' % (custom_archive)
                archive_files.append(custom_archive)

        # Generate a revisions file which contains the Chromium/WebKit/V8
        # revision information.
        self.GenerateRevisionFile()

        www_dir = os.path.join(self._www_dir_base, self._build_path_component)
        gs_bucket = self.options.factory_properties.get('gs_bucket', None)
        gs_acl = self.options.factory_properties.get('gs_acl', None)
        gs_base = None
        if gs_bucket:
            gs_base = '/'.join(
                [gs_bucket, self._build_name, self._build_path_component])
        self._UploadBuild(www_dir, self.revisions_path, archive_files, gs_base,
                          gs_acl)

        # Archive Linux packages (if any -- only created for Chrome builds).
        if chromium_utils.IsLinux():
            linux_packages = (glob.glob(
                os.path.join(self._build_dir,
                             '*-r%s_*.deb' % self._chromium_revision)))
            linux_packages.extend(
                glob.glob(
                    os.path.join(self._build_dir,
                                 '*-%s.*.rpm' % self._chromium_revision)))
            for package_file in linux_packages:
                print 'SshCopyFiles(%s, %s, %s)' % (
                    package_file, self.options.archive_host, www_dir)
            if not self.options.dry_run:
                print 'SshMakeDirectory(%s, %s)' % (self.options.archive_host,
                                                    www_dir)
                self.MySshMakeDirectory(self.options.archive_host, www_dir,
                                        gs_base)

                for package_file in linux_packages:
                    self.MyMakeWorldReadable(package_file, gs_base)
                    self.MySshCopyFiles(package_file,
                                        self.options.archive_host,
                                        www_dir,
                                        gs_base,
                                        gs_acl=gs_acl)
                    # Clean up archived packages; otherwise they keep
                    # accumulating, since they have different filenames with
                    # each build.
                    os.unlink(package_file)

        self.UploadTests(www_dir, gs_base, gs_acl)

        if not self.options.dry_run:
            # Save the current build revision locally so we can compute a
            # changelog next time.
            self.SaveBuildRevisionToSpecifiedFile(self.last_change_file)

            # Record the latest revision in the developer archive directory.
            latest_file_path = os.path.join(self._www_dir_base, 'LATEST')
            if chromium_utils.IsWindows():
                print 'Saving revision to %s' % latest_file_path
                if gs_base:
                    self.CopyFileToGS(self.last_change_file,
                                      gs_base,
                                      '..',
                                      mimetype='text/plain',
                                      gs_acl=gs_acl)
                if not gs_base or self._dual_upload:
                    self.SaveBuildRevisionToSpecifiedFile(latest_file_path)
            elif chromium_utils.IsLinux() or chromium_utils.IsMac():
                # Files are created umask 077 by default, so make it world-readable
                # before pushing to web server.
                self.MyMakeWorldReadable(self.last_change_file, gs_base)
                print 'Saving revision to %s:%s' % (self.options.archive_host,
                                                    latest_file_path)
                self.MySshCopyFiles(self.last_change_file,
                                    self.options.archive_host,
                                    latest_file_path,
                                    gs_base,
                                    '..',
                                    mimetype='text/plain',
                                    gs_acl=gs_acl)
            else:
                raise NotImplementedError(
                    'Platform "%s" is not currently supported.' % sys.platform)

        if not_found_optional:
            sys.stderr.write('\n\nINFO: Optional File(s) not found: %s\n' %
                             ', '.join(not_found_optional))
        if not_found:
            sys.stderr.write('\n\nWARNING: File(s) not found: %s\n' %
                             ', '.join(not_found))
        return result
Example #11
def DiffFilesCfg(cfg_path, svn):
  """Parse local FILES.cfg and show changes so they can be manually verified."""

  print '\nDiff parsing "%s" ...' % cfg_path
  d = difflib.Differ()
  def CompareLists(svnlist, newlist, msg):
    diffs = []
    for x in d.compare(svnlist, newlist):
      if x.startswith('- '):
        diffs.append('  DELETION: %s' % x[2:])
      elif x.startswith('+ '):
        diffs.append('  ADDITION: %s' % x[2:])
    if diffs:
      print msg
      print '\n'.join(diffs)

  svn_cfg = FetchSvn(RealFilesCfgTest.SVNBASE + cfg_path, svn)
  svnparser = archive_utils.FilesCfgParser(svn_cfg, None, None)
  os.unlink(svn_cfg)
  newparser = archive_utils.FilesCfgParser(options.src_base + cfg_path, None,
                                           None)

  # Determine the "parsable values" in the two versions.
  archs = []
  buildtypes = []
  groups = []
# pylint: disable=W0212
  for item in newparser._files_cfg + svnparser._files_cfg:
# pylint: enable=W0212
    if item.get('arch'):
      archs.extend(item['arch'])
    if item.get('buildtype'):
      buildtypes.extend(item['buildtype'])
    if item.get('filegroup'):
      groups.extend(item['filegroup'])
  archs = set(archs)
  buildtypes = set(buildtypes)
  groups = set(groups)

  # Legacy list handling (i.e. default filegroup).
  print '\nChecking ParseLegacyList() ...'
  for arch, buildtype in itertools.product(archs, buildtypes):
    msg = '%s:%s' % (arch, buildtype)
    newparser.arch = svnparser.arch = arch
    newparser.buildtype = svnparser.buildtype = buildtype
    svn_legacy_list = svnparser.ParseLegacyList()
    new_legacy_list = newparser.ParseLegacyList()
    CompareLists(svn_legacy_list, new_legacy_list, msg)

  print '\nChecking ParseGroup() ...'
  for group, arch, buildtype in itertools.product(groups, archs, buildtypes):
    msg = '%s:%s:%s' % (group, arch, buildtype)
    newparser.arch = svnparser.arch = arch
    newparser.buildtype = svnparser.buildtype = buildtype
    svn_group_list = svnparser.ParseGroup(group)
    new_group_list = newparser.ParseGroup(group)
    CompareLists(svn_group_list, new_group_list, msg)

  print '\nChecking Archives() ...'
  for arch, buildtype in itertools.product(archs, buildtypes):
    newparser.arch = svnparser.arch = arch
    newparser.buildtype = svnparser.buildtype = buildtype
    svn_archive_lists = svnparser.ParseArchiveLists()
    new_archive_lists = newparser.ParseArchiveLists()
    archives = set(svn_archive_lists.keys() + new_archive_lists.keys())
    for archive in archives:
      msg = '%s:%s:%s' % (archive, arch, buildtype)
      CompareLists([x['filename'] for x in svn_archive_lists.get(archive, [])],
                   [x['filename'] for x in new_archive_lists.get(archive, [])],
                   msg)
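A plausible invocation, reusing WIN_PATH from Example #2 (illustrative; the svn argument is whatever handle FetchSvn() expects, shown here as a placeholder):

# Illustrative only: diffs the checked-in Windows FILES.cfg against the local
# copy for every arch/buildtype/filegroup combination found in either file.
DiffFilesCfg(RealFilesCfgTest.WIN_PATH, svn)  # 'svn' is a placeholder handle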