def testGetBuildUrl(self):
  """Checks _VerifyBuildUrl against gs:// and http:// base URLs.

  Exercises three shapes of base URL for each scheme: no trailing slash,
  a trailing slash, and a URL that already ends in the zip filename.
  Exactly one slash must separate the base URL from the filename.
  """
  options = MockOptions()
  zip_base, _version_suffix = slave_utils.GetZipFileNames(
      options.build_properties, build_revision=self._build_revision,
      webkit_revision=self._webkit_revision, extract=True)
  expected_gs = 'gs://foo/Win/' + zip_base + '.zip'
  expected_http = 'http://foo/Win/' + zip_base + '.zip'
  # (candidate URL, expected resolved URL), in the same order as before:
  # without slash, with slash, then with the filename already appended.
  checks = [
      ('gs://foo/Win', expected_gs),
      ('http://foo/Win', expected_http),
      ('gs://foo/Win/', expected_gs),
      ('http://foo/Win/', expected_http),
      (expected_gs, expected_gs),
      (expected_http, expected_http),
  ]
  for candidate, expected in checks:
    self._VerifyBuildUrl(options, candidate, expected)
def testGetBuildUrl(self):
  """Checks _VerifyBuildUrl against gs:// and http:// base URLs."""
  options = MockOptions()
  # version_suffix is not tested, since it would just be copying of
  # implementation details from extract_build.py into this test.
  src_dir = os.path.dirname(_BUILD_DIR)
  zip_base, _version_suffix = slave_utils.GetZipFileNames(
      options.build_properties, src_dir, None, extract=True)
  expected_gs = 'gs://foo/Win/' + zip_base + '.zip'
  expected_http = 'http://foo/Win/' + zip_base + '.zip'
  # Verify that exactly one slash separates base URL and filename: a base
  # without a trailing slash, one with a trailing slash, and a URL that
  # already names the zip file must all resolve to the same URL.
  checks = [
      ('gs://foo/Win', expected_gs),
      ('http://foo/Win', expected_http),
      ('gs://foo/Win/', expected_gs),
      ('http://foo/Win/', expected_http),
      (expected_gs, expected_gs),
      (expected_http, expected_http),
  ]
  for candidate, expected in checks:
    self._VerifyBuildUrl(options, candidate, expected)
def testNormalBuildNameTryBot(self):
  """On a try server the version suffix is the build number, not revision."""
  props = dict(mastername='master.tryserver.chromium', buildnumber=666)
  base_name, suffix = slave_utils.GetZipFileNames(props, 123)
  self._verifyBaseName(base_name)
  self.assertEqual('_666', suffix)
def testNormalBuildNameTryBotExtractWithParentBuildNumber(self):
  """With extract=True the parent build number wins over the local one."""
  props = dict(
      mastername='master.tryserver.chromium',
      buildnumber=666,
      parent_buildnumber=999)
  base_name, suffix = slave_utils.GetZipFileNames(props, 123, extract=True)
  self._verifyBaseName(base_name)
  self.assertEqual('_999', suffix)
def GetBuildUrl(options, build_revision, webkit_revision=None):
  """Compute the url to download the build from.

  This will use as a base string, in order of preference:
  0) options.build_archive_url (returned verbatim, no archive name derived)
  1) options.build_url
  2) a default URL built from the parent builder/slave options.

  The last type of construction is not compatible with the 'force build'
  button.

  Args:
    options: options object as specified by parser below.
    build_revision: Revision for the build.
    webkit_revision: WebKit revision (optional).

  Returns:
    A (versioned_url, archive_name) tuple.  archive_name is None when
    options.build_archive_url was supplied.
  """
  if options.build_archive_url:
    return options.build_archive_url, None

  base_filename, version_suffix = slave_utils.GetZipFileNames(
      options.master_name, options.build_number, options.parent_build_number,
      build_revision, webkit_revision, extract=True)

  replace_dict = {
      'base_filename': base_filename,
      'parentname': options.parent_builder_name,
      'parentslavename': options.parent_slave_name,
      'parent_builddir': options.parent_build_dir,
  }
  # If builddir isn't specified, assume buildbot used the builder name
  # as the root folder for the build.
  if not replace_dict.get('parent_builddir') and replace_dict.get('parentname'):
    replace_dict['parent_builddir'] = replace_dict['parentname']

  url = options.build_url
  if not url:
    url = ('http://%(parentslavename)s/b/build/slave/%(parent_builddir)s/'
           'chrome_staging')
  if not url.endswith('.zip'):  # assume filename not specified
    # Append the filename to the base URL. First strip any trailing slashes
    # so that exactly one slash separates the two.
    url = url.rstrip('/')
    url = '%s/%s' % (url, '%(base_filename)s.zip')

  url = url % replace_dict
  archive_name = url.split('/')[-1]
  versioned_url = url.replace('.zip', version_suffix + '.zip')
  return versioned_url, archive_name
def GetBuildUrl(abs_build_dir, options):
  """Compute the url to download the build from.

  This will use as a base string, in order of preference:
  1) options.build_url
  2) options.factory_properties.build_url
  3) build url constructed from build_properties.  This last type of
     construction is not compatible with the 'force build' button.

  Args:
    abs_build_dir: Full path to source directory.
    options: options object as specified by parser below.

  Returns:
    A (url, versioned_url) tuple.
  """
  webkit_dir = (os.path.join(abs_build_dir, '..', options.webkit_dir)
                if options.webkit_dir else None)
  base_filename, version_suffix = slave_utils.GetZipFileNames(
      options.build_properties, os.path.dirname(abs_build_dir), webkit_dir,
      extract=True)

  substitutions = dict(options.build_properties)
  # If builddir isn't specified, assume buildbot used the builder name
  # as the root folder for the build.
  if substitutions.get('parentname') and not substitutions.get(
      'parent_builddir'):
    substitutions['parent_builddir'] = substitutions.get('parentname', '')
  substitutions['base_filename'] = base_filename

  base_url = options.build_url or options.factory_properties.get('build_url')
  if not base_url:
    base_url = ('http://%(parentslavename)s/b/build/slave/%(parent_builddir)s/'
                'chrome_staging')
  if base_url[-4:] != '.zip':  # assume filename not specified
    # Append the filename to the base URL, stripping trailing slashes first
    # so only a single slash is added.
    base_url = '%s/%s' % (base_url.rstrip('/'), '%(base_filename)s.zip')

  url = base_url % substitutions
  return url, url.replace('.zip', version_suffix + '.zip')
def Archive(options):
  """Zips the build output, stages it, and publishes a download URL.

  Builds a filtered list of files from the build output directory, zips it
  into an unversioned archive, derives a versioned copy, prunes old archives
  from the staging directory, and either uploads the result to Google
  Storage (gs:// build URLs) or exposes it over HTTP from this slave.

  Args:
    options: parsed command-line options; reads src_dir, cros_board, target,
        build_revision/webkit_revision (optional overrides), master_name,
        build_number, parent_build_number, append_deps_patch_sha, build_url,
        gs_acl and slave_name.

  Returns:
    A dict containing 'zip_url' and, when uploading to Google Storage,
    also 'storage_url'.
  """
  build_dir = build_directory.GetBuildOutputDirectory(
      options.src_dir, options.cros_board)
  build_dir = os.path.abspath(os.path.join(build_dir, options.target))

  staging_dir = slave_utils.GetStagingDir(options.src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  # Use explicitly-passed revisions when present; otherwise derive them
  # from the checkout.
  if not options.build_revision:
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        options.src_dir, options.webkit_dir, options.revision_dir)
  else:
    build_revision = options.build_revision
    webkit_revision = options.webkit_revision

  unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
      options.master_name, options.build_number, options.parent_build_number,
      build_revision, webkit_revision,
      use_try_buildnumber=(not options.append_deps_patch_sha))

  # Optionally tag the version suffix with the sha found in src/DEPS.sha so
  # consumers can identify the exact patched build.
  if options.append_deps_patch_sha:
    deps_sha = os.path.join('src', 'DEPS.sha')
    if os.path.exists(deps_sha):
      sha = open(deps_sha).read()
      version_suffix = '%s_%s' % (version_suffix, sha.strip())
      print 'Appending sha of the patch: %s' % sha
    else:
      print 'DEPS.sha file not found, not appending sha.'

  print 'Full Staging in %s' % staging_dir
  print 'Build Directory %s' % build_dir

  # Include the revision file in tarballs
  WriteRevisionFile(build_dir, build_revision)

  # Copy the crt files if necessary.
  if options.target == 'Debug' and chromium_utils.IsWindows():
    CopyDebugCRT(build_dir)

  # Build the list of files to archive.
  root_files = os.listdir(build_dir)

  # Remove initial\chrome.ilk. The filtering is only done on toplevel files,
  # and we can't exclude everything in initial since initial\chrome.dll.pdb is
  # needed in the archive. (And we can't delete it on disk because that would
  # slow down the next incremental build).
  if 'initial' in root_files:
    # Expand 'initial' directory by its contents, so that initial\chrome.ilk
    # will be filtered out by the blacklist.
    index = root_files.index('initial')
    root_files[index:index + 1] = [
        os.path.join('initial', f)
        for f in os.listdir(os.path.join(build_dir, 'initial'))
    ]

  path_filter = PathMatcher(options)
  print path_filter
  print('\nActually excluded: %s' %
        [f for f in root_files if not path_filter.Match(f)])

  zip_file_list = [f for f in root_files if path_filter.Match(f)]

  # TODO(yzshen): Once we have swarming support ready, we could use it to
  # archive run time dependencies of tests and remove this step.
  mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
  print 'Include mojom files: %s' % mojom_files
  zip_file_list.extend(mojom_files)

  zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                                    unversioned_base_name)

  zip_base, zip_ext, versioned_file = MakeVersionedArchive(
      zip_file, version_suffix, options)

  prune_limit = 10
  if options.build_url.startswith('gs://'):
    # Don't keep builds lying around when uploading them to google storage.
    prune_limit = 3
  PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

  # Update the latest revision file in the staging directory
  # to allow testers to figure out the latest packaged revision
  # without downloading tarballs.
  revision_file = WriteRevisionFile(staging_dir, build_revision)

  urls = {}
  if options.build_url.startswith('gs://'):
    zip_url = UploadToGoogleStorage(versioned_file, revision_file,
                                    options.build_url, options.gs_acl)
    storage_url = ('https://storage.googleapis.com/%s/%s' %
                   (options.build_url[len('gs://'):],
                    os.path.basename(versioned_file)))
    urls['storage_url'] = storage_url
  else:
    # Translate the local staging path into an HTTP URL served by this slave.
    staging_path = (os.path.splitdrive(versioned_file)[1].replace(
        os.path.sep, '/'))
    zip_url = 'http://' + options.slave_name + staging_path
  urls['zip_url'] = zip_url

  return urls
def testWebKitName(self):
  """A WebKit revision adds a '_wk<rev>' component before the build rev."""
  base_name, suffix = slave_utils.GetZipFileNames({}, 123, 456)
  self._verifyBaseName(base_name)
  self.assertEqual('_wk456_123', suffix)
def dummy():
  """Calls GetZipFileNames with extract=True; the result is discarded.

  NOTE(review): relies on a module-level `build_properties` name that is
  not defined in this block — confirm it exists at call time.
  """
  _ = slave_utils.GetZipFileNames(build_properties, 123, extract=True)
def testNormalBuildName(self):
  """Without try-bot properties the suffix is just the build revision."""
  base_name, suffix = slave_utils.GetZipFileNames({}, 123)
  self._verifyBaseName(base_name)
  self.assertEqual('_123', suffix)
def testNormalBuildNameTryBotExtractWithParentBuildNumber(self):
  """With extract=True the parent build number wins over the local one."""
  base_name, suffix = slave_utils.GetZipFileNames(
      'master.tryserver.chromium.linux', 666, 999, 123, extract=True)
  self._verifyBaseName(base_name)
  self.assertEqual('_999', suffix)
def dummy():
  """Calls GetZipFileNames positionally with extract=True; result discarded."""
  _ = slave_utils.GetZipFileNames(
      'master.tryserver.chromium.linux', 666, None, 123, extract=True)
def testNormalBuildNameTryBot(self):
  """On a try server the version suffix is the build number, not revision."""
  base_name, suffix = slave_utils.GetZipFileNames(
      'master.tryserver.chromium.linux', 666, None, 123)
  self._verifyBaseName(base_name)
  self.assertEqual('_666', suffix)
def Archive(options):
  """Zips the build output, stages it, and publishes a download URL.

  Builds a filtered list of files from the build output directory (expanding
  directories one level deep), zips it into an unversioned archive, derives
  a versioned copy, prunes old archives from the staging directory, and
  either uploads the result to Google Storage (gs:// build URLs) or exposes
  it over HTTP from this slave.

  Args:
    options: parsed command-line options; reads src_dir, cros_board, target,
        staging_dir (optional), build_revision/webkit_revision (optional
        overrides), master_name, build_number, parent_build_number,
        append_deps_patch_sha, strip_files, build_url, gs_acl,
        gsutil_py_path and slave_name.

  Returns:
    A dict containing 'zip_url' and, when uploading to Google Storage,
    also 'storage_url'.
  """
  build_dir = build_directory.GetBuildOutputDirectory(
      options.src_dir, options.cros_board)
  build_dir = os.path.abspath(os.path.join(build_dir, options.target))

  # An explicit staging dir overrides the default; create it if needed.
  staging_dir = (options.staging_dir or
                 slave_utils.GetStagingDir(options.src_dir))
  if not os.path.exists(staging_dir):
    os.makedirs(staging_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  # Use explicitly-passed revisions when present; otherwise derive them
  # from the checkout.
  if not options.build_revision:
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        options.src_dir, options.webkit_dir, options.revision_dir)
  else:
    build_revision = options.build_revision
    webkit_revision = options.webkit_revision

  unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
      options.master_name, options.build_number, options.parent_build_number,
      build_revision, webkit_revision,
      use_try_buildnumber=(not options.append_deps_patch_sha))

  # TODO(robertocn): Remove this if no one other than bisect uses it.
  # Optionally tag the version suffix with the sha found in src/DEPS.sha so
  # consumers can identify the exact patched build.
  if options.append_deps_patch_sha:
    deps_sha = os.path.join('src', 'DEPS.sha')
    if os.path.exists(deps_sha):
      sha = open(deps_sha).read()
      version_suffix = '%s_%s' % (version_suffix, sha.strip())
      print 'Appending sha of the patch: %s' % sha
    else:
      print 'DEPS.sha file not found, not appending sha.'

  print 'Full Staging in %s' % staging_dir
  print 'Build Directory %s' % build_dir

  # Include the revision file in tarballs
  WriteRevisionFile(build_dir, build_revision)

  # Copy the crt files if necessary.
  if options.target == 'Debug' and chromium_utils.IsWindows():
    CopyDebugCRT(build_dir)

  # Build the list of files to archive.
  root_files = os.listdir(build_dir)

  path_filter = PathMatcher(options)
  # Expand one level deep so that secondary toolchains can be filtered.
  # Iterate backwards so in-place slice replacement doesn't shift the
  # indices of entries not yet visited.
  for i in xrange(len(root_files) - 1, -1, -1):
    path = root_files[i]
    # Don't expand directories that will be filtered out.
    if not path_filter.Match(path):
      continue
    abs_path = os.path.join(build_dir, path)
    if os.path.isdir(abs_path):
      root_files[i:i + 1] = [
          os.path.join(path, f) for f in os.listdir(abs_path)
      ]

  print path_filter
  print('\nActually excluded: %s' %
        [f for f in root_files if not path_filter.Match(f)])

  zip_file_list = [f for f in root_files if path_filter.Match(f)]

  # TODO(yzshen): Switch layout tests to use files from 'gen/layout_test_data'
  # and remove this.
  mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
  print 'Include mojom files: %s' % mojom_files
  zip_file_list.extend(mojom_files)

  layout_test_data_files = _LayoutTestFiles(build_dir)
  print 'Include layout test data: %s' % layout_test_data_files
  zip_file_list.extend(layout_test_data_files)

  zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                                    unversioned_base_name,
                                    strip_files=options.strip_files)

  zip_base, zip_ext, versioned_file = MakeVersionedArchive(
      zip_file, version_suffix, options)

  prune_limit = 10
  if options.build_url.startswith('gs://'):
    # Don't keep builds lying around when uploading them to google storage.
    prune_limit = 3
  PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

  # Update the latest revision file in the staging directory
  # to allow testers to figure out the latest packaged revision
  # without downloading tarballs.
  revision_file = WriteRevisionFile(staging_dir, build_revision)

  urls = {}
  if options.build_url.startswith('gs://'):
    zip_url = UploadToGoogleStorage(versioned_file, revision_file,
                                    options.build_url, options.gs_acl,
                                    options.gsutil_py_path)
    storage_url = ('https://storage.googleapis.com/%s/%s' %
                   (options.build_url[len('gs://'):],
                    os.path.basename(versioned_file)))
    urls['storage_url'] = storage_url
  else:
    # Translate the local staging path into an HTTP URL served by this slave.
    staging_path = (os.path.splitdrive(versioned_file)[1].replace(
        os.path.sep, '/'))
    zip_url = 'http://' + options.slave_name + staging_path
  urls['zip_url'] = zip_url

  return urls
def Archive(options):
  """Zips the build output, stages it, and optionally uploads it.

  Builds a filtered list of files from the build output directory, zips it
  into an unversioned archive, derives a versioned copy, prunes old archives
  from the staging directory, and uploads the result to Google Storage when
  the configured build URL is a gs:// URL.

  Args:
    options: parsed command-line options; reads src_dir, target,
        build_revision/webkit_revision (optional overrides), path_filter,
        build_url, build_properties and factory_properties
        (append_deps_patch_sha, prune_limit, build_url, gs_acl).

  Returns:
    0 on completion.
  """
  build_dir = build_directory.GetBuildOutputDirectory(options.src_dir)
  build_dir = os.path.abspath(os.path.join(build_dir, options.target))

  staging_dir = slave_utils.GetStagingDir(options.src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  # Use explicitly-passed revisions when present; otherwise derive them
  # from the checkout.
  if not options.build_revision:
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        options.src_dir, options.webkit_dir, options.revision_dir)
  else:
    build_revision = options.build_revision
    webkit_revision = options.webkit_revision

  append_deps_patch_sha = options.factory_properties.get(
      'append_deps_patch_sha')
  unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
      options.build_properties, build_revision, webkit_revision,
      use_try_buildnumber=(not append_deps_patch_sha))

  # Optionally tag the version suffix with the sha found in src/DEPS.sha so
  # consumers can identify the exact patched build.
  if append_deps_patch_sha:
    deps_sha = os.path.join('src', 'DEPS.sha')
    if os.path.exists(deps_sha):
      sha = open(deps_sha).read()
      version_suffix = '%s_%s' % (version_suffix, sha.strip())
      print 'Appending sha of the patch: %s' % sha
    else:
      print 'DEPS.sha file not found, not appending sha.'

  print 'Full Staging in %s' % staging_dir
  print 'Build Directory %s' % build_dir

  # Include the revision file in tarballs
  WriteRevisionFile(build_dir, build_revision)

  # Copy the crt files if necessary.
  if options.target == 'Debug' and chromium_utils.IsWindows():
    CopyDebugCRT(build_dir)

  # Build the list of files to archive.
  root_files = os.listdir(build_dir)

  # Remove initial\chrome.ilk. The filtering is only done on toplevel files,
  # and we can't exclude everything in initial since initial\chrome.dll.pdb is
  # needed in the archive. (And we can't delete it on disk because that would
  # slow down the next incremental build).
  if 'initial' in root_files:
    # Expand 'initial' directory by its contents, so that initial\chrome.ilk
    # will be filtered out by the blacklist.
    index = root_files.index('initial')
    root_files[index:index + 1] = [
        os.path.join('initial', f)
        for f in os.listdir(os.path.join(build_dir, 'initial'))
    ]

  path_filter = PathMatcher(options)
  print path_filter
  print('\nActually excluded: %s' %
        [f for f in root_files if not path_filter.Match(f)])

  zip_file_list = [f for f in root_files if path_filter.Match(f)]

  # TODO(yzshen): Once we have swarming support ready, we could use it to
  # archive run time dependencies of tests and remove this step.
  mojom_js_files = MojomJSFiles(build_dir)
  print 'Include mojom JavaScript files: %s' % mojom_js_files
  zip_file_list.extend(mojom_js_files)

  zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                                    unversioned_base_name,
                                    options.path_filter)

  zip_base, zip_ext, versioned_file = MakeVersionedArchive(
      zip_file, version_suffix, options)

  # prune_limit comes from factory_properties (default 10), clamped at 0.
  prune_limit = max(0, int(options.factory_properties.get('prune_limit', 10)))
  PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

  # Update the latest revision file in the staging directory
  # to allow testers to figure out the latest packaged revision
  # without downloading tarballs.
  revision_file = WriteRevisionFile(staging_dir, build_revision)

  build_url = (options.build_url or
               options.factory_properties.get('build_url', ''))
  if build_url.startswith('gs://'):
    gs_acl = options.factory_properties.get('gs_acl')
    UploadToGoogleStorage(versioned_file, revision_file, build_url, gs_acl)

  return 0