def __init__(self, options):
    """Stage builds keyed by the Chromium revision.

    Delegates to StagerBase, then overrides _build_revision with the
    Chromium revision and derives the sortable upload path component
    from the build's sort key.
    """
    StagerBase.__init__(self, options, None)
    # StagerBase.__init__ is expected to populate self._chromium_revision.
    self._build_revision = self._chromium_revision
    sort_key = chromium_utils.GetBuildSortKey(options)
    self._build_path_component = (
        chromium_utils.GetSortableUploadPathForSortKey(*sort_key))
def archive(options, args): # Disable 'unused argument' warning for 'args' | pylint: disable=W0613 build_dir = build_directory.GetBuildOutputDirectory() src_dir = os.path.abspath(os.path.dirname(build_dir)) build_dir = os.path.join(build_dir, options.target) revision_dir = options.factory_properties.get('revision_dir') primary_project = chromium_utils.GetPrimaryProject(options) build_sortkey_branch, build_sortkey_value = GetBuildSortKey( options, primary_project) build_git_commit = GetGitCommit(options, primary_project) staging_dir = slave_utils.GetStagingDir(src_dir) chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir) print 'Staging in %s' % build_dir # Build the list of files to archive. zip_file_list = [ f for f in os.listdir(build_dir) if ShouldPackageFile(f, options.target) ] subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix', '') pieces = [chromium_utils.PlatformName(), options.target.lower()] if subdir_suffix: pieces.append(subdir_suffix) subdir = '-'.join(pieces) # Components like v8 get a <name>-v8-component-<revision> infix. component = '' if revision_dir: component = '-%s-component' % revision_dir prefix = options.factory_properties.get('cf_archive_name', 'cf_archive') sortkey_path = chromium_utils.GetSortableUploadPathForSortKey( build_sortkey_branch, build_sortkey_value) zip_file_name = '%s-%s-%s%s-%s' % (prefix, chromium_utils.PlatformName(), options.target.lower(), component, sortkey_path) (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, zip_file_name, zip_file_list, build_dir, raise_error=True) chromium_utils.RemoveDirectory(zip_dir) if not os.path.exists(zip_file): raise StagingError('Failed to make zip package %s' % zip_file) chromium_utils.MakeWorldReadable(zip_file) # Report the size of the zip file to help catch when it gets too big. 
zip_size = os.stat(zip_file)[stat.ST_SIZE] print 'Zip file is %ld bytes' % zip_size gs_bucket = options.factory_properties.get('gs_bucket', None) gs_acl = options.factory_properties.get('gs_acl', None) gs_metadata = { GS_COMMIT_POSITION_NUMBER_KEY: build_sortkey_value, } if build_sortkey_branch: gs_metadata[ GS_COMMIT_POSITION_KEY] = chromium_utils.BuildCommitPosition( build_sortkey_branch, build_sortkey_value) if build_git_commit: gs_metadata[GS_GIT_COMMIT_KEY] = build_git_commit status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir, gs_acl=gs_acl, metadata=gs_metadata) if status: raise StagingError('Failed to upload %s to %s. Error %d' % (zip_file, gs_bucket, status)) else: # Delete the file, it is not needed anymore. os.remove(zip_file) return status
def main(): option_parser = optparse.OptionParser() chromium_utils.AddPropertiesOptions(option_parser) options, _ = option_parser.parse_args() if not os.path.exists('src'): raise Exception('ERROR: no src directory to package, exiting') try: revision_upload_path = chromium_utils.GetSortableUploadPathForSortKey( *chromium_utils.GetBuildSortKey(options)) except chromium_utils.NoIdentifiedRevision: revision_upload_path = 'NONE' completed_filename = '%s-%s.%s' % (options.factory_properties.get( 'package_filename', FILENAME), revision_upload_path, EXT) partial_filename = '%s.partial' % completed_filename chromium_utils.RunCommand(['rm', '-f', partial_filename]) if os.path.exists(partial_filename): raise Exception('ERROR: %s cannot be removed, exiting' % partial_filename) print '%s: Index generation...' % time.strftime('%X') indexing_successful = GenerateIndex() print '%s: Creating tar file...' % time.strftime('%X') packaging_successful = True find_command = [ 'find', 'src/', 'tools/', '/usr/include/', '-type', 'f', # The only files under src/out we want to package up # are index files.... '(', '-regex', '^src/out/.*\.index$', '-o', '(', # ... and generated sources... '-regex', '^src/out/.*/gen/.*', '-a', '(', '-name', '*.h', '-o', '-name', '*.cc', '-o', '-name', '*.cpp', '-o', '-name', '*.js', ')', '-a', # ... but none of the NaCL stuff. '!', '-regex', '^src/out/[^/]*/gen/lib[^/]*/.*', '-a', '!', '-regex', '^src/out/[^/]*/gen/sdk/.*', '-a', '!', '-regex', '^src/out/[^/]*/gen/tc_.*', ')', '-o', '!', '-regex', '^src/out/.*', ')', '-a', # Exclude all .svn directories, the native client toolchain # and the llvm build directory, and perf/data files. 
'!', '-regex', r'.*/\.svn/.*', '-a', '!', '-regex', r'.*/\.git/.*', '-a', '!', '-regex', '^src/native_client/toolchain/.*', '-a', '!', '-regex', '^src/native_client/.*/testdata/.*', '-a', '!', '-regex', '^src/third_party/llvm-build/.*', '-a', '!', '-regex', '^src/.*/\.cvsignore', '-a', '!', '-regex', '^src/chrome/tools/test/reference_build/.*', '-a', '!', '-regex', '^tools/perf/data/.*' ] try: if chromium_utils.RunCommand( find_command, pipes=[['tar', '-T-', '-cjvf', partial_filename] ]) != 0: raise Exception('ERROR: failed to create %s, exiting' % partial_filename) print '%s: Cleaning up google storage...' % time.strftime('%X') DeleteIfExists(completed_filename) DeleteIfExists(partial_filename) print '%s: Uploading...' % time.strftime('%X') status = slave_utils.GSUtilCopyFile(partial_filename, GSBASE, gs_acl=GSACL) if status != 0: raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' % (status, partial_filename, GSBASE)) print '%s: Finalizing google storage...' % time.strftime('%X') status = slave_utils.GSUtilMoveFile( '%s/%s' % (GSBASE, partial_filename), '%s/%s' % (GSBASE, completed_filename), gs_acl=GSACL) if status != 0: raise Exception('ERROR: GSUtilMoveFile error %d. "%s" -> "%s"' % (status, '%s/%s' % (GSBASE, partial_filename), '%s/%s' % (GSBASE, completed_filename))) (status, output) = slave_utils.GSUtilListBucket(GSBASE, ['-l']) if status != 0: raise Exception('ERROR: failed to get list of GSBASE, exiting' % GSBASE) regex = re.compile('\s*\d+\s+([-:\w]+)\s+%s/%s\n' % (GSBASE, completed_filename)) match_data = regex.search(output) modified_time = None if match_data: modified_time = match_data.group(1) if not modified_time: raise Exception('ERROR: could not get modified_time, exiting') print 'Last modified time: %s' % modified_time print '%s: Deleting old archives on google storage...' 
% time.strftime( '%X') regex = re.compile('\s*\d+\s+([-:\w]+)\s+(%s/.*%s.*)\n' % (GSBASE, EXT)) last_week = int(time.time()) - 7 * 24 * 60 * 60 for match_data in regex.finditer(output): timestamp = int( time.strftime( '%s', time.strptime(match_data.group(1), '%Y-%m-%dT%H:%M:%S'))) if timestamp < last_week: print 'Deleting %s...' % match_data.group(2) status = slave_utils.GSUtilDeleteFile(match_data.group(2)) if status != 0: raise Exception('ERROR: GSUtilDeleteFile error %d. "%s"' % (status, match_data.group(2))) except Exception, e: print str(e) packaging_successful = False