def UploadToGoogleStorage(
    versioned_file, revision_file, build_url, gs_acl, gsutil_py_path=None):
  override_gsutil = None
  if gsutil_py_path:
    override_gsutil = [sys.executable, gsutil_py_path]

  if slave_utils.GSUtilCopyFile(versioned_file, build_url, gs_acl=gs_acl,
                                override_gsutil=override_gsutil):
    raise chromium_utils.ExternalError(
        'gsutil returned non-zero status when uploading %s to %s!' %
        (versioned_file, build_url))
  print 'Successfully uploaded %s to %s' % (versioned_file, build_url)

  # The file showing the latest uploaded revision must be named LAST_CHANGE
  # locally since that filename is used in the GS bucket as well.
  last_change_file = os.path.join(os.path.dirname(revision_file),
                                  'LAST_CHANGE')
  shutil.copy(revision_file, last_change_file)
  if slave_utils.GSUtilCopyFile(last_change_file, build_url, gs_acl=gs_acl,
                                override_gsutil=override_gsutil):
    raise chromium_utils.ExternalError(
        'gsutil returned non-zero status when uploading %s to %s!' %
        (last_change_file, build_url))
  print 'Successfully uploaded %s to %s' % (last_change_file, build_url)
  os.remove(last_change_file)
  return '/'.join([build_url, os.path.basename(versioned_file)])
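# Example usage of UploadToGoogleStorage (a sketch; the paths, bucket URL and
# ACL below are hypothetical, not taken from any real builder config):
#
#   url = UploadToGoogleStorage(
#       '/staging/chrome-linux-12345.zip',  # versioned_file
#       '/staging/REVISION',                # revision_file, copied to LAST_CHANGE
#       'gs://example-bucket/linux',        # build_url
#       'public-read')                      # gs_acl
#   # url == 'gs://example-bucket/linux/chrome-linux-12345.zip'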
def _Run(self):
  chrome_path = os.path.join(os.pardir, 'src')
  with misc.ChDir(chrome_path):
    shell_utils.run(['git', 'config', '--local', 'user.name',
                     DEPS_ROLL_NAME])
    shell_utils.run(['git', 'config', '--local', 'user.email',
                     DEPS_ROLL_AUTHOR])

    auto_roll = os.path.join(misc.BUILDBOT_PATH, 'third_party',
                             'chromium_buildbot_tot', 'scripts', 'tools',
                             'blink_roller', 'auto_roll.py')
    # python auto_roll.py <project> <author> <path to chromium/src>
    cmd = ['python', auto_roll, 'skia', DEPS_ROLL_AUTHOR, chrome_path]
    exception = None
    try:
      output = shell_utils.run(cmd)
    except shell_utils.CommandFailedException as e:
      output = e.output
      # Suppress failure for "refusing to roll backwards."
      if not re.search(REGEXP_ROLL_TOO_OLD, output):
        exception = e

    bucket_url = gs_utils.GSUtils.with_gs_prefix(
        skia_vars.GetGlobalVariable('googlestorage_bucket'))
    match = re.search(REGEXP_ISSUE_CREATED, output)
    if match:
      issue = match.group('issue')
      print 'Found issue #', issue
      with open(FILENAME_CURRENT_ATTEMPT, 'w') as f:
        f.write(HTML_CONTENT % (ISSUE_URL_TEMPLATE % {'issue': issue}))
      slave_utils.GSUtilCopyFile(filename=FILENAME_CURRENT_ATTEMPT,
                                 gs_base=bucket_url, subdir=None,
                                 gs_acl='public-read')

    roll_status = None
    for regexp, status_msg in ROLL_STATUSES:
      match = re.search(regexp, output)
      if match:
        roll_status = status_msg % match.groupdict()
        break

    if roll_status:
      with open(FILENAME_ROLL_STATUS, 'w') as f:
        f.write(roll_status)
      slave_utils.GSUtilCopyFile(filename=FILENAME_ROLL_STATUS,
                                 gs_base=bucket_url, subdir=None,
                                 gs_acl='public-read')

    # pylint: disable=E0702
    if exception:
      raise exception
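# For illustration only: the status loop above assumes ROLL_STATUSES is a
# list of (regexp, status-message-template) pairs whose templates are filled
# in from the regexp's named groups. A hypothetical entry:
#
#   ROLL_STATUSES = [
#       (r'Issue created\. URL: .*/(?P<issue>\d+)',
#        'In progress - %(issue)s'),
#   ]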
def archive_layout(options, args):
  logging.basicConfig(level=logging.INFO,
                      format='%(asctime)s %(filename)s:%(lineno)-3d'
                             ' %(levelname)s %(message)s',
                      datefmt='%y%m%d %H:%M:%S')
  chrome_dir = os.path.abspath(options.build_dir)
  results_dir_basename = os.path.basename(options.results_dir)
  if options.results_dir is not None:
    options.results_dir = os.path.abspath(
        os.path.join(options.build_dir, options.results_dir))
  else:
    options.results_dir = chromium_utils.FindUpward(chrome_dir, RESULT_DIR)
  print 'Archiving results from %s' % options.results_dir
  staging_dir = slave_utils.GetStagingDir(chrome_dir)
  print 'Staging in %s' % staging_dir

  (actual_file_list,
   diff_file_list) = _CollectArchiveFiles(options.results_dir)
  zip_file = chromium_utils.MakeZip(staging_dir, results_dir_basename,
                                    actual_file_list, options.results_dir)[1]
  full_results_json = os.path.join(options.results_dir, 'full_results.json')

  # Extract the build name of this slave (e.g., 'chrome-release') from its
  # configuration file if not provided as a param.
  build_name = options.builder_name or slave_utils.SlaveBuildName(chrome_dir)
  build_name = re.sub('[ .()]', '_', build_name)
  last_change = str(slave_utils.SubversionRevision(chrome_dir))
  print 'last change: %s' % last_change
  print 'build name: %s' % build_name
  print 'host name: %s' % socket.gethostname()

  # Where to save layout test results.
  dest_parent_dir = os.path.join(config.Archive.www_dir_base,
                                 results_dir_basename.replace('-', '_'),
                                 build_name)
  dest_dir = os.path.join(dest_parent_dir, last_change)

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  if gs_bucket:
    gs_base = '/'.join([gs_bucket, build_name, last_change])
    gs_acl = options.factory_properties.get('gs_acl', None)
    slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl)
    slave_utils.GSUtilCopyFile(full_results_json, gs_base, gs_acl=gs_acl)
  else:
    slave_utils.MaybeMakeDirectoryOnArchiveHost(dest_dir)
    slave_utils.CopyFileToArchiveHost(zip_file, dest_dir)
    slave_utils.CopyFileToArchiveHost(full_results_json, dest_dir)
    # Not supported on Google Storage yet.
    _ArchiveFullLayoutTestResults(staging_dir, dest_parent_dir,
                                  diff_file_list, options)
  return 0
def write_timestamp_file(timestamp_file_name, timestamp_value, gs_base=None,
                         gs_relative_dir=None, gs_acl=None, local_dir=None):
  """Adds a timestamp file to Google Storage and/or a local directory.

  If gs_base, gs_relative_dir and gs_acl are all provided, the timestamp is
  written to Google Storage. If local_dir is provided, the timestamp is
  written to a local directory.
  """
  timestamp_file = os.path.join(tempfile.gettempdir(), timestamp_file_name)
  f = open(timestamp_file, 'w')
  try:
    f.write(str(timestamp_value))
  finally:
    f.close()
  if local_dir:
    shutil.copyfile(timestamp_file,
                    os.path.join(local_dir, timestamp_file_name))
  if gs_base and gs_relative_dir and gs_acl:
    slave_utils.GSUtilCopyFile(filename=timestamp_file, gs_base=gs_base,
                               subdir=gs_relative_dir, gs_acl=gs_acl)
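# Example usage of write_timestamp_file (a sketch; the bucket, subdir and
# local directory are made up for illustration):
#
#   write_timestamp_file(
#       'TIMESTAMP_COMPLETED', int(time.time()),
#       gs_base='gs://example-bucket', gs_relative_dir='builder/42',
#       gs_acl='public-read', local_dir='/tmp/build-state')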
def MyCopyFileToGS(filename, gs_base, gs_subdir, mimetype=None, gs_acl=None):
  status = slave_utils.GSUtilCopyFile(filename, gs_base, gs_subdir, mimetype,
                                      gs_acl)
  if status != 0:
    dest = gs_base + '/' + gs_subdir
    raise GSUtilError('GSUtilCopyFile error %d. "%s" -> "%s"' %
                      (status, filename, dest))
def archive(options, args):
  build_dir, _ = chromium_utils.ConvertBuildDirToLegacy(
      options.build_dir, use_out=chromium_utils.IsLinux())
  build_dir = os.path.join(build_dir, options.target)
  src_dir = os.path.abspath(os.path.dirname(options.build_dir))

  staging_dir = slave_utils.GetStagingDir(src_dir)
  build_revision = slave_utils.SubversionRevision(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir = None

  # TODO(nsylvain): We need to move linux to a subdir as well, but aarya is
  # not ready with the server-side change.
  if chromium_utils.IsMac():
    subdir = '%s-%s' % (chromium_utils.PlatformName(),
                        options.target.lower())

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                   options.target.lower(), build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, zip_file_name,
                                               zip_file_list, build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' %
                       (zip_file, gs_bucket, status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
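# For reference, the archive uploaded above is named
# <prefix>-<platform>-<target>-<revision>.zip, e.g. (hypothetical values):
#
#   cf_archive-linux-release-123456.zip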
def _PutTestResultsLog(platform, test_results_log):
  """Pushes the given test results log to Google Storage."""
  temp_dir = util.MakeTempDir()
  log_name = TEST_LOG_FORMAT % platform
  log_path = os.path.join(temp_dir, log_name)
  with open(log_path, 'wb') as log_file:
    json.dump(test_results_log, log_file)
  if slave_utils.GSUtilCopyFile(log_path, GS_CHROMEDRIVER_DATA_BUCKET):
    raise Exception('Failed to upload test results log to google storage')
def testGSUtilCopyFileWithQuietFlag(self,  # pylint: disable=no-self-use
                                    run_command_mock):
  slave_utils.GSUtilCopyFile('foo', 'bar', add_quiet_flag=True)
  run_command_mock.assert_called_with([
      '/mock/gsutil', '-q', 'cp', 'file://foo', 'file://bar/foo',
  ])
def testGSUtilCopyFileWithDestFilename(self,  # pylint: disable=no-self-use
                                       run_command_mock):
  slave_utils.GSUtilCopyFile('/my/local/path/foo.txt',
                             'gs://bucket/dest/dir', dest_filename='bar.txt')
  run_command_mock.assert_called_with([
      '/mock/gsutil', 'cp', 'file:///my/local/path/foo.txt',
      'gs://bucket/dest/dir/bar.txt',
  ])
def ArchiveRetrySummary(args):
  args.builder_name = re.sub('[ .()]', '_', args.builder_name)
  print 'Builder name: %s' % args.builder_name
  print 'Build number: %s' % args.build_number
  print 'Host name: %s' % socket.gethostname()

  gs_base = '/'.join([args.gs_bucket, args.builder_name, args.build_number])
  slave_utils.GSUtilCopyFile(args.retry_summary_json, gs_base,
                             cache_control='public, max-age=31556926',
                             dest_filename='retry_summary.json')
  return 0
def MyCopyFileToGS(filename, gs_base, gs_subdir, mimetype=None, gs_acl=None):
  # Normalize the subdir to remove duplicated slashes, which break newer
  # versions of gsutil. Also remove leading and trailing slashes from the
  # subdir; gsutil adds them back automatically, and this can cause a double
  # slash to be added.
  if gs_subdir:
    gs_subdir = gs_subdir.replace('//', '/')
    gs_subdir = gs_subdir.strip('/')
  status = slave_utils.GSUtilCopyFile(filename, gs_base, gs_subdir, mimetype,
                                      gs_acl)
  if status != 0:
    dest = gs_base + '/' + gs_subdir
    raise GSUtilError('GSUtilCopyFile error %d. "%s" -> "%s"' %
                      (status, filename, dest))
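# The normalization above collapses doubled slashes and strips leading and
# trailing ones before handing the subdir to gsutil, e.g.:
#
#   '//builds//linux/'  ->  'builds/linux'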
def upload_to_bucket(source_filepath, dest_gsbase, subdir=None):
  abs_source_filepath = misc.GetAbsPath(source_filepath)
  print 'translated source_filepath %s to absolute path %s' % (
      source_filepath, abs_source_filepath)
  if not os.path.exists(abs_source_filepath):
    raise Exception('ERROR: file not found: %s' % abs_source_filepath)
  status = slave_utils.GSUtilCopyFile(abs_source_filepath, dest_gsbase,
                                      subdir=subdir, gs_acl='public-read')
  if status != 0:
    raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' %
                    (status, abs_source_filepath, dest_gsbase))
  return 0
def testGSUtilCopyCacheControl(self,  # pylint: disable=R0201
                               run_command_mock, gs_util_setup_mock):
  slave_utils.GSUtilCopyFile('foo', 'bar', cache_control='mock_cache')
  run_command_mock.assert_called_with([
      '/mock/gsutil', '-h', 'Cache-Control:mock_cache', 'cp', 'file://foo',
      'file://bar/foo'
  ])
  slave_utils.GSUtilCopyDir('foo', 'bar', cache_control='mock_cache')
  run_command_mock.assert_called_with([
      '/mock/gsutil', '-m', '-h', 'Cache-Control:mock_cache', 'cp', '-R',
      'foo', 'bar'
  ])
def upload_to_bucket(source_filepath, dest_gsbase):
  abs_source_filepath = os.path.abspath(source_filepath)
  print 'translated source_filepath %s to absolute path %s' % (
      source_filepath, abs_source_filepath)
  if not os.path.exists(abs_source_filepath):
    raise Exception('ERROR: file not found: %s' % abs_source_filepath)
  status = slave_utils.GSUtilCopyFile(abs_source_filepath, dest_gsbase,
                                      gs_acl='public-read')
  if status != 0:
    raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' %
                    (status, abs_source_filepath, dest_gsbase))
  (status, _output) = slave_utils.GSUtilListBucket(dest_gsbase, ['-l'])
  if status != 0:
    raise Exception('ERROR: failed to get list of %s, exiting' % dest_gsbase)
  return 0
def _RunNinjaSubTool(options, tmp_dir, sub_tool):
  ninja_dir = os.path.join('out', options.target)
  command = ['ninja', '-C', ninja_dir, '-t', sub_tool]
  filename = 'ninja-%s.txt' % sub_tool
  txt = os.path.join(tmp_dir, filename)
  with open(txt, 'w') as f:
    print 'Running command %s, saving output to %s' % (command, txt)
    retcode = subprocess.call(command, stdout=f)
    print 'Command returned %d' % retcode
  with open(txt) as f:
    txt_gz = txt + '.gz'
    with gzip.open(txt_gz, 'w') as g:
      g.writelines(f)
  upload_url = 'gs://blame-bot.appspot.com/%s/%s/%s' % (
      options.master, options.builder, options.build)
  slave_utils.GSUtilCopyFile(txt_gz, upload_url)
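# _RunNinjaSubTool drives ninja's built-in tools ('ninja -t <tool>'), e.g.
# sub_tool='commands' or sub_tool='graph'. A hypothetical invocation:
#
#   _RunNinjaSubTool(options, '/tmp/ninja-logs', 'commands')
#   # writes /tmp/ninja-logs/ninja-commands.txt, gzips it, and uploads it to
#   # gs://blame-bot.appspot.com/<master>/<builder>/<build>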
def CopyFileToGS(self, filename, gs_base, gs_subdir, mimetype=None,
                 gs_acl=None):
  # Normalize the subdir to remove duplicated slashes, which break newer
  # versions of gsutil. Also remove leading and trailing slashes from the
  # subdir; gsutil adds them back automatically, and this can cause a double
  # slash to be added.
  if gs_subdir:
    gs_subdir = gs_subdir.replace('//', '/')
    gs_subdir = gs_subdir.strip('/')

  # Construct metadata from our revision information, as available.
  gs_metadata = {
      GS_COMMIT_POSITION_NUMBER_KEY: self._chromium_revision,
  }

  # Add the commit position, if available.
  try:
    gs_metadata[GS_COMMIT_POSITION_KEY] = chromium_utils.BuildCommitPosition(
        *chromium_utils.GetCommitPosition(self.options))
  except chromium_utils.NoIdentifiedRevision:
    pass

  # Add the git commit hash, if available.
  try:
    gs_metadata[GS_GIT_COMMIT_KEY] = chromium_utils.GetGitCommit(
        self.options)
  except chromium_utils.NoIdentifiedRevision:
    pass

  status = slave_utils.GSUtilCopyFile(filename, gs_base, gs_subdir, mimetype,
                                      gs_acl, metadata=gs_metadata)
  if status != 0:
    dest = gs_base + '/' + gs_subdir
    raise GSUtilError('GSUtilCopyFile error %d. "%s" -> "%s"' %
                      (status, filename, dest))
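# For illustration, the metadata attached above might end up looking like
# this (key names come from the GS_* constants; values are hypothetical):
#
#   {
#       GS_COMMIT_POSITION_NUMBER_KEY: '330500',
#       GS_COMMIT_POSITION_KEY: 'refs/heads/master@{#330500}',
#       GS_GIT_COMMIT_KEY: 'c2f2c4566c99...',
#   }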
def archive(options, args):
  src_dir = os.path.abspath(os.path.dirname(options.build_dir))
  build_dir = os.path.join(src_dir, 'out', options.target)
  staging_dir = slave_utils.GetStagingDir(src_dir)
  build_revision = slave_utils.SubversionRevision(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  prefix = options.factory_properties.get('asan_archive_name', 'asan')
  zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                   options.target.lower(), build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, zip_file_name,
                                               zip_file_list, build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' %
                       (zip_file, gs_bucket, status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
def _MakeVersionedArchive(zip_file, file_suffix, options):
  """Takes a file name, e.g. /foo/bar.zip and an extra suffix, e.g. _baz,
  and copies the file to /foo/bar_baz.zip."""
  zip_template = os.path.basename(zip_file)
  zip_base, zip_ext = os.path.splitext(zip_template)
  # Create a versioned copy of the file.
  versioned_file = zip_file.replace(zip_ext, file_suffix + zip_ext)
  if os.path.exists(versioned_file):
    # This file already exists. Maybe we are doing a clobber build at the
    # same revision. We can move this file away.
    old_file = versioned_file.replace(zip_ext, '_old' + zip_ext)
    chromium_utils.MoveFile(versioned_file, old_file)
  shutil.copyfile(zip_file, versioned_file)
  chromium_utils.MakeWorldReadable(versioned_file)

  # For chromium.perf, upload the versioned file to a GS bucket.
  if (options.build_properties.get('mastername') == 'chromium.perf' and
      options.build_properties.get('buildername') == 'Win Builder'):
    print 'Uploading to Google Storage...'
    slave_utils.GSUtilCopyFile(versioned_file, 'gs://chrome-perf/',
                               options.build_properties['buildername'])

  print 'Created versioned archive', versioned_file
  return (zip_base, zip_ext)
def archive_layout(options, args):
  logging.basicConfig(level=logging.INFO,
                      format='%(asctime)s %(filename)s:%(lineno)-3d'
                             ' %(levelname)s %(message)s',
                      datefmt='%y%m%d %H:%M:%S')
  chrome_dir = os.path.abspath(options.build_dir)
  results_dir_basename = os.path.basename(options.results_dir)
  if options.results_dir is not None:
    options.results_dir = os.path.abspath(
        os.path.join(options.build_dir, options.results_dir))
  else:
    options.results_dir = chromium_utils.FindUpward(chrome_dir, RESULT_DIR)
  print 'Archiving results from %s' % options.results_dir
  staging_dir = options.staging_dir or slave_utils.GetStagingDir(chrome_dir)
  print 'Staging in %s' % staging_dir
  if not os.path.exists(staging_dir):
    os.makedirs(staging_dir)

  (actual_file_list,
   diff_file_list) = _CollectArchiveFiles(options.results_dir)
  zip_file = chromium_utils.MakeZip(staging_dir, results_dir_basename,
                                    actual_file_list, options.results_dir)[1]
  # TODO(ojan): Stop separately uploading full_results.json once
  # garden-o-matic switches to using failing_results.json.
  full_results_json = os.path.join(options.results_dir, 'full_results.json')
  failing_results_json = os.path.join(options.results_dir,
                                      'failing_results.json')

  # Extract the build name of this slave (e.g., 'chrome-release') from its
  # configuration file if not provided as a param.
  build_name = options.builder_name or slave_utils.SlaveBuildName(chrome_dir)
  build_name = re.sub('[ .()]', '_', build_name)

  wc_dir = os.path.dirname(chrome_dir)
  last_change = slave_utils.GetHashOrRevision(wc_dir)

  # TODO(dpranke): Is it safe to assume build_number is not blank? Should we
  # assert() this?
  build_number = str(options.build_number)
  print 'last change: %s' % last_change
  print 'build name: %s' % build_name
  print 'build number: %s' % build_number
  print 'host name: %s' % socket.gethostname()

  if options.gs_bucket:
    # Create a file containing the last_change revision. This file will be
    # uploaded after all layout test results are uploaded so the client can
    # check this file to see if the upload for the revision is complete.
    # See crbug.com/574272 for more details.
    last_change_file = os.path.join(staging_dir, 'LAST_CHANGE')
    with open(last_change_file, 'w') as f:
      f.write(last_change)

    # Copy the results to a directory archived by build number.
    gs_base = '/'.join([options.gs_bucket, build_name, build_number])
    gs_acl = options.gs_acl
    # These files never change, cache for a year.
    cache_control = 'public, max-age=31556926'
    slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
                               cache_control=cache_control)
    slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
                              cache_control=cache_control)

    # TODO(dpranke): Remove these two lines once clients are fetching the
    # files from the layout-test-results dir.
    slave_utils.GSUtilCopyFile(full_results_json, gs_base, gs_acl=gs_acl,
                               cache_control=cache_control)
    slave_utils.GSUtilCopyFile(failing_results_json, gs_base, gs_acl=gs_acl,
                               cache_control=cache_control)

    slave_utils.GSUtilCopyFile(last_change_file,
                               gs_base + '/' + results_dir_basename,
                               gs_acl=gs_acl, cache_control=cache_control)

    # And also to the 'results' directory to provide the 'latest' results
    # and make sure they are not cached at all (Cloud Storage defaults to
    # caching w/ a max-age=3600).
    gs_base = '/'.join([options.gs_bucket, build_name, 'results'])
    cache_control = 'no-cache'
    slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
                               cache_control=cache_control)
    slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
                              cache_control=cache_control)
    slave_utils.GSUtilCopyFile(last_change_file,
                               gs_base + '/' + results_dir_basename,
                               gs_acl=gs_acl, cache_control=cache_control)
  else:
    # Where to save layout test results.
    dest_parent_dir = os.path.join(archive_utils.Config.www_dir_base,
                                   results_dir_basename.replace('-', '_'),
                                   build_name)
    dest_dir = os.path.join(dest_parent_dir, last_change)

    _MaybeMakeDirectoryOnArchiveHost(dest_dir)
    _CopyFileToArchiveHost(zip_file, dest_dir)
    _CopyFileToArchiveHost(full_results_json, dest_dir)
    _CopyFileToArchiveHost(failing_results_json, dest_dir)

    # Not supported on Google Storage yet.
    _ArchiveFullLayoutTestResults(staging_dir, dest_parent_dir,
                                  diff_file_list, options)
  return 0
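# Net effect of the gs_bucket branch above: two copies of the results are
# uploaded (bucket and builder names hypothetical):
#
#   gs://example-bucket/WebKit_Linux/42/...       cached for a year
#   gs://example-bucket/WebKit_Linux/results/...  Cache-Control: no-cache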
def main():
  if not os.path.exists('src'):
    raise Exception('ERROR: no src directory to package, exiting')

  completed_hour = strftime('%H')
  completed_filename = '%s.%s' % (FILENAME, completed_hour)
  partial_filename = '%s.partial' % completed_filename

  chromium_utils.RunCommand(['rm', '-f', partial_filename])
  if os.path.exists(partial_filename):
    raise Exception('ERROR: %s cannot be removed, exiting' %
                    partial_filename)

  print '%s: Index generation...' % time.strftime('%X')
  indexing_successful = GenerateIndex()

  print '%s: Creating tar file...' % time.strftime('%X')
  find_command = ['find', 'src/', 'tools/', 'o3d/', '-type', 'f',
                  # The only files under src/out we want to package up
                  # are index files and generated sources.
                  '(', '-regex', '^src/out/.*index$', '-o',
                  '-regex', '^src/out/[^/]*/obj/gen/.*', '-o',
                  '!', '-regex', '^src/out/.*', ')', '-a',
                  # Exclude all .svn directories, the native client toolchain
                  # and the llvm build directory.
                  '!', '-regex', r'.*\.svn.*', '-a',
                  '!', '-regex', '^src/native_client/toolchain/.*', '-a',
                  '!', '-regex', '^src/third_party/llvm-build/.*']

  try:
    if chromium_utils.RunCommand(find_command,
                                 pipes=[['tar', '-T-', '-cjvf',
                                         partial_filename]]) != 0:
      raise Exception('ERROR: failed to create %s, exiting' %
                      partial_filename)

    print '%s: Cleaning up google storage...' % time.strftime('%X')
    DeleteIfExists(completed_filename)
    DeleteIfExists(partial_filename)

    print '%s: Uploading...' % time.strftime('%X')
    status = slave_utils.GSUtilCopyFile(partial_filename, GSBASE,
                                        gs_acl=GSACL)
    if status != 0:
      raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' %
                      (status, partial_filename, GSBASE))

    print '%s: Finalizing google storage...' % time.strftime('%X')
    status = slave_utils.GSUtilMoveFile(
        '%s/%s' % (GSBASE, partial_filename),
        '%s/%s' % (GSBASE, completed_filename), gs_acl=GSACL)
    if status != 0:
      raise Exception('ERROR: GSUtilMoveFile error %d. "%s" -> "%s"' %
                      (status, '%s/%s' % (GSBASE, partial_filename),
                       '%s/%s' % (GSBASE, completed_filename)))

    (status, output) = slave_utils.GSUtilListBucket(GSBASE)
    if status != 0:
      raise Exception('ERROR: failed to get list of %s, exiting' % GSBASE)

    regex = re.compile(r'\s*\d+\s+([-:\w]+)\s+%s/%s\n' %
                       (GSBASE, completed_filename))
    match_data = regex.search(output)
    modified_time = None
    if match_data:
      modified_time = match_data.group(1)
    if not modified_time:
      raise Exception('ERROR: could not get modified_time, exiting')
    print 'Last modified time: %s' % modified_time
  finally:
    print '%s: Cleaning up locally...' % time.strftime('%X')
    chromium_utils.RunCommand(['rm', '-f', partial_filename])
    # TODO(klimek): If this is not executed at the end of a run, we will
    # use leftover data on the next run; add an extra build step that
    # does this clean up before the build starts.
    if chromium_utils.RunCommand(['find', 'src/out', '-type', 'f',
                                  '(', '-name', '*.json-command', '-o',
                                  '-name', '*.index', ')',
                                  '-exec', 'rm', '-f', '{}', ';']):
      raise Exception('ERROR: failed to clean up indexer files')
    print '%s: Done.' % time.strftime('%X')

  if not indexing_successful:
    return 1
  return 0
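# The find/tar step above amounts to roughly this shell pipeline (predicates
# elided, filename hypothetical):
#
#   find src/ tools/ o3d/ -type f ... | tar -T- -cjvf all.tar.bz2.partial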
def archive(options, args):
  build_dir = build_directory.GetBuildOutputDirectory()
  src_dir = os.path.abspath(os.path.dirname(build_dir))
  build_dir = os.path.join(build_dir, options.target)

  revision_dir = options.factory_properties.get('revision_dir')
  (build_revision, _) = slave_utils.GetBuildRevisions(
      src_dir, None, revision_dir)

  staging_dir = slave_utils.GetStagingDir(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                 '')
  pieces = [chromium_utils.PlatformName(), options.target.lower()]
  if subdir_suffix:
    pieces.append(subdir_suffix)
  subdir = '-'.join(pieces)

  # Components like v8 get a <name>-v8-component-<revision> infix.
  component = ''
  if revision_dir:
    component = '-%s-component' % revision_dir

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s%s-%s' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), component,
                                     build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, zip_file_name,
                                               zip_file_list, build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' %
                       (zip_file, gs_bucket, status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
def archive(options, args):
  # Disable 'unused argument' warning for 'args' | pylint: disable=W0613
  build_dir = build_directory.GetBuildOutputDirectory()
  src_dir = os.path.abspath(os.path.dirname(build_dir))
  build_dir = os.path.join(build_dir, options.target)

  revision_dir = options.factory_properties.get('revision_dir')
  primary_project = chromium_utils.GetPrimaryProject(options)

  build_sortkey_branch, build_sortkey_value = GetBuildSortKey(
      options, primary_project)
  build_git_commit = GetGitCommit(options, primary_project)

  staging_dir = slave_utils.GetStagingDir(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                 '')
  pieces = [chromium_utils.PlatformName(), options.target.lower()]
  if subdir_suffix:
    pieces.append(subdir_suffix)
  subdir = '-'.join(pieces)

  # Components like v8 get a <name>-v8-component-<revision> infix.
  component = ''
  if revision_dir:
    component = '-%s-component' % revision_dir

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  sortkey_path = chromium_utils.GetSortableUploadPathForSortKey(
      build_sortkey_branch, build_sortkey_value)
  zip_file_name = '%s-%s-%s%s-%s' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), component,
                                     sortkey_path)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, zip_file_name,
                                               zip_file_list, build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)

  gs_metadata = {
      GS_COMMIT_POSITION_NUMBER_KEY: build_sortkey_value,
  }
  if build_sortkey_branch:
    gs_metadata[GS_COMMIT_POSITION_KEY] = chromium_utils.BuildCommitPosition(
        build_sortkey_branch, build_sortkey_value)
  if build_git_commit:
    gs_metadata[GS_GIT_COMMIT_KEY] = build_git_commit

  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl, metadata=gs_metadata)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' %
                       (zip_file, gs_bucket, status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
def main():
  option_parser = optparse.OptionParser()
  chromium_utils.AddPropertiesOptions(option_parser)
  options, _ = option_parser.parse_args()

  if not os.path.exists('src'):
    raise Exception('ERROR: no src directory to package, exiting')

  try:
    revision_upload_path = chromium_utils.GetSortableUploadPathForSortKey(
        *chromium_utils.GetBuildSortKey(options))
  except chromium_utils.NoIdentifiedRevision:
    revision_upload_path = 'NONE'
  completed_filename = '%s-%s.%s' % (
      options.factory_properties.get('package_filename', FILENAME),
      revision_upload_path, EXT)
  partial_filename = '%s.partial' % completed_filename

  chromium_utils.RunCommand(['rm', '-f', partial_filename])
  if os.path.exists(partial_filename):
    raise Exception('ERROR: %s cannot be removed, exiting' %
                    partial_filename)

  print '%s: Index generation...' % time.strftime('%X')
  indexing_successful = GenerateIndex()

  print '%s: Creating tar file...' % time.strftime('%X')
  packaging_successful = True
  find_command = ['find', 'src/', 'tools/', '/usr/include/', '-type', 'f',
                  # The only files under src/out we want to package up
                  # are index files....
                  '(', '-regex', r'^src/out/.*\.index$', '-o',
                  '(',
                  # ... and generated sources...
                  '-regex', '^src/out/.*/gen/.*', '-a',
                  '(', '-name', '*.h', '-o', '-name', '*.cc', '-o',
                  '-name', '*.cpp', '-o', '-name', '*.js', ')', '-a',
                  # ... but none of the NaCl stuff.
                  '!', '-regex', '^src/out/[^/]*/gen/lib[^/]*/.*', '-a',
                  '!', '-regex', '^src/out/[^/]*/gen/sdk/.*', '-a',
                  '!', '-regex', '^src/out/[^/]*/gen/tc_.*',
                  ')', '-o',
                  '!', '-regex', '^src/out/.*', ')', '-a',
                  # Exclude all .svn and .git directories, the native client
                  # toolchain, the llvm build directory, and perf/data files.
                  '!', '-regex', r'.*/\.svn/.*', '-a',
                  '!', '-regex', r'.*/\.git/.*', '-a',
                  '!', '-regex', '^src/native_client/toolchain/.*', '-a',
                  '!', '-regex', '^src/native_client/.*/testdata/.*', '-a',
                  '!', '-regex', '^src/third_party/llvm-build/.*', '-a',
                  '!', '-regex', r'^src/.*/\.cvsignore', '-a',
                  '!', '-regex', '^src/chrome/tools/test/reference_build/.*',
                  '-a',
                  '!', '-regex', '^tools/perf/data/.*']

  try:
    if chromium_utils.RunCommand(find_command,
                                 pipes=[['tar', '-T-', '-cjvf',
                                         partial_filename]]) != 0:
      raise Exception('ERROR: failed to create %s, exiting' %
                      partial_filename)

    print '%s: Cleaning up google storage...' % time.strftime('%X')
    DeleteIfExists(completed_filename)
    DeleteIfExists(partial_filename)

    print '%s: Uploading...' % time.strftime('%X')
    status = slave_utils.GSUtilCopyFile(partial_filename, GSBASE,
                                        gs_acl=GSACL)
    if status != 0:
      raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' %
                      (status, partial_filename, GSBASE))

    print '%s: Finalizing google storage...' % time.strftime('%X')
    status = slave_utils.GSUtilMoveFile(
        '%s/%s' % (GSBASE, partial_filename),
        '%s/%s' % (GSBASE, completed_filename), gs_acl=GSACL)
    if status != 0:
      raise Exception('ERROR: GSUtilMoveFile error %d. "%s" -> "%s"' %
                      (status, '%s/%s' % (GSBASE, partial_filename),
                       '%s/%s' % (GSBASE, completed_filename)))

    (status, output) = slave_utils.GSUtilListBucket(GSBASE, ['-l'])
    if status != 0:
      raise Exception('ERROR: failed to get list of %s, exiting' % GSBASE)

    regex = re.compile(r'\s*\d+\s+([-:\w]+)\s+%s/%s\n' %
                       (GSBASE, completed_filename))
    match_data = regex.search(output)
    modified_time = None
    if match_data:
      modified_time = match_data.group(1)
    if not modified_time:
      raise Exception('ERROR: could not get modified_time, exiting')
    print 'Last modified time: %s' % modified_time

    print ('%s: Deleting old archives on google storage...'
           % time.strftime('%X'))
    regex = re.compile(r'\s*\d+\s+([-:\w]+)\s+(%s/.*%s.*)\n' % (GSBASE, EXT))
    last_week = int(time.time()) - 7 * 24 * 60 * 60
    for match_data in regex.finditer(output):
      timestamp = int(time.strftime(
          '%s', time.strptime(match_data.group(1), '%Y-%m-%dT%H:%M:%S')))
      if timestamp < last_week:
        print 'Deleting %s...' % match_data.group(2)
        status = slave_utils.GSUtilDeleteFile(match_data.group(2))
        if status != 0:
          raise Exception('ERROR: GSUtilDeleteFile error %d. "%s"' %
                          (status, match_data.group(2)))
  except Exception, e:
    print str(e)
    packaging_successful = False
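# The age check above assumes 'gsutil ls -l' output lines of roughly this
# shape (size and name hypothetical):
#
#   2276224  2013-02-01T01:23:45  gs://example-base/all.tar.bz2
#
# The second column is parsed with strptime, and anything older than a week
# is deleted.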