def DownloadLatestFile(base_url, partial_name, dst):
  """Get the latest archived object with the given base url and partial name.

  Args:
    base_url: Base Google Storage archive URL (gs://...) containing the build.
    partial_name: Partial name of the archive file to download.
    dst: Destination file/directory where the file will be downloaded.

  Raises:
    Exception: If unable to find or download a file.
  """
  base_url_glob = '%s/**' % base_url.rstrip('/')
  result = slave_utils.GSUtilListBucket(base_url_glob, ['-l'])
  if not result or result[0]:
    raise Exception('Could not find any archived files.')
  files = [b.split()[2] for b in result[1].split('\n') if partial_name in b]
  if not files:
    raise Exception('Could not find any matching files.')
  files = [distutils.version.LooseVersion(x) for x in files]
  newest_file = str(max(files))
  # GSUtilDownloadFile returns a nonzero status on failure; raise as the
  # docstring promises.
  if slave_utils.GSUtilDownloadFile(newest_file, dst) != 0:
    raise Exception('Could not download file: %s' % newest_file)

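# Usage sketch (hypothetical bucket and names; assumes slave_utils is
# importable from the buildbot scripts directory):
def _example_download_latest():
  # Fetch the newest archive whose name contains 'chrome-linux' from a
  # made-up bucket into /tmp.
  DownloadLatestFile('gs://example-bucket/builds', 'chrome-linux',
                     '/tmp/latest_build.zip')
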
def _MaybeRelease(platform):
  """Releases a release candidate if conditions are right."""
  assert platform != 'android'

  # Check if the current version has already been released.
  result, _ = slave_utils.GSUtilListBucket(
      '%s/%s/chromedriver_%s*' % (GS_CHROMEDRIVER_BUCKET, _GetVersion(),
                                  platform),
      [])
  if result == 0:
    return

  # Fetch Android test results.
  android_test_results = _GetTestResultsLog('android')

  # Fetch release candidates.
  result, output = slave_utils.GSUtilListBucket(
      '%s/chromedriver_%s_%s*' % (GS_CONTINUOUS_URL, platform, _GetVersion()),
      [])
  assert result == 0 and output, 'No release candidates found'
  candidates = [b.split('/')[-1] for b in output.strip().split('\n')]
  candidate_pattern = re.compile(
      r'chromedriver_%s_%s\.\d+\.zip' % (platform, _GetVersion()))

  # Release the first candidate build that passed Android, if any.
  for candidate in candidates:
    if not candidate_pattern.match(candidate):
      print 'Ignored candidate "%s"' % candidate
      continue
    revision = candidate.split('.')[-2]
    android_result = _RevisionState(android_test_results, int(revision))
    if android_result == 'failed':
      print 'Android tests did not pass at revision', revision
    elif android_result == 'passed':
      print 'Android tests passed at revision', revision
      _Release('%s/%s' % (GS_CONTINUOUS_URL, candidate), platform)
      break
    else:
      print 'Android tests have not run at a revision as recent as', revision

def _MaybeRelease(platform):
  """Releases a release candidate if conditions are right."""
  assert platform != 'android'
  version = _GetVersion()

  # Check if the current version has already been released.
  if _WasReleased(version, platform):
    return

  # Fetch Android test results.
  android_test_results = _GetTestResultsLog('android')

  # Fetch release candidates.
  result, output = slave_utils.GSUtilListBucket(
      '%s/chromedriver_%s_%s*' % (GS_CONTINUOUS_URL, platform, version),
      [])
  assert result == 0 and output, 'No release candidates found'
  candidate_pattern = re.compile(
      r'.*/chromedriver_%s_%s\.(\d+)\.zip$' % (platform, version))
  candidates = []
  for line in output.strip().split('\n'):
    result = candidate_pattern.match(line)
    if not result:
      print 'Ignored line "%s"' % line
      continue
    candidates.append(int(result.group(1)))

  # Release the latest candidate build that passed Android, if any.
  # In this way, if a hot fix is needed, we can delete the release from
  # the chromedriver bucket instead of bumping up the release version number.
  candidates.sort(reverse=True)
  for commit_position in candidates:
    # Due to Android test bot migration (https://crbug.com/790300),
    # temporarily disabling checking the Android test results.
    # Android tests are being verified manually.
    # android_result = _CommitPositionState(android_test_results,
    #                                       commit_position)
    android_result = 'passed'
    if android_result == 'failed':
      print 'Android tests did not pass at commit position', commit_position
    elif android_result == 'passed':
      print 'Android tests passed at commit position', commit_position
      candidate = 'chromedriver_%s_%s.%s.zip' % (
          platform, version, commit_position)
      _Release('%s/%s' % (GS_CONTINUOUS_URL, candidate), version, platform)
      break
    else:
      print 'Android tests have not run at a commit position as recent as', \
          commit_position

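# A self-contained illustration of how candidate_pattern above extracts
# commit positions from a gsutil listing. The gs:// paths are made up; the
# pattern is the one _MaybeRelease builds.
import re

platform, version = 'linux64', '2.33'
candidate_pattern = re.compile(
    r'.*/chromedriver_%s_%s\.(\d+)\.zip$' % (platform, version))
sample_output = '\n'.join([
    'gs://example-continuous/chromedriver_linux64_2.33.512345.zip',
    'gs://example-continuous/chromedriver_linux64_2.33.512400.zip',
    'gs://example-continuous/some_unrelated_file.txt',
])
candidates = []
for line in sample_output.split('\n'):
  match = candidate_pattern.match(line)
  if match:
    candidates.append(int(match.group(1)))
candidates.sort(reverse=True)
print candidates  # [512400, 512345] -- newest candidate first
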
def list_storage_directory(dest_gsbase, subdir):
  """List the contents of the specified Storage directory."""
  gsbase_subdir = posixpath.join(dest_gsbase, subdir)
  status, output_gsutil_ls = slave_utils.GSUtilListBucket(gsbase_subdir, [])
  if status != 0:
    raise Exception('Could not list contents of %s in Google Storage!' %
                    gsbase_subdir)
  gs_files = []
  for line in set(output_gsutil_ls.splitlines()):
    # Ignore lines with warnings and status messages.
    if line and line.startswith(gsbase_subdir) and line != gsbase_subdir:
      gs_files.append(line)
  return gs_files

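# Usage sketch (hypothetical bucket and subdir; assumes slave_utils and
# posixpath are importable):
def _example_list_perfdata():
  for gs_url in list_storage_directory('gs://example-perf-bucket',
                                       'linux-release'):
    print gs_url  # full gs:// URL of each object under the subdirectory
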
def _MaybeUploadReleaseNotes(version):
  """Upload release notes if conditions are right."""
  # Check if the current version has already been released.
  notes_name = 'notes.txt'
  notes_url = '%s/%s/%s' % (GS_CHROMEDRIVER_BUCKET, version, notes_name)
  prev_version = '.'.join([version.split('.')[0],
                           str(int(version.split('.')[1]) - 1)])
  prev_notes_url = '%s/%s/%s' % (
      GS_CHROMEDRIVER_BUCKET, prev_version, notes_name)
  result, _ = slave_utils.GSUtilListBucket(notes_url, [])
  if result == 0:
    return

  fixed_issues = []
  query = ('https://code.google.com/p/chromedriver/issues/csv?'
           'can=1&q=label%%3AChromeDriver-%s&colspec=ID%%20Summary' % version)
  issues = StringIO.StringIO(_GetWebPageContent(query).split('\n', 1)[1])
  for issue in csv.reader(issues):
    if not issue:
      continue
    issue_id = issue[0]
    desc = issue[1]
    labels = issue[2].split(', ')
    labels.remove('ChromeDriver-%s' % version)
    if 'Hotlist-GoodFirstBug' in labels:
      labels.remove('Hotlist-GoodFirstBug')
    fixed_issues += ['Resolved issue %s: %s [%s]' % (issue_id, desc, labels)]

  old_notes = ''
  temp_notes_fname = tempfile.mkstemp()[1]
  if not slave_utils.GSUtilDownloadFile(prev_notes_url, temp_notes_fname):
    with open(temp_notes_fname, 'rb') as f:
      old_notes = f.read()

  new_notes = '----------ChromeDriver v%s (%s)----------\n%s\n%s\n\n%s' % (
      version, datetime.date.today().isoformat(),
      'Supports Chrome v%s-%s' % _GetSupportedChromeVersions(),
      '\n'.join(fixed_issues),
      old_notes)
  with open(temp_notes_fname, 'w') as f:
    f.write(new_notes)

  if slave_utils.GSUtilCopy(temp_notes_fname, notes_url,
                            mimetype='text/plain'):
    util.MarkBuildStepError()

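# The prev_version computation above simply decrements the minor version.
# A runnable illustration with a made-up version string:
version = '2.35'
prev_version = '.'.join(
    [version.split('.')[0], str(int(version.split('.')[1]) - 1)])
print prev_version  # 2.34
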
def DeleteIfExists(filename):
  """Deletes the file (relative to GSBASE), if it exists."""
  (status, output) = slave_utils.GSUtilListBucket(GSBASE, ['-l'])
  if status != 0:
    raise Exception('ERROR: failed to get list of %s, exiting' % GSBASE)
  regex = re.compile(r'\s*\d+\s+([-:\w]+)\s+%s/%s\n' % (GSBASE, filename))
  if not regex.search(output):
    return
  status = slave_utils.GSUtilDeleteFile('%s/%s' % (GSBASE, filename))
  if status != 0:
    raise Exception('ERROR: GSUtilDeleteFile error %d. "%s"' %
                    (status, '%s/%s' % (GSBASE, filename)))

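# A runnable sketch of the listing regex used by DeleteIfExists, assuming
# `gsutil ls -l` lines of the form "<size>  <timestamp>  <url>". The bucket
# name and listing line are made up.
import re

GSBASE = 'gs://example-bucket'
filename = 'build.tar.bz2'
output = '    123456  2013-04-12T00:13:00  %s/%s\n' % (GSBASE, filename)
regex = re.compile(r'\s*\d+\s+([-:\w]+)\s+%s/%s\n' % (GSBASE, filename))
match = regex.search(output)
if match:
  print 'exists, last modified %s' % match.group(1)  # 2013-04-12T00:13:00
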
def upload_to_bucket(source_filepath, dest_gsbase):
  """Uploads a local file to the given Storage base URL as public-read."""
  abs_source_filepath = os.path.abspath(source_filepath)
  print 'translated source_filepath %s to absolute path %s' % (
      source_filepath, abs_source_filepath)
  if not os.path.exists(abs_source_filepath):
    raise Exception('ERROR: file not found: %s' % abs_source_filepath)
  status = slave_utils.GSUtilCopyFile(abs_source_filepath, dest_gsbase,
                                      gs_acl='public-read')
  if status != 0:
    raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' %
                    (status, abs_source_filepath, dest_gsbase))
  (status, _output) = slave_utils.GSUtilListBucket(dest_gsbase, ['-l'])
  if status != 0:
    raise Exception('ERROR: failed to get list of %s, exiting' % dest_gsbase)
  return 0

def _WasReleased(version, platform):
  """Check if the specified version is released for the given platform."""
  result, _ = slave_utils.GSUtilListBucket(
      '%s/%s/chromedriver_%s.zip' % (GS_CHROMEDRIVER_BUCKET, version,
                                     platform),
      [])
  return result == 0

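# Usage sketch (hypothetical version and platform), e.g. to guard a release
# step against double-publishing:
def _example_release_guard():
  if _WasReleased('2.33', 'linux64'):
    print 'chromedriver 2.33 for linux64 is already released'
    return
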
def SyncBucketSubdir(directory, dest_gsbase=DEFAULT_PERFDATA_GS_BASE,
                     subdir='', do_upload=True, do_download=True,
                     filenames_filter=KNOWN_FILENAMES, exclude_json=False,
                     min_download_revision=0):
  """Synchronizes a local directory with a cloud one.

  directory: local directory to synchronize
  dest_gsbase: gs:// bucket to synchronize
  subdir: optional subdirectory within the bucket; multiple directory levels
      are supported, using Unix relative path syntax ("outer/inner")
  do_upload: True to perform upload, False otherwise
  do_download: True to perform download, False otherwise
  filenames_filter: a regular expression used to match known file names;
      re.search(filenames_filter, file_name).group(1) must return the
      revision number
  exclude_json: True to skip uploading .json files
  min_download_revision: don't download files whose revision number (based
      on filenames_filter) is lower than this
  """
  local_files = set(os.listdir(directory))
  status, output_gsutil_ls = slave_utils.GSUtilListBucket(
      posixpath.join(dest_gsbase, subdir), [])
  # If there is not at least one file in that subdir, gsutil reports an
  # error. Writing something like GsUtilExistsSubdir is a lot of pain, so we
  # assume the subdir does not exist; if there is a real issue, it will
  # surface later.
  if status != 0:
    print 'ls failed'
    output_gsutil_ls = ''
  output_gsutil_ls = set(output_gsutil_ls.splitlines())
  gsbase_subdir = posixpath.join(dest_gsbase, subdir, '')
  cloud_files = set()
  for line in output_gsutil_ls:
    # Ignore lines with warnings and status messages.
    if line.startswith(gsbase_subdir) and line != gsbase_subdir:
      cloud_files.add(line.replace(gsbase_subdir, ''))

  # Download only files not present in the local dir.
  if do_download:
    to_download = cloud_files.difference(local_files)
    for file_name in to_download:
      match = re.search(filenames_filter, file_name)
      # Ignore unknown files and force builds without a revision number.
      if not match or not match.group(1):
        continue
      # Skip files older than min_download_revision.
      if int(match.group(1)) < min_download_revision:
        continue
      slave_utils.GSUtilDownloadFile(gsbase_subdir + file_name,
                                     os.path.join(directory, file_name))

  # Upload only files not present in cloud storage.
  if do_upload:
    to_upload = local_files.difference(cloud_files)
    for file_name in to_upload:
      if file_name not in IGNORE_UPLOAD_FILENAMES:
        match = re.search(filenames_filter, file_name)
        if not match or (exclude_json and file_name.endswith('.json')):
          # Ignore other files, rather than raising an exception.
          continue
        # Ignore force builds without a revision number.
        if match.group(1) != '':
          upload_to_bucket.upload_to_bucket(
              os.path.join(directory, file_name), dest_gsbase, subdir)
  return 0

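# Usage sketch for SyncBucketSubdir. The bucket, subdir, and filter are
# hypothetical; note that group(1) of filenames_filter must capture the
# revision number, as the docstring requires.
def _example_sync_perfdata():
  SyncBucketSubdir(
      directory='/tmp/perfdata',
      dest_gsbase='gs://example-perf-bucket',
      subdir='linux-release/perfdata',
      filenames_filter=r'^results-(\d+)\.txt$',  # group(1) is the revision
      exclude_json=True,
      min_download_revision=100000)
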
def is_present(self):
  """Returns True if the object at self.url exists in Google Storage."""
  return 0 == slave_utils.GSUtilListBucket(self.url, ['-l'])[0]

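# A minimal sketch of the class context this method implies: an object
# wrapping a single gs:// URL. The class name and constructor are assumed,
# not taken from the original source.
class _GSObject(object):
  def __init__(self, url):
    self.url = url  # full gs:// URL, e.g. 'gs://example-bucket/build.zip'

  def is_present(self):
    """Returns True if the object exists in Google Storage."""
    return 0 == slave_utils.GSUtilListBucket(self.url, ['-l'])[0]
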
def main():
  option_parser = optparse.OptionParser()
  chromium_utils.AddPropertiesOptions(option_parser)
  options, _ = option_parser.parse_args()

  if not os.path.exists('src'):
    raise Exception('ERROR: no src directory to package, exiting')

  try:
    revision_upload_path = chromium_utils.GetSortableUploadPathForSortKey(
        *chromium_utils.GetBuildSortKey(options))
  except chromium_utils.NoIdentifiedRevision:
    revision_upload_path = 'NONE'
  completed_filename = '%s-%s.%s' % (
      options.factory_properties.get('package_filename', FILENAME),
      revision_upload_path, EXT)
  partial_filename = '%s.partial' % completed_filename

  chromium_utils.RunCommand(['rm', '-f', partial_filename])
  if os.path.exists(partial_filename):
    raise Exception('ERROR: %s cannot be removed, exiting' % partial_filename)

  print '%s: Index generation...' % time.strftime('%X')
  indexing_successful = GenerateIndex()

  print '%s: Creating tar file...' % time.strftime('%X')
  packaging_successful = True
  find_command = [
      'find', 'src/', 'tools/', '/usr/include/', '-type', 'f',
      # The only files under src/out we want to package up
      # are index files....
      '(', '-regex', r'^src/out/.*\.index$', '-o',
      '(',
      # ... and generated sources...
      '-regex', '^src/out/.*/gen/.*', '-a',
      '(', '-name', '*.h', '-o', '-name', '*.cc', '-o', '-name', '*.cpp',
      '-o', '-name', '*.js', ')', '-a',
      # ... but none of the NaCL stuff.
      '!', '-regex', '^src/out/[^/]*/gen/lib[^/]*/.*', '-a',
      '!', '-regex', '^src/out/[^/]*/gen/sdk/.*', '-a',
      '!', '-regex', '^src/out/[^/]*/gen/tc_.*',
      ')', '-o',
      '!', '-regex', '^src/out/.*', ')', '-a',
      # Exclude all .svn directories, the native client toolchain
      # and the llvm build directory, and perf/data files.
      '!', '-regex', r'.*/\.svn/.*', '-a',
      '!', '-regex', r'.*/\.git/.*', '-a',
      '!', '-regex', '^src/native_client/toolchain/.*', '-a',
      '!', '-regex', '^src/native_client/.*/testdata/.*', '-a',
      '!', '-regex', '^src/third_party/llvm-build/.*', '-a',
      '!', '-regex', r'^src/.*/\.cvsignore', '-a',
      '!', '-regex', '^src/chrome/tools/test/reference_build/.*', '-a',
      '!', '-regex', '^tools/perf/data/.*',
  ]

  try:
    if chromium_utils.RunCommand(
        find_command,
        pipes=[['tar', '-T-', '-cjvf', partial_filename]]) != 0:
      raise Exception('ERROR: failed to create %s, exiting' %
                      partial_filename)

    print '%s: Cleaning up google storage...' % time.strftime('%X')
    DeleteIfExists(completed_filename)
    DeleteIfExists(partial_filename)

    print '%s: Uploading...' % time.strftime('%X')
    status = slave_utils.GSUtilCopyFile(partial_filename, GSBASE,
                                        gs_acl=GSACL)
    if status != 0:
      raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' %
                      (status, partial_filename, GSBASE))

    print '%s: Finalizing google storage...' % time.strftime('%X')
    status = slave_utils.GSUtilMoveFile(
        '%s/%s' % (GSBASE, partial_filename),
        '%s/%s' % (GSBASE, completed_filename),
        gs_acl=GSACL)
    if status != 0:
      raise Exception('ERROR: GSUtilMoveFile error %d. "%s" -> "%s"' %
                      (status, '%s/%s' % (GSBASE, partial_filename),
                       '%s/%s' % (GSBASE, completed_filename)))

    (status, output) = slave_utils.GSUtilListBucket(GSBASE, ['-l'])
    if status != 0:
      raise Exception('ERROR: failed to get list of %s, exiting' % GSBASE)
    regex = re.compile(r'\s*\d+\s+([-:\w]+)\s+%s/%s\n' %
                       (GSBASE, completed_filename))
    match_data = regex.search(output)
    modified_time = None
    if match_data:
      modified_time = match_data.group(1)
    if not modified_time:
      raise Exception('ERROR: could not get modified_time, exiting')
    print 'Last modified time: %s' % modified_time

    print '%s: Deleting old archives on google storage...' % time.strftime(
        '%X')
    regex = re.compile(r'\s*\d+\s+([-:\w]+)\s+(%s/.*%s.*)\n' % (GSBASE, EXT))
    last_week = int(time.time()) - 7 * 24 * 60 * 60
    for match_data in regex.finditer(output):
      timestamp = int(time.strftime(
          '%s', time.strptime(match_data.group(1), '%Y-%m-%dT%H:%M:%S')))
      if timestamp < last_week:
        print 'Deleting %s...' % match_data.group(2)
        status = slave_utils.GSUtilDeleteFile(match_data.group(2))
        if status != 0:
          raise Exception('ERROR: GSUtilDeleteFile error %d. "%s"' %
                          (status, match_data.group(2)))
  except Exception, e:
    print str(e)
    packaging_successful = False

def main():
  if not os.path.exists('src'):
    raise Exception('ERROR: no src directory to package, exiting')

  completed_hour = time.strftime('%H')
  completed_filename = '%s.%s' % (FILENAME, completed_hour)
  partial_filename = '%s.partial' % completed_filename

  chromium_utils.RunCommand(['rm', '-f', partial_filename])
  if os.path.exists(partial_filename):
    raise Exception('ERROR: %s cannot be removed, exiting' % partial_filename)

  print '%s: Index generation...' % time.strftime('%X')
  indexing_successful = GenerateIndex()

  print '%s: Creating tar file...' % time.strftime('%X')
  find_command = [
      'find', 'src/', 'tools/', 'o3d/', '-type', 'f',
      # The only files under src/out we want to package up
      # are index files and generated sources.
      '(', '-regex', '^src/out/.*index$', '-o',
      '-regex', '^src/out/[^/]*/obj/gen/.*', '-o',
      '!', '-regex', '^src/out/.*', ')', '-a',
      # Exclude all .svn directories, the native client toolchain
      # and the llvm build directory.
      '!', '-regex', r'.*\.svn.*', '-a',
      '!', '-regex', '^src/native_client/toolchain/.*', '-a',
      '!', '-regex', '^src/third_party/llvm-build/.*',
  ]

  try:
    if chromium_utils.RunCommand(
        find_command,
        pipes=[['tar', '-T-', '-cjvf', partial_filename]]) != 0:
      raise Exception('ERROR: failed to create %s, exiting' %
                      partial_filename)

    print '%s: Cleaning up google storage...' % time.strftime('%X')
    DeleteIfExists(completed_filename)
    DeleteIfExists(partial_filename)

    print '%s: Uploading...' % time.strftime('%X')
    status = slave_utils.GSUtilCopyFile(partial_filename, GSBASE,
                                        gs_acl=GSACL)
    if status != 0:
      raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' %
                      (status, partial_filename, GSBASE))

    print '%s: Finalizing google storage...' % time.strftime('%X')
    status = slave_utils.GSUtilMoveFile(
        '%s/%s' % (GSBASE, partial_filename),
        '%s/%s' % (GSBASE, completed_filename),
        gs_acl=GSACL)
    if status != 0:
      raise Exception('ERROR: GSUtilMoveFile error %d. "%s" -> "%s"' %
                      (status, '%s/%s' % (GSBASE, partial_filename),
                       '%s/%s' % (GSBASE, completed_filename)))

    # List with -l so the regex below can parse out the timestamp column.
    (status, output) = slave_utils.GSUtilListBucket(GSBASE, ['-l'])
    if status != 0:
      raise Exception('ERROR: failed to get list of %s, exiting' % GSBASE)
    regex = re.compile(r'\s*\d+\s+([-:\w]+)\s+%s/%s\n' %
                       (GSBASE, completed_filename))
    match_data = regex.search(output)
    modified_time = None
    if match_data:
      modified_time = match_data.group(1)
    if not modified_time:
      raise Exception('ERROR: could not get modified_time, exiting')
    print 'Last modified time: %s' % modified_time
  finally:
    print '%s: Cleaning up locally...' % time.strftime('%X')
    chromium_utils.RunCommand(['rm', '-f', partial_filename])
    # TODO(klimek): If this is not executed at the end of a run, we will
    # use leftover data on the next run; add an extra build step that
    # does this clean up before the build starts.
    if chromium_utils.RunCommand([
        'find', 'src/out', '-type', 'f',
        '(', '-name', '*.json-command', '-o', '-name', '*.index', ')',
        '-exec', 'rm', '-f', '{}', ';']):
      raise Exception('ERROR: failed to clean up indexer files')
    print '%s: Done.' % time.strftime('%X')

  if not indexing_successful:
    return 1
  return 0