def GetResultsForApp(app, repo, options, temp_dir):
  """Check out the app's repository at its pinned revision, build the app
  with each selected shrinker, and collect everything into one result dict.

  The returned dict maps 'status' to 'success' and each shrinker name to
  that shrinker's build result.
  """
  checkout_dir = os.path.join(WORKING_DIR, repo.name)
  outcome = {}
  # Clone only when the checkout is missing; on Golem the checkout is
  # provisioned up front, so never clone there.
  if not os.path.exists(checkout_dir) and not options.golem:
    with utils.ChangedWorkingDirectory(WORKING_DIR, quiet=options.quiet):
      GitClone(repo, checkout_dir, options.quiet)
  head_sha1 = utils.get_HEAD_sha1_for_checkout(checkout_dir)
  if head_sha1 != repo.revision:
    msg = 'Checkout is not target revision for {} in {}.'.format(
        app.name, checkout_dir)
    # A revision mismatch is fatal unless the caller opted out.
    if not options.ignore_versions:
      raise Exception(msg)
    warn(msg)
  outcome['status'] = 'success'
  build_dir = os.path.join(checkout_dir, app.dir) if app.dir else checkout_dir
  shrinker_results = BuildAppWithSelectedShrinkers(
      app, repo, options, build_dir, temp_dir)
  outcome.update(shrinker_results)
  return outcome
def GitClone(repo, checkout_dir, quiet):
  """Clone repo.url into checkout_dir and pin the checkout to repo.revision.

  Returns the stripped output of the `git clone` invocation.
  """
  clone_output = subprocess.check_output(
      ['git', 'clone', repo.url, checkout_dir]).strip()
  # If the default branch head already is the pinned revision, we are done.
  if utils.get_HEAD_sha1_for_checkout(checkout_dir) == repo.revision:
    return clone_output
  # Otherwise rewind the fresh clone to the pinned revision.
  warn('Target revision is not head in {}.'.format(checkout_dir))
  with utils.ChangedWorkingDirectory(checkout_dir, quiet=quiet):
    subprocess.check_output(['git', 'reset', '--hard', repo.revision])
  return clone_output
def main(): working_dir = run_on_as_app.WORKING_DIR print 'Removing directories that do not match checked out revision' if not os.path.exists(working_dir): os.makedirs(working_dir) else: for repo in run_on_as_app.APP_REPOSITORIES: repo_dir = os.path.join(working_dir, repo.name) if os.path.exists(repo_dir) \ and utils.get_HEAD_sha1_for_checkout(repo_dir) != repo.revision: print 'Removing %s' % repo_dir shutil.rmtree(repo_dir) print 'Downloading all missing apps' run_on_as_app.clone_repositories(quiet=False) # Package all files as cloud dependency print 'Creating archive for opensource_apps (this may take some time)' if os.path.exists(utils.OPENSOURCE_APPS_FOLDER): shutil.rmtree(utils.OPENSOURCE_APPS_FOLDER) for repo in run_on_as_app.APP_REPOSITORIES: repo_dir = os.path.join(working_dir, repo.name) # Ensure there is a local gradle user home in the folder for app in repo.apps: app_checkout_dir = (os.path.join(repo_dir, app.dir) if app.dir else repo_dir) gradle_user_home = os.path.join(app_checkout_dir, run_on_as_app.GRADLE_USER_HOME) if not os.path.exists(gradle_user_home): print 'Could not find the local gradle cache at %s. You should run ' \ 'run_on_as_app for app %s at least once.' \ % (gradle_user_home, repo.name) sys.exit(1) dst = os.path.join(utils.OPENSOURCE_APPS_FOLDER, repo.name) shutil.copytree(repo_dir, dst) with utils.ChangedWorkingDirectory(utils.THIRD_PARTY): subprocess.check_call([ 'upload_to_google_storage.py', '-a', '--bucket', 'r8-deps', 'opensource_apps' ]) print 'To have apps benchmarked on Golem, the updated apps have to be ' \ 'downloaded to the runners by ssh\'ing into each runner and do:\n' \ 'cd ../golem\n' \ 'update_dependencies.sh\n'
def main(): # We need prodaccess to upload to x20 utils.check_prodacces() working_dir = run_on_as_app.WORKING_DIR print 'Removing directories that do not match checked out revision' with utils.ChangedWorkingDirectory(working_dir): for repo in run_on_as_app.APP_REPOSITORIES: repo_dir = os.path.join(working_dir, repo.name) if os.path.exists(repo_dir) \ and utils.get_HEAD_sha1_for_checkout(repo_dir) != repo.revision: print 'Removing %s' % repo_dir shutil.rmtree(repo_dir) print 'Downloading all missing apps' run_on_as_app.clone_repositories(quiet=False) # Package all files as x20 dependency parent_dir = os.path.dirname(working_dir) with utils.ChangedWorkingDirectory(parent_dir): print 'Creating archive for opensource_apps (this may take some time)' working_dir_name = os.path.basename(working_dir) repo_dirs = [working_dir_name + '/' + repo.name for repo in run_on_as_app.APP_REPOSITORIES] filename = utils.create_archive("opensource_apps", repo_dirs) sha1 = utils.get_sha1(filename) dest = os.path.join(upload_to_x20.GMSCORE_DEPS, sha1) upload_to_x20.uploadFile(filename, dest) sha1_file = '%s.sha1' % filename with open(sha1_file, 'w') as output: output.write(sha1) shutil.move(sha1_file, os.path.join(utils.THIRD_PARTY, 'opensource_apps.tar.gz.sha1')) print 'To have apps benchmarked on Golem, the updated apps have to be ' \ 'downloaded to the runners by ssh\'ing into each runner and do:\n' \ 'cd ../golem\n' \ 'update_dependencies.sh\n'
def main(): working_dir = run_on_as_app.WORKING_DIR print 'Removing directories that do not match checked out revision' if not os.path.exists(working_dir): os.makedirs(working_dir) else: for repo in run_on_as_app.APP_REPOSITORIES: repo_dir = os.path.join(working_dir, repo.name) if os.path.exists(repo_dir) \ and utils.get_HEAD_sha1_for_checkout(repo_dir) != repo.revision: print 'Removing %s' % repo_dir shutil.rmtree(repo_dir) print 'Downloading all missing apps' run_on_as_app.clone_repositories(quiet=False) # Package all files as cloud dependency print 'Creating archive for opensource_apps (this may take some time)' if os.path.exists(utils.OPENSOURCE_APPS_FOLDER): shutil.rmtree(utils.OPENSOURCE_APPS_FOLDER) for repo in run_on_as_app.APP_REPOSITORIES: repo_dir = os.path.join(working_dir, repo.name) dst = os.path.join(utils.OPENSOURCE_APPS_FOLDER, repo.name) shutil.copytree(repo_dir, dst) with utils.ChangedWorkingDirectory(utils.THIRD_PARTY): subprocess.check_call([ 'upload_to_google_storage.py', '-a', '--bucket', 'r8-deps', 'opensource_apps' ]) print 'To have apps benchmarked on Golem, the updated apps have to be ' \ 'downloaded to the runners by ssh\'ing into each runner and do:\n' \ 'cd ../golem\n' \ 'update_dependencies.sh\n'