Ejemplo n.º 1
0
def GetResultsForApp(app, config, options, temp_dir):
    """Clone/update the app's git checkout and build it with each shrinker.

    Args:
      app: Name of the app; also used as the checkout directory name.
      config: Dict with at least a 'git_repo' entry for the remote URL.
      options: Parsed command-line options (uses .quiet and .pull).
      temp_dir: Scratch directory passed through to the build.

    Returns:
      Dict with a 'status' entry ('success' or 'failed'), an optional
      'error_message', and one entry per shrinker with its build result.
    """
    git_repo = config['git_repo']

    # Checkout and build in the build directory.
    checkout_dir = os.path.join(WORKING_DIR, app)

    result = {}

    if not os.path.exists(checkout_dir):
        with utils.ChangedWorkingDirectory(WORKING_DIR, quiet=options.quiet):
            GitClone(git_repo)
    elif options.pull:
        with utils.ChangedWorkingDirectory(checkout_dir, quiet=options.quiet):
            # Checkout build.gradle to avoid merge conflicts.
            if IsTrackedByGit('build.gradle'):
                GitCheckout('build.gradle')

            if not GitPull():
                result['status'] = 'failed'
                result['error_message'] = 'Unable to pull from remote'
                return result

    result['status'] = 'success'

    result_per_shrinker = BuildAppWithSelectedShrinkers(
        app, config, options, checkout_dir, temp_dir)
    # Fixed: items() instead of the Python 2-only iteritems() so this
    # also runs under Python 3.
    for shrinker, shrinker_result in result_per_shrinker.items():
        result[shrinker] = shrinker_result

    return result
Ejemplo n.º 2
0
def release_studio_or_aosp(r8_checkout, path, options, git_message):
    """Update the r8 prebuilts in a studio/AOSP 'repo' checkout and upload
    the change for review.

    Args:
      r8_checkout: Path of the local r8 checkout to release from.
      path: Root of the studio/AOSP repo checkout to update.
      options: Parsed options; uses use_existing_work_branch, no_sync,
        version and no_upload.
      git_message: Commit message for the prebuilts change.

    Returns:
      The stdout of 'repo upload' when an upload is performed, else None.
    """
    with utils.ChangedWorkingDirectory(path):
        if not options.use_existing_work_branch:
            # Drop any stale work branch from a previous run; plain call()
            # because abandoning a non-existent branch is fine.
            subprocess.call(['repo', 'abandon', 'update-r8'])
        if not options.no_sync:
            subprocess.check_call(['repo', 'sync', '-cq', '-j', '16'])

        prebuilts_r8 = os.path.join(path, 'prebuilts', 'r8')

        if not options.use_existing_work_branch:
            with utils.ChangedWorkingDirectory(prebuilts_r8):
                subprocess.check_call(['repo', 'start', 'update-r8'])

        update_prebuilds(r8_checkout, options.version, path)

        with utils.ChangedWorkingDirectory(prebuilts_r8):
            if not options.use_existing_work_branch:
                subprocess.check_call(
                    ['git', 'commit', '-a', '-m', git_message])
            else:
                print('Not committing when --use-existing-work-branch. ' +
                      'Commit message should be:\n\n' + git_message + '\n')
            # Don't upload if requested not to, or if changes are not committed due
            # to --use-existing-work-branch
            if not options.no_upload and not options.use_existing_work_branch:
                # Feed 'y' to repo upload's interactive confirmation prompt.
                process = subprocess.Popen(['repo', 'upload', '.', '--verify'],
                                           stdin=subprocess.PIPE)
                return process.communicate(input='y\n')[0]
Ejemplo n.º 3
0
def repack(apk, processed_out, resources, temp, quiet):
    """Build a copy of `apk` whose dex (and matching resource) files come
    from `processed_out`.

    When `processed_out` is empty the untouched copy is returned as-is.
    Returns the path of the (re)packed APK inside `temp`.
    """
    repacked = os.path.join(temp, 'processed.apk')
    shutil.copyfile(apk, repacked)
    if not processed_out:
        utils.Print('Using original APK as is', quiet=quiet)
        return repacked
    utils.Print('Repacking APK with dex files from {}'.format(repacked),
                quiet=quiet)

    # Strip the original dex entries from the copied APK.
    with utils.ChangedWorkingDirectory(temp, quiet=quiet):
        utils.RunCmd(['zip', '-d', 'processed.apk', '*.dex'], quiet=quiet)

    # If the processed output is an archive, unpack it into `temp` first.
    if processed_out.endswith('.zip') or processed_out.endswith('.jar'):
        unzip_cmd = ['unzip', processed_out, '-d', temp]
        if quiet:
            unzip_cmd.insert(1, '-q')
        utils.RunCmd(unzip_cmd, quiet=quiet)
        processed_out = temp

    # Insert the new dex and resource files from `processed_out` into the APK.
    with utils.ChangedWorkingDirectory(processed_out, quiet=quiet):
        new_entries = glob.glob('*.dex')
        if resources:
            new_entries = new_entries + glob.glob(resources)
        utils.RunCmd(['zip', '-u', '-9', repacked] + new_entries, quiet=quiet)
    return repacked
Ejemplo n.º 4
0
    def release_google3(options):
        """Release the given r8 version into google3's third_party/java/r8.

        Downloads the release artifacts, opens the files with g4, updates
        METADATA, sanity-checks the released version with a blaze run of
        d8, and (unless --no_upload) creates the g4 change.

        Returns a status string (dry-run notice or change description).
        """
        print "Releasing for Google 3"
        if options.dry_run:
            return 'DryRun: omitting g3 release for %s' % options.version

        # Create/switch to the p4 client and locate third_party/java/r8.
        # NOTE(review): reads `args.p4_client` from an enclosing scope
        # instead of `options` — confirm that is intentional.
        google3_base = subprocess.check_output(
            ['p4', 'g4d', '-f', args.p4_client]).rstrip()
        third_party_r8 = os.path.join(google3_base, 'third_party', 'java',
                                      'r8')
        today = datetime.date.today()
        with utils.ChangedWorkingDirectory(third_party_r8):
            # download files
            g4_open('full.jar')
            g4_open('src.jar')
            g4_open('lib.jar')
            g4_open('lib.jar.map')
            g4_open('desugar_jdk_libs.json')
            g4_open('desugar_jdk_libs_configuration.jar')
            download_file(options.version, 'r8-full-exclude-deps.jar',
                          'full.jar')
            download_file(options.version, 'r8-src.jar', 'src.jar')
            download_file(options.version, 'r8lib-exclude-deps.jar', 'lib.jar')
            download_file(options.version, 'r8lib-exclude-deps.jar.map',
                          'lib.jar.map')
            download_file(options.version, 'desugar_jdk_libs.json',
                          'desugar_jdk_libs.json')
            download_file(options.version,
                          'desugar_jdk_libs_configuration.jar',
                          'desugar_jdk_libs_configuration.jar')
            g4_open('METADATA')
            # Rewrite the version and the last-updated date in METADATA.
            sed(r'[1-9]\.[0-9]{1,2}\.[0-9]{1,3}-dev', options.version,
                os.path.join(third_party_r8, 'METADATA'))
            sed(r'\{ year.*\}', ('{ year: %i month: %i day: %i }' %
                                 (today.year, today.month, today.day)),
                os.path.join(third_party_r8, 'METADATA'))

            # Make the downloaded artifacts writable.
            subprocess.check_output('chmod u+w *', shell=True)

        with utils.ChangedWorkingDirectory(google3_base):
            # Sanity check: the built d8 must report the released version.
            blaze_result = blaze_run('//third_party/java/r8:d8 -- --version')

            assert options.version in blaze_result

            if not options.no_upload:
                change_result = g4_change(options.version)
                change_result += 'Run \'(g4d ' + args.p4_client \
                                 + ' && tap_presubmit -p all --train -c ' \
                                 + get_cl_id(change_result) + ')\' for running TAP presubmit.'
                return change_result
def BuildDesugaredLibrary(checkout_dir, variant):
    """Build the desugared library with bazel for the given JDK variant.

    Args:
      checkout_dir: Root of the desugared-library checkout.
      variant: Either 'jdk8' or 'jdk11'.

    Returns:
      Tuple (library_jar, maven_zip) with the paths of the built library
      jar and the maven release zip inside bazel-bin.

    Raises:
      Exception: If `variant` is not a supported value.
    """
    if variant != 'jdk8' and variant != 'jdk11':
        # Fixed: the original message lacked a space ('...jdk11is not...').
        raise Exception('Variant ' + variant + ' is not supported')
    with utils.ChangedWorkingDirectory(checkout_dir):
        bazel = os.path.join(utils.BAZEL_TOOL, 'lib', 'bazel', 'bin', 'bazel')
        cmd = [
            bazel, 'build',
            'maven_release' + ('_jdk11' if variant == 'jdk11' else '')
        ]
        utils.PrintCmd(cmd)
        subprocess.check_call(cmd)
        # Shut bazel down so it does not keep the workspace locked.
        cmd = [bazel, 'shutdown']
        utils.PrintCmd(cmd)
        subprocess.check_call(cmd)

        # Locate the library jar and the maven zip with the jar from the
        # bazel build.
        if variant == 'jdk8':
            library_jar = os.path.join(checkout_dir, 'bazel-bin', 'src',
                                       'share', 'classes', 'java',
                                       'libjava.jar')
        else:
            library_jar = os.path.join(checkout_dir, 'bazel-bin', 'jdk11',
                                       'src', 'java_base_selected.jar')
        # Bug fix: the original used `variant != 'jdk11'`, which appended
        # the '_jdk11' suffix for jdk8 and omitted it for jdk11 — the
        # opposite of the 'maven_release[_jdk11]' target built above.
        maven_zip = os.path.join(
            checkout_dir, 'bazel-bin',
            LIBRARY_NAME + ('_jdk11' if variant == 'jdk11' else '') + '.zip')
        return (library_jar, maven_zip)
Ejemplo n.º 6
0
def clone_repositories(quiet):
  """Clone every app repository into WORKING_DIR, skipping checkouts that
  already exist on disk."""
  with utils.ChangedWorkingDirectory(WORKING_DIR):
    for repo in APP_REPOSITORIES:
      destination = os.path.join(WORKING_DIR, repo.name)
      if os.path.exists(destination):
        continue
      GitClone(repo, destination, quiet)
Ejemplo n.º 7
0
def GetResultsForApp(app, repo, options, temp_dir):
  """Ensure the repo checkout for `app` exists and is at the pinned
  revision, then build the app with each selected shrinker.

  Args:
    app: App descriptor (uses .name and .dir).
    repo: Repository descriptor (uses .name and .revision).
    options: Parsed options (uses .golem, .quiet, .ignore_versions).
    temp_dir: Scratch directory passed through to the build.

  Returns:
    Dict with a 'status' entry plus one entry per shrinker result.

  Raises:
    Exception: If the checkout is not at repo.revision and
      options.ignore_versions is not set.
  """
  # Checkout and build in the build directory.
  repo_name = repo.name
  repo_checkout_dir = os.path.join(WORKING_DIR, repo_name)

  result = {}

  if not os.path.exists(repo_checkout_dir) and not options.golem:
    with utils.ChangedWorkingDirectory(WORKING_DIR, quiet=options.quiet):
      GitClone(repo, repo_checkout_dir, options.quiet)

  checkout_rev = utils.get_HEAD_sha1_for_checkout(repo_checkout_dir)
  if repo.revision != checkout_rev:
    msg = 'Checkout is not target revision for {} in {}.'.format(
        app.name, repo_checkout_dir)
    if options.ignore_versions:
      warn(msg)
    else:
      raise Exception(msg)

  result['status'] = 'success'

  app_checkout_dir = (os.path.join(repo_checkout_dir, app.dir)
                      if app.dir else repo_checkout_dir)

  result_per_shrinker = BuildAppWithSelectedShrinkers(
      app, repo, options, app_checkout_dir, temp_dir)
  # Fixed: items() instead of the Python 2-only iteritems() so this also
  # runs under Python 3.
  for shrinker, shrinker_result in result_per_shrinker.items():
    result[shrinker] = shrinker_result

  return result
Ejemplo n.º 8
0
Archivo: gradle.py Proyecto: qssq/r8
def RunGradleInGetOutput(gradleCmd, args, cwd, env=None):
    """Run gradle with `args` in directory `cwd` and return its captured
    stdout."""
    EnsureDeps()
    command = [gradleCmd] + list(args)
    utils.PrintCmd(command)
    with utils.ChangedWorkingDirectory(cwd):
        return subprocess.check_output(command, env=GetJavaEnv(env))
Ejemplo n.º 9
0
def generate_file_info(path, options):
    """Walk `path` and build a map from dex file path to its FileInfo.

    When options.use_code_size is set, the expensive per-file code-size
    computation is done on a small pool of worker threads; otherwise
    sizes are filled in synchronously during the walk.

    Returns:
      Dict mapping relative dex file path to its FileInfo entry.
    """
    file_info_map = {}
    with utils.ChangedWorkingDirectory(path):
        for root, dirs, files in os.walk('.'):
            for f in files:
                assert f.endswith('dex')
                file_path = os.path.join(root, f)
                entry = FileInfo(file_path, path)
                if not options.use_code_size:
                    entry.set_size(False)
                file_info_map[file_path] = entry
    threads = []
    # Fixed: materialize the values — under Python 3 dict.values() is a
    # view with no pop(), which would crash the worker loop below.
    file_infos = list(file_info_map.values()) if options.use_code_size else []
    while len(file_infos) > 0 or len(threads) > 0:
        # Reap finished workers. Fixed: the original removed from
        # `threads` while iterating it, which skips elements.
        threads = [t for t in threads if t.is_alive()]
        # Back off while the pool is full or we are only draining workers.
        if len(threads) == MAX_THREADS or len(file_infos) == 0:
            time.sleep(0.5)
        while len(threads) < MAX_THREADS and len(file_infos) > 0:
            info = file_infos.pop()
            print('Added %s for size calculation' % info.full_path)
            t = threading.Thread(target=info.set_size,
                                 args=(options.use_code_size, ))
            threads.append(t)
            t.start()
        print('Missing %s files, threads=%s ' %
              (len(file_infos), len(threads)))

    return file_info_map
Ejemplo n.º 10
0
def extract_apk(apk, output):
    """Unpack `apk` into directory `output`, replacing any previous
    contents, and return the absolute paths of the top-level dex files."""
    if os.path.exists(output):
        shutil.rmtree(output)
    zipfile.ZipFile(apk).extractall(output)
    with utils.ChangedWorkingDirectory(output):
        return [os.path.join(output, name) for name in glob.glob('*.dex')]
Ejemplo n.º 11
0
def checkout_r8(temp, branch):
    """Clone the r8 repository into `temp` and create a local
    'dev-release' branch tracking origin/<branch>. Returns `temp`."""
    subprocess.check_call(['git', 'clone', utils.REPO_SOURCE, temp])
    with utils.ChangedWorkingDirectory(temp):
        upstream = 'origin/%s' % branch
        subprocess.check_call(
            ['git', 'new-branch', '--upstream', upstream, 'dev-release'])
    return temp
Ejemplo n.º 12
0
def CreateDocumentation():
  """Generate the SDK documentation with dartdoc.

  Builds a private copy of the dartdoc dart-sdk, stages the Fletch and
  third-party packages into a temp dir as one combined 'fletch_sdk'
  package, runs `pub get` and dartdoc on it, and finally patches the
  generated index.html.
  """
  EnsureDartDoc()
  docs_out = join('out', 'docs')
  sdk = join('third_party', 'dartdoc_deps', 'dart-sdk')
  sdk_dst = join('out', 'dartdoc-dart-sdk')
  EnsureDeleted(sdk_dst)
  copytree(sdk, sdk_dst)
  copytree('lib', join(sdk_dst, 'lib', 'mobile'))
  pub = abspath(join(sdk_dst, 'bin', 'pub'))
  dartdoc = join(sdk_dst, 'bin', 'dartdoc')
  # We recreate the same structure we have in the repo in a copy to not
  # pollute our workspace
  with utils.TempDir() as temp:
    # Copy Fletch packages.
    pkg_copy = join(temp, 'pkg')
    makedirs(pkg_copy)
    for pkg in SDK_PACKAGES:
      pkg_path = join('pkg', pkg)
      pkg_dst = join(pkg_copy, pkg)
      copytree(pkg_path, pkg_dst)
      print 'copied %s to %s' % (pkg_path, pkg_dst)
    # Copy third party packages.
    third_party_copy = join(temp, 'third_party')
    makedirs(third_party_copy)
    for pkg in THIRD_PARTY_PACKAGES:
      pkg_path = join('third_party', pkg)
      pkg_dst = join(third_party_copy, pkg)
      copytree(pkg_path, pkg_dst)
      print 'copied %s to %s' % (pkg_path, pkg_dst)
    # Create fake combined package dir.
    sdk_pkg_dir = join(pkg_copy, 'fletch_sdk')
    makedirs(sdk_pkg_dir)
    # Copy readme.
    copyfile(join('pkg', 'fletch_sdk_readme.md'),
             join(sdk_pkg_dir, 'README.md'))
    # Add pubspec file.
    CreateDocsPubSpec('%s/pubspec.yaml' % sdk_pkg_dir)
    # Add lib dir, and a generated file for each package.
    sdk_pkg_lib_dir = join(sdk_pkg_dir, 'lib')
    makedirs(sdk_pkg_lib_dir)
    CreateDocsLibs(sdk_pkg_lib_dir, pkg_copy)
    # Call pub get.
    with utils.ChangedWorkingDirectory(sdk_pkg_dir):
      print 'Calling pub get in %s' % sdk_pkg_dir
      subprocess.check_call([pub, 'get'])
    # Call dartdoc.
    EnsureDeleted(docs_out)
    subprocess.check_call([dartdoc, '--input', sdk_pkg_dir,'--output',
                          docs_out])

    # Patch the generated index.html file to fix a few issues.
    indexFile = join(docs_out, 'index.html')
    with open(indexFile, 'r') as fin:
      s = fin.read()
      s = s.replace('Fletch_SDK', 'Fletch SDK')
      s = s.replace('>package<', '><')
    with open(indexFile, 'w') as fout:
      fout.write(s)
Ejemplo n.º 13
0
def release_studio_or_aosp(path, options, git_message):
    """Update the r8 prebuilts in the repo checkout at `path`, commit the
    change and upload it for review.

    Args:
      path: Root of the studio/AOSP repo checkout.
      options: Parsed options; uses no_sync and version.
      git_message: Commit message for the prebuilts change.

    Returns:
      The stdout of 'repo upload'.
    """
    with utils.ChangedWorkingDirectory(path):
        # Drop any stale work branch from a previous run; plain call()
        # because abandoning a non-existent branch is fine.
        subprocess.call(['repo', 'abandon', 'update-r8'])
        if not options.no_sync:
            subprocess.check_call(['repo', 'sync', '-cq', '-j', '16'])

        prebuilts_r8 = os.path.join(path, 'prebuilts', 'r8')

        with utils.ChangedWorkingDirectory(prebuilts_r8):
            subprocess.check_call(['repo', 'start', 'update-r8'])

        update_prebuilds(options.version, path)

        with utils.ChangedWorkingDirectory(prebuilts_r8):
            subprocess.check_call(['git', 'commit', '-a', '-m', git_message])
            # Feed 'y' to repo upload's interactive confirmation prompt.
            process = subprocess.Popen(['repo', 'upload', '.', '--verify'],
                                       stdin=subprocess.PIPE)
            return process.communicate(input='y\n')[0]
Ejemplo n.º 14
0
def read_dump(args, temp):
    """Return a Dump for args.dump; a zip dump is extracted into `temp`
    first, a directory dump is used in place."""
    if args.dump is None:
        error("A dump file or directory must be specified")
    if os.path.isdir(args.dump):
        return Dump(args.dump)
    archive = zipfile.ZipFile(os.path.abspath(args.dump), 'r')
    with utils.ChangedWorkingDirectory(temp):
        archive.extractall()
        return Dump(temp)
Ejemplo n.º 15
0
def run_aapt_split_pack(aapt, api, app):
    """Package the split resources of `app` with aapt into
    bin/split_resources.ap_ using the split manifest."""
    with utils.ChangedWorkingDirectory(get_sample_dir(app)):
        out_path = os.path.join(get_bin_path(app), 'split_resources.ap_')
        aapt_args = [
            'package', '-v', '-f',
            '-I', utils.get_android_jar(api),
            '-M', 'split_manifest/AndroidManifest.xml',
            '-S', 'res',
            '-F', out_path,
        ]
        run_aapt(aapt, aapt_args)
Ejemplo n.º 16
0
def BuildAppWithSelectedShrinkers(app, repo, options, checkout_dir, temp_dir):
    """Build `app` once per selected shrinker and collect per-shrinker
    results.

    For each shrinker in options.shrinker the app is built, the dex size
    measured, the gradle profile parsed, and — depending on options — the
    monkey tester, recompilation steps and instrumentation tests are run.
    Build failures are caught and recorded instead of propagated.

    Returns:
      Dict mapping shrinker name to its result dict.
    """
    result_per_shrinker = {}

    with utils.ChangedWorkingDirectory(checkout_dir, quiet=options.quiet):
        for shrinker in options.shrinker:
            apk_dest = None

            result = {}
            try:
                out_dir = os.path.join(checkout_dir, 'out', shrinker)
                (apk_dest, profile_dest_dir, proguard_config_file) = \
                    BuildAppWithShrinker(
                        app, repo, shrinker, checkout_dir, out_dir, temp_dir,
                        options)
                dex_size = ComputeSizeOfDexFilesInApk(apk_dest)
                result['apk_dest'] = apk_dest
                result['build_status'] = 'success'
                result['dex_size'] = dex_size
                result['profile_dest_dir'] = profile_dest_dir

                # Keep only the compiler-related tasks from the profile.
                # Fixed: items() instead of the Python 2-only iteritems()
                # so this also runs under Python 3.
                profile = as_utils.ParseProfileReport(profile_dest_dir)
                result['profile'] = {
                    task_name: duration
                    for task_name, duration in profile.items()
                    if as_utils.IsGradleCompilerTask(task_name, shrinker)
                }
            except Exception as e:
                warn('Failed to build {} with {}'.format(app.name, shrinker))
                if e:
                    print('Error: ' + str(e))
                result['build_status'] = 'failed'

            if result.get('build_status') == 'success':
                if options.monkey:
                    result['monkey_status'] = 'success' if RunMonkey(
                        app, options, apk_dest) else 'failed'

                if 'r8' in shrinker and options.r8_compilation_steps > 1:
                    result['recompilation_results'] = \
                        ComputeRecompilationResults(
                            app, repo, options, checkout_dir, temp_dir, shrinker,
                            proguard_config_file)

                if options.run_tests and app.has_instrumentation_tests:
                    result['instrumentation_test_results'] = \
                        ComputeInstrumentationTestResults(
                            app, options, checkout_dir, out_dir, shrinker)

            result_per_shrinker[shrinker] = result

    # Only print a per-app summary when several apps are being processed.
    if len(options.apps) > 1:
        print('')
        LogResultsForApp(app, result_per_shrinker, options)
        print('')

    return result_per_shrinker
Ejemplo n.º 17
0
def RunGradle(args, throw_on_failure=True):
    """Run gradle with `args` from the repository root.

    Args:
      args: Extra command-line arguments passed to gradle.
      throw_on_failure: When True, raise on a non-zero gradle exit code.

    Returns:
      The gradle process exit code.

    Raises:
      Exception: If gradle fails and throw_on_failure is set.
    """
    EnsureGradle()
    cmd = [GRADLE]
    cmd.extend(args)
    utils.PrintCmd(cmd)
    with utils.ChangedWorkingDirectory(utils.REPO_ROOT):
        return_value = subprocess.call(cmd)
        if throw_on_failure and return_value != 0:
            # Fixed: the original used a bare `raise`, which fails with
            # "No active exception to re-raise". Raise a real exception,
            # matching RunGradleIn elsewhere in this project.
            raise Exception('Failed to execute gradle')
        return return_value
Ejemplo n.º 18
0
def GitClone(repo, checkout_dir, quiet):
  """Clone `repo` into `checkout_dir`, resetting to repo.revision when
  HEAD does not already match it. Returns git clone's output."""
  clone_output = subprocess.check_output(
      ['git', 'clone', repo.url, checkout_dir]).strip()
  if utils.get_HEAD_sha1_for_checkout(checkout_dir) != repo.revision:
    warn('Target revision is not head in {}.'.format(checkout_dir))
    with utils.ChangedWorkingDirectory(checkout_dir, quiet=quiet):
      subprocess.check_output(['git', 'reset', '--hard', repo.revision])
  return clone_output
Ejemplo n.º 19
0
Archivo: gradle.py Proyecto: qssq/r8
def RunGradleIn(gradleCmd, args, cwd, throw_on_failure=True, env=None):
    """Run gradle with `args` in directory `cwd` and return its exit
    code, raising when it fails and throw_on_failure is set."""
    EnsureDeps()
    command = [gradleCmd] + list(args)
    utils.PrintCmd(command)
    with utils.ChangedWorkingDirectory(cwd):
        exit_code = subprocess.call(command, env=GetJavaEnv(env))
        if throw_on_failure and exit_code != 0:
            raise Exception('Failed to execute gradle')
        return exit_code
Ejemplo n.º 20
0
def generate_file_info(path):
    """Map every dex file found under `path` (recursively) to a FileInfo
    entry keyed by its path relative to `path`."""
    info_by_path = {}
    with utils.ChangedWorkingDirectory(path):
        for root, _dirs, files in os.walk('.'):
            for name in files:
                assert name.endswith('dex')
                rel_path = os.path.join(root, name)
                info_by_path[rel_path] = FileInfo(rel_path, path)
    return info_by_path
Ejemplo n.º 21
0
def run_aapt_pack(aapt, api, app):
    """Package the sample app's resources with aapt, generating R.java
    (-J), resources.ap_ (-F) and the aapt proguard options file (-G)."""
    with utils.ChangedWorkingDirectory(get_sample_dir(app)):
        aapt_args = [
            'package', '-v', '-f',
            '-I', utils.get_android_jar(api),
            '-M', 'AndroidManifest.xml',
            '-A', 'assets',
            '-S', 'res',
            '-m',
            '-J', get_gen_path(app),
            '-F', os.path.join(get_bin_path(app), 'resources.ap_'),
            '-G', os.path.join(get_build_dir(app), 'proguard_options'),
        ]
        run_aapt(aapt, aapt_args)
Ejemplo n.º 22
0
def compile_with_javac(api, app):
    """Compile the sample app's sources with javac against the android
    jar and guava, writing class files into the app's bin directory."""
    with utils.ChangedWorkingDirectory(get_sample_dir(app)):
        sources = glob.glob(SRC_LOCATION.format(app=app))
        classpath = '%s:%s' % (utils.get_android_jar(api), get_guava_jar())
        sourcepath = '%s:%s:%s' % (
            get_src_path(app), get_gen_path(app), get_guava_jar())
        command = [
            DEFAULT_JAVAC,
            '-classpath', classpath,
            '-sourcepath', sourcepath,
            '-d', get_bin_path(app),
        ]
        command.extend(sources)
        utils.PrintCmd(command)
        subprocess.check_call(command)
Ejemplo n.º 23
0
def repack(processed_out, original_apk, temp, quiet):
    """Return a copy of `original_apk` whose dex files are replaced by
    the ones found in `processed_out` (or the unmodified copy when
    `processed_out` is empty)."""
    repacked = os.path.join(temp, 'processed.apk')
    shutil.copyfile(original_apk, repacked)
    if not processed_out:
        utils.Print('Using original APK as is', quiet=quiet)
        return repacked
    utils.Print('Repacking APK with dex files from {}'.format(repacked),
                quiet=quiet)
    # Strip the original dex entries from the copied APK.
    with utils.ChangedWorkingDirectory(temp, quiet=quiet):
        utils.RunCmd(['zip', '-d', 'processed.apk', '*.dex'], quiet=quiet)
    # When the processed output is an archive, unpack it into `temp`.
    if processed_out.endswith('.zip') or processed_out.endswith('.jar'):
        unzip_cmd = ['unzip', processed_out, '-d', temp]
        if quiet:
            unzip_cmd.insert(1, '-q')
        utils.RunCmd(unzip_cmd, quiet=quiet)
        processed_out = temp
    # Insert the new dex files into the APK.
    with utils.ChangedWorkingDirectory(processed_out, quiet=quiet):
        utils.RunCmd(['zip', '-u', '-9', repacked] + glob.glob('*.dex'),
                     quiet=quiet)
    return repacked
Ejemplo n.º 24
0
def main():
  """Package the checked-out opensource apps and upload them to x20.

  Removes stale checkouts, re-clones missing apps, archives everything
  under WORKING_DIR, uploads the archive to x20, and records its sha1 in
  third_party so it can be fetched as a dependency.
  """
  # We need prodaccess to upload to x20
  utils.check_prodacces()

  working_dir = run_on_as_app.WORKING_DIR

  print 'Removing directories that do not match checked out revision'
  with utils.ChangedWorkingDirectory(working_dir):
    for repo in run_on_as_app.APP_REPOSITORIES:
      repo_dir = os.path.join(working_dir, repo.name)
      if os.path.exists(repo_dir) \
          and utils.get_HEAD_sha1_for_checkout(repo_dir) != repo.revision:
        print 'Removing %s' % repo_dir
        shutil.rmtree(repo_dir)

  print 'Downloading all missing apps'
  run_on_as_app.clone_repositories(quiet=False)

  # Package all files as x20 dependency
  parent_dir = os.path.dirname(working_dir)
  with utils.ChangedWorkingDirectory(parent_dir):
    print 'Creating archive for opensource_apps (this may take some time)'
    working_dir_name = os.path.basename(working_dir)
    repo_dirs = [working_dir_name + '/' + repo.name
                 for repo in run_on_as_app.APP_REPOSITORIES]
    filename = utils.create_archive("opensource_apps", repo_dirs)
    sha1 = utils.get_sha1(filename)
    dest = os.path.join(upload_to_x20.GMSCORE_DEPS, sha1)
    upload_to_x20.uploadFile(filename, dest)
    # Record the archive's sha1 in third_party for dependency download.
    sha1_file = '%s.sha1' % filename
    with open(sha1_file, 'w') as output:
      output.write(sha1)
    shutil.move(sha1_file,
                os.path.join(utils.THIRD_PARTY, 'opensource_apps.tar.gz.sha1'))

  print 'To have apps benchmarked on Golem, the updated apps have to be ' \
        'downloaded to the runners by ssh\'ing into each runner and do:\n' \
        'cd ../golem\n' \
        'update_dependencies.sh\n'
Ejemplo n.º 25
0
def repack(processed_out, original_apk, temp):
    """Repack `original_apk` with the dex files from `processed_out`.

    Copies the APK into `temp`, strips its original dex entries, and
    inserts the dex files found in `processed_out` (extracting it first
    when it is a zip or jar). Returns the path of the repacked APK, or
    of the unmodified copy when `processed_out` is empty.
    """
    processed_apk = os.path.join(temp, 'processed.apk')
    shutil.copyfile(original_apk, processed_apk)
    if not processed_out:
        print 'Using original APK as is'
        return processed_apk
    print 'Repacking APK with dex files from', processed_apk
    # Delete the original dex entries from the copied APK.
    with utils.ChangedWorkingDirectory(temp):
        cmd = ['zip', '-d', 'processed.apk', '*.dex']
        utils.PrintCmd(cmd)
        subprocess.check_call(cmd)
    # If the processed output is an archive, unpack it into `temp` first.
    if processed_out.endswith('.zip') or processed_out.endswith('.jar'):
        cmd = ['unzip', processed_out, '-d', temp]
        utils.PrintCmd(cmd)
        subprocess.check_call(cmd)
        processed_out = temp
    # Insert the new dex files into the APK.
    with utils.ChangedWorkingDirectory(processed_out):
        dex = glob.glob('*.dex')
        cmd = ['zip', '-u', '-9', processed_apk] + dex
        utils.PrintCmd(cmd)
        subprocess.check_call(cmd)
    return processed_apk
Ejemplo n.º 26
0
def read_dump(args, temp):
    """Return a Dump for args.dump; a zip dump is extracted into `temp`
    unless a previous extraction is already present (or args.override
    forces re-extraction)."""
    if args.dump is None:
        error("A dump file or directory must be specified")
    if os.path.isdir(args.dump):
        return Dump(args.dump)
    archive = zipfile.ZipFile(os.path.abspath(args.dump), 'r')
    with utils.ChangedWorkingDirectory(temp):
        # The 'r8-version' file marks a completed previous extraction.
        marker = os.path.join(temp, 'r8-version')
        if args.override or not os.path.isfile(marker):
            print("Extracting into: %s" % temp)
            archive.extractall()
            if not os.path.isfile(marker):
                error(
                    "Did not extract into %s. Either the zip file is invalid or the "
                    "dump is missing files" % temp)
        return Dump(temp)
def main():
    """Package the checked-out opensource apps and upload them to cloud
    storage as the 'opensource_apps' dependency.

    Verifies each app has a local gradle cache (produced by running
    run_on_as_app at least once), copies every repo checkout into
    utils.OPENSOURCE_APPS_FOLDER, and uploads it from third_party.
    """
    working_dir = run_on_as_app.WORKING_DIR

    print 'Removing directories that do not match checked out revision'
    if not os.path.exists(working_dir):
        os.makedirs(working_dir)
    else:
        for repo in run_on_as_app.APP_REPOSITORIES:
            repo_dir = os.path.join(working_dir, repo.name)
            if os.path.exists(repo_dir) \
                and utils.get_HEAD_sha1_for_checkout(repo_dir) != repo.revision:
                print 'Removing %s' % repo_dir
                shutil.rmtree(repo_dir)

    print 'Downloading all missing apps'
    run_on_as_app.clone_repositories(quiet=False)

    # Package all files as cloud dependency
    print 'Creating archive for opensource_apps (this may take some time)'
    if os.path.exists(utils.OPENSOURCE_APPS_FOLDER):
        shutil.rmtree(utils.OPENSOURCE_APPS_FOLDER)
    for repo in run_on_as_app.APP_REPOSITORIES:
        repo_dir = os.path.join(working_dir, repo.name)
        # Ensure there is a local gradle user home in the folder
        for app in repo.apps:
            app_checkout_dir = (os.path.join(repo_dir, app.dir)
                                if app.dir else repo_dir)
            gradle_user_home = os.path.join(app_checkout_dir,
                                            run_on_as_app.GRADLE_USER_HOME)
            if not os.path.exists(gradle_user_home):
                print 'Could not find the local gradle cache at %s. You should run ' \
                      'run_on_as_app for app %s at least once.' \
                      % (gradle_user_home, repo.name)
                sys.exit(1)
        dst = os.path.join(utils.OPENSOURCE_APPS_FOLDER, repo.name)
        shutil.copytree(repo_dir, dst)

    with utils.ChangedWorkingDirectory(utils.THIRD_PARTY):
        # Upload the staged folder with the google storage helper script.
        subprocess.check_call([
            'upload_to_google_storage.py', '-a', '--bucket', 'r8-deps',
            'opensource_apps'
        ])

    print 'To have apps benchmarked on Golem, the updated apps have to be ' \
          'downloaded to the runners by ssh\'ing into each runner and do:\n' \
          'cd ../golem\n' \
          'update_dependencies.sh\n'
Ejemplo n.º 28
0
def main():
    """Archive the build and run the drt/dartium/content-shell test steps.

    The overall status is accumulated in `result` (0 means all steps
    passed); archiving to the 'latest' bucket on the 'be' channel only
    happens when every test step succeeded.
    """
    # We need to chdir() to src/dart in order to get the correct revision number.
    with utils.ChangedWorkingDirectory(DART_PATH):
        dart_tools_utils = imp.load_source('dart_tools_utils',
                                           os.path.join('tools', 'utils.py'))
        dart_revision = dart_tools_utils.GetSVNRevision()

    version = dart_revision + '.0'
    info = upload_steps.BuildInfo(version, dart_revision)

    # Accumulates the first non-zero step status (0 == everything passed).
    result = 0

    # Archive to the revision bucket unless integration build
    if info.channel != 'integration':
        result = upload_steps.ArchiveAndUpload(info, archive_latest=False)
        # On dev/stable we archive to the latest bucket as well
        if info.channel != 'be':
            result = (upload_steps.ArchiveAndUpload(info, archive_latest=True)
                      or result)

    # Run layout tests
    if info.mode == 'Release' or platform.system() != 'Darwin':
        result = Test(info, 'drt', 'layout', 'unchecked') or result
        result = Test(info, 'drt', 'layout', 'checked') or result

    # Run dartium tests
    result = Test(info, 'dartium', 'core', 'unchecked') or result
    result = Test(info, 'dartium', 'core', 'checked') or result

    # Run ContentShell tests
    # NOTE: We don't run ContentShell tests on dartium-*-inc builders to keep
    # cycle times down.
    if not info.is_incremental:
        # If we run all checked tests on dartium, we restrict the number of
        # unchecked tests on drt to DRT_FILTER
        result = Test(info, 'drt', 'core', 'unchecked',
                      test_filter=DRT_FILTER) or result
        result = Test(info, 'drt', 'core', 'checked') or result

    # On the 'be' channel, we only archive to the latest bucket if all tests were
    # successful.
    # NOTE(review): `result` is computed but never returned or passed to
    # sys.exit() — confirm whether the caller expects an exit status.
    if result == 0 and info.channel == 'be':
        result = upload_steps.ArchiveAndUpload(info,
                                               archive_latest=True) or result
Ejemplo n.º 29
0
def GetBuildInfo():
    """Returns a tuple (name, version, mode, arch, do_archive, is_trunk) where:
    - name: A name for the build - the buildbot host if a buildbot.
    - version: A version string corresponding to this build.
    - mode: 'Debug' or 'Release'
    - arch: target architecture
    - do_archive: True iff an archive should be generated and uploaded.
    - is_trunk: True if this is a trunk build.
    """
    os.chdir(SRC_PATH)

    version = None
    mode = 'Release'
    # Fixed: `arch` was only assigned when the builder name matched
    # BUILDER_PATTERN, causing an UnboundLocalError at the return below.
    # 'ia32' matches the non-lucid64 default used in the pattern branch.
    arch = 'ia32'
    do_archive = False

    # Populate via builder environment variables.
    name = os.environ.get(BUILDER_NAME)

    # We need to chdir() to src/dart in order to get the correct revision number.
    with utils.ChangedWorkingDirectory(DART_PATH):
        dart_tools_utils = imp.load_source('dart_tools_utils',
                                           os.path.join('tools', 'utils.py'))
        dart_version = dart_tools_utils.GetVersion()
        # Raw string to keep the \d escape valid under Python 3.
        match = re.search(r'._r(\d+)', dart_version)
        dart_revision = match.group(1)

    version = dart_revision + '.0'

    if name:
        pattern = re.match(BUILDER_PATTERN, name)
        if pattern:
            arch = 'x64' if pattern.group(1) == 'lucid64' else 'ia32'
            if pattern.group(2) == 'debug':
                mode = 'Debug'
            do_archive = True

    # Fall back if not on builder. Fixed: this must happen before the
    # is_trunk check — the original evaluated `'trunk' in name` while
    # name could still be None, raising a TypeError.
    if not name:
        name = socket.gethostname().split('.')[0]
    is_trunk = 'trunk' in name

    return (name, version, mode, arch, do_archive, is_trunk)
Ejemplo n.º 30
0
    def make_release(args):
        """Stage a desugar_jdk_libs release on Google maven and publish it
        after manual confirmation.

        args.desugar_library is [library_version, configuration_version];
        both archives are fetched from the r8-releases bucket, the
        configuration archive is sanity-checked, and the staged release id
        is printed for the redir test before publishing.
        """
        library_version = args.desugar_library[0]
        configuration_version = args.desugar_library[1]

        library_archive = DESUGAR_JDK_LIBS + '.zip'
        library_jar = DESUGAR_JDK_LIBS + '.jar'
        library_artifact_id = \
            '%s:%s:%s' % (ANDROID_TOOLS_PACKAGE, DESUGAR_JDK_LIBS, library_version)

        configuration_archive = DESUGAR_JDK_LIBS_CONFIGURATION + '.zip'

        with utils.TempDir() as temp:
            with utils.ChangedWorkingDirectory(temp):
                # Bucket paths of the two release archives to download.
                library_gfile = (
                    '/bigstore/r8-releases/raw/%s/%s/%s' %
                    (DESUGAR_JDK_LIBS, library_version, library_archive))
                configuration_gfile = (
                    '/bigstore/r8-releases/raw/master/%s/%s' %
                    (configuration_version, configuration_archive))

                download_gfile(library_gfile, library_archive)
                download_gfile(configuration_gfile, configuration_archive)
                check_configuration(configuration_archive)

                release_id = gmaven_publisher_stage(
                    args, [library_gfile, configuration_gfile])

                print "Staged Release ID " + release_id + ".\n"
                gmaven_publisher_stage_redir_test_info(
                    release_id, "com.android.tools:%s:%s" %
                    (DESUGAR_JDK_LIBS, library_version), library_jar)

                print
                # Manual gate before publishing to Google maven.
                input = raw_input("Continue with publishing [y/N]:")

                if input != 'y':
                    print 'Aborting release to Google maven'
                    sys.exit(1)

                gmaven_publisher_publish(args, release_id)

                print
                print "Published. Use the email workflow for approval."