def main(argv):
  """Repackages original Chrome perf builds into a lightweight-archive bucket.

  Parses command-line options, builds the revision map (optionally
  creating/updating it, or restricting it to an overwrite range), then runs
  a single RepackageJob over the selected revisions using a temporary
  staging directory that is always cleaned up.

  Args:
    argv: Command-line arguments. NOTE(review): currently unused --
      optparse reads sys.argv directly; confirm callers before routing
      argv into parse_args().

  Returns:
    1 when the required --archive parameter is missing; None otherwise.

  Raises:
    ValueError: If --original/--repackage are missing, or --overwrite is
      given without both --start_rev and --end_rev.
  """
  option_parser = optparse.OptionParser()
  choices = ['mac', 'win32', 'win64', 'linux', 'arm', 'arm64']
  # Fixed typo in help text: "repacakge" -> "repackage".
  option_parser.add_option('-a', '--archive',
                           choices=choices,
                           help='Builders to repackage from [%s].' %
                                '|'.join(choices))
  # Verifies that the chrome executable runs.
  # Fixed missing space in the implicitly-concatenated help string
  # ("normallywithout" -> "normally without").
  option_parser.add_option('-v', '--verify',
                           action='store_true',
                           help='Verifies that the Chrome executes normally '
                                'without errors')
  # This option will update the revision map.
  option_parser.add_option('-u', '--update',
                           action='store_true',
                           help='Updates the list of revisions to repackage')
  # This option will create the revision map.
  option_parser.add_option('-c', '--create',
                           action='store_true',
                           help='Creates the list of revisions to repackage')
  # Original bucket that contains perf builds.
  # Fixed missing space ("originalChrome" -> "original Chrome").
  option_parser.add_option('-o', '--original', type='str',
                           help='Google storage bucket name containing '
                                'original Chrome builds')
  # Bucket that should archive lightweight perf builds.
  option_parser.add_option('-r', '--repackage', type='str',
                           help='Google storage bucket name '
                                'to re-archive Chrome builds')
  # Overwrites build archives for a given range.
  option_parser.add_option('-w', '--overwrite',
                           action='store_true', dest='overwrite',
                           help='Overwrite build archives')
  # Start revision for build overwrite.
  option_parser.add_option('-s', '--start_rev', type='str', dest='start_rev',
                           help='Start revision for overwrite')
  # End revision for build overwrite.
  option_parser.add_option('-e', '--end_rev', type='str', dest='end_rev',
                           help='end revision for overwrite')
  verify_run = False
  (opts, args) = option_parser.parse_args()

  if opts.archive is None:
    print('Error: missing required parameter: --archive')
    option_parser.print_help()
    return 1
  if not opts.original or not opts.repackage:
    # Fixed missing space in the concatenated message
    # ("andrepackage" -> "and repackage").
    raise ValueError('Need to specify original gs bucket url and '
                     'repackage gs bucket url')
  context = PathContext(opts.original, opts.repackage, opts.archive)

  if opts.create:
    create_upload_revision_map(context)
  if opts.update:
    update_upload_revision_map(context)
  if opts.verify:
    verify_run = True

  if opts.overwrite:
    if not opts.start_rev or not opts.end_rev:
      raise ValueError(
          'Need to specify overwrite range start (-s) and end (-e)'
          ' revision.')
    revision_map = get_revision_map_for_range(int(opts.start_rev),
                                              int(opts.end_rev))
    backward_rev = get_overwrite_revisions(revision_map)
    with open('upload_revs.json', 'w') as revision_file:
      json.dump(revision_map, revision_file)
  else:
    revision_map = get_revision_map(context)
    backward_rev = get_revisions_to_package(revision_map, context)

  base_dir = os.path.join('.', context.archive)
  # Clears any uncleared staging directories and creates a fresh one.
  bisect_repackage_utils.RemovePath(base_dir)
  bisect_repackage_utils.MaybeMakeDirectory(base_dir)
  staging_dir = os.path.abspath(
      tempfile.mkdtemp(prefix='staging', dir=base_dir))
  repackage = RepackageJob('backward_fetch', backward_rev, revision_map,
                           verify_run, staging_dir, context)
  # Multi-threading is not currently being used. But it can be used in
  # cases when the repackaging needs to be quicker.
  try:
    repackage.Start()
    repackage.WaitFor()
  except (KeyboardInterrupt, SystemExit):
    # Swallow the interrupt (original behavior) and let cleanup run below.
    pass
  finally:
    # Original code duplicated this cleanup in both the except handler and
    # the fall-through path, running it twice on interrupt; a single
    # 'finally' performs it exactly once on every exit path.
    print('Cleaning up...')
    bisect_repackage_utils.RemovePath(staging_dir)
def remove_created_files_and_path(files, paths):
  """Removes all the files and paths passed in.

  Args:
    files: Iterable of file paths; each is handed to
      bisect_repackage_utils.RemoveFile.
    paths: Iterable of directory paths; each is handed to
      bisect_repackage_utils.RemovePath.
  """
  # Renamed the loop variable from 'file', which shadowed the builtin.
  for file_path in files:
    bisect_repackage_utils.RemoveFile(file_path)
  for path in paths:
    bisect_repackage_utils.RemovePath(path)