def execute(args):
  """Create a new config directory and deployment.

  Enables the required cloud services, verifies ownership of the project's
  appspot domain, writes a fresh config directory, deploys App Engine,
  creates the project's GCS buckets, and finally deploys the source zips.

  Args:
    args: Parsed command-line arguments. Reads project_id, new_config_dir,
      oauth_client_secrets_path, appengine_region and gce_zone.
  """
  # NOTE: In the original code this docstring sat *after* the git-dirty
  # check, making it a dead string statement instead of the docstring.

  # Check this early on, as the deployment at the end would fail otherwise.
  if common.is_git_dirty():
    print('Your checkout contains uncommitted changes. Cannot proceed.')
    sys.exit(1)

  verifier = DomainVerifier(args.oauth_client_secrets_path)

  gcloud = common.Gcloud(args.project_id)
  enable_services(gcloud)

  # Get tag for domain verification.
  appspot_domain = 'https://' + args.project_id + '.appspot.com/'
  domain_verification_tag = verifier.get_domain_verification_tag(
      appspot_domain)

  blobs_bucket = project_bucket(args.project_id, 'blobs')
  deployment_bucket = project_bucket(args.project_id, 'deployment')
  # Placeholder bucket names in the template configs -> real bucket names.
  bucket_replacements = (
      ('test-blobs-bucket', blobs_bucket),
      ('test-deployment-bucket', deployment_bucket),
      ('test-bigquery-bucket', project_bucket(args.project_id, 'bigquery')),
      ('test-backup-bucket', project_bucket(args.project_id, 'backup')),
      ('test-coverage-bucket', project_bucket(args.project_id, 'coverage')),
      ('test-fuzzer-logs-bucket',
       project_bucket(args.project_id, 'fuzzer-logs')),
      ('test-corpus-bucket', project_bucket(args.project_id, 'corpus')),
      ('test-quarantine-bucket',
       project_bucket(args.project_id, 'quarantine')),
      ('test-shared-corpus-bucket',
       project_bucket(args.project_id, 'shared-corpus')),
      ('test-fuzz-logs-bucket', project_bucket(args.project_id, 'fuzz-logs')),
  )

  # Write new configs.
  create_new_config(gcloud, args.project_id, args.new_config_dir,
                    domain_verification_tag, bucket_replacements,
                    args.gce_zone)
  prev_dir = os.getcwd()
  os.chdir(args.new_config_dir)

  # Deploy App Engine and finish verification of domain.
  # NOTE(review): we chdir into the config dir and immediately back with
  # nothing in between — looks like a leftover; confirm deploy_appengine no
  # longer needs the config dir as cwd before removing the pair.
  os.chdir(prev_dir)
  deploy_appengine(
      gcloud, args.new_config_dir, appengine_region=args.appengine_region)
  verifier.verify(appspot_domain)

  # App Engine service account requires ownership to create GCS buckets.
  verifier.add_owner(appspot_domain,
                     app_engine_service_account(args.project_id))

  # Create buckets now that domain is verified.
  create_buckets(args.project_id,
                 [bucket for _, bucket in bucket_replacements])

  # Set CORS settings on the buckets.
  set_cors(args.new_config_dir, [blobs_bucket])

  # Set deployment bucket for the cloud project.
  gcloud.run('compute', 'project-info', 'add-metadata',
             '--metadata=deployment-bucket=' + deployment_bucket)

  # Deploy source zips.
  deploy_zips(args.new_config_dir)
def execute(args):
  """Deploy Clusterfuzz to Appengine."""
  os.environ['ROOT_DIR'] = '.'

  # A valid config directory is mandatory for any deployment.
  if not os.path.exists(args.config_dir):
    print('Please provide a valid configuration directory.')
    sys.exit(1)
  os.environ['CONFIG_DIR_OVERRIDE'] = args.config_dir

  if not common.has_file_in_path('gcloud'):
    print('Please install gcloud.')
    sys.exit(1)

  running_in_ci = os.getenv('TEST_BOT_ENVIRONMENT')
  if not running_in_ci and common.is_git_dirty():
    print('Your branch is dirty. Please fix before deploying.')
    sys.exit(1)

  if not common.has_file_in_path('gsutil'):
    print('gsutil not found in PATH.')
    sys.exit(1)

  # Build templates before deployment.
  appengine.build_templates()

  # Outside CI, production deployments must come from origin/master unless
  # the operator explicitly forces it.
  if not running_in_ci and not args.staging and is_diff_origin_master():
    if not args.force:
      print('You are not on origin/master. Please fix or use --force.')
      sys.exit(1)
    print('You are not on origin/master. --force is used. Continue.')
    for _ in range(3):
      print('.')
      time.sleep(1)
    print()

  if args.staging:
    revision = common.compute_staging_revision()
    platforms = ['linux']  # No other platforms required.
  elif args.prod:
    revision = common.compute_prod_revision()
    platforms = list(constants.PLATFORMS.keys())
  else:
    print('Please specify either --prod or --staging. For production '
          'deployments, you probably want to use deploy.sh from your '
          'configs directory instead.')
    sys.exit(1)

  want_zips = 'zips' in args.targets
  want_appengine = 'appengine' in args.targets

  if want_zips:
    package_zip_paths = [
        package.package(revision, platform_name=name) for name in platforms
    ]
  else:
    # package.package calls these, so only set these up if we're not packaging,
    # since they can be fairly slow.
    appengine.symlink_dirs()
    common.install_dependencies('linux')
    with open(constants.PACKAGE_TARGET_MANIFEST_PATH, 'w') as manifest:
      manifest.write('%s\n' % revision)
    package_zip_paths = []

  too_large_file_path = find_file_exceeding_limit('src/appengine',
                                                  APPENGINE_FILESIZE_LIMIT)
  if too_large_file_path:
    print(("%s is larger than %d bytes. It wouldn't be deployed to appengine."
           " Please fix.") % (too_large_file_path, APPENGINE_FILESIZE_LIMIT))
    sys.exit(1)

  if args.staging:
    _staging_deployment_helper(args.with_go)
  else:
    _prod_deployment_helper(args.config_dir, package_zip_paths, args.with_go,
                            want_appengine)

  with open(constants.PACKAGE_TARGET_MANIFEST_PATH) as manifest:
    print('Source updated to %s' % manifest.read())

  if platforms[-1] != common.get_platform():
    # Make sure the installed dependencies are for the current platform.
    common.install_dependencies()
def package(revision,
            target_zip_dir=constants.PACKAGE_TARGET_ZIP_DIRECTORY,
            target_manifest_path=constants.PACKAGE_TARGET_MANIFEST_PATH,
            platform_name=None):
  """Prepare clusterfuzz-source.zip.

  Archives all git-tracked files (minus configs, local files, appengine
  sources and python tests), project configuration yamls, third-party
  dependencies, and a revision manifest into a single source zip.

  Args:
    revision: Revision string written to the manifest and embedded in the zip.
    target_zip_dir: Directory where the zip is written (created if missing).
    target_manifest_path: Path of the manifest file to write and embed.
    platform_name: Optional platform to package for; also names the zip.
      When None, the legacy zip name is used.

  Returns:
    The path of the zip that was written.
  """
  is_ci = os.getenv('TEST_BOT_ENVIRONMENT')
  if not is_ci and common.is_git_dirty():
    print('Your branch is dirty. Please fix before packaging.')
    sys.exit(1)

  if not _is_nodejs_up_to_date():
    print('You do not have nodejs, or your nodejs is not at least version 4.')
    sys.exit(1)

  common.install_dependencies(platform_name=platform_name)

  # This needs to be done before packaging step to let src/appengine/config be
  # archived for bot.
  appengine.symlink_dirs()

  _, ls_files_output = common.execute('git -C . ls-files', print_output=False)
  file_paths = ls_files_output.splitlines()

  if not os.path.exists(target_zip_dir):
    os.makedirs(target_zip_dir)

  target_zip_name = constants.LEGACY_ZIP_NAME
  if platform_name:
    target_zip_name = platform_name + '.zip'

  target_zip_path = os.path.join(target_zip_dir, target_zip_name)
  _clear_zip(target_zip_path)

  # Prefixes of tracked files that must not ship in the source zip.
  excluded_prefixes = (
      'config',
      'local',
      os.path.join('src', 'appengine'),
      os.path.join('src', 'local'),
      os.path.join('src', 'python', 'tests'),
  )

  # Use a context manager so the zip handle is closed even if archiving
  # raises part-way (the original leaked the handle on exceptions).
  with zipfile.ZipFile(target_zip_path, 'w',
                       zipfile.ZIP_DEFLATED) as output_file:
    # Add files from git.
    for file_path in file_paths:
      if file_path.startswith(excluded_prefixes):
        continue
      _add_to_zip(output_file, file_path)

    # These are project configuration yamls.
    for path in _get_files(os.path.join('src', 'appengine', 'config')):
      _add_to_zip(output_file, path)

    # These are third party dependencies.
    for path in _get_files(os.path.join('src', 'third_party')):
      _add_to_zip(output_file, path)

  # Write the manifest to disk, then append it to the finished zip.
  with open(target_manifest_path, 'w') as f:
    f.write('%s\n' % revision)

  with zipfile.ZipFile(target_zip_path, 'a', zipfile.ZIP_DEFLATED) as f:
    _add_to_zip(f, target_manifest_path,
                constants.PACKAGE_TARGET_MANIFEST_PATH)

  print('Revision: %s' % revision)
  print()
  print('%s is ready.' % target_zip_path)
  return target_zip_path
def execute(args):
  """Create a new config directory and deployment."""
  # Fail fast: a dirty checkout would make the deployment at the end fail.
  if common.is_git_dirty():
    print("Your checkout contains uncommitted changes. Cannot proceed.")
    sys.exit(1)

  verifier = DomainVerifier(args.oauth_client_secrets_path)
  gcloud = common.Gcloud(args.project_id)
  enable_services(gcloud)

  def bucket_for(suffix):
    """Return the project-scoped bucket name for |suffix|."""
    return project_bucket(args.project_id, suffix)

  # Get tag for domain verification.
  appspot_domain = "https://" + args.project_id + ".appspot.com/"
  domain_verification_tag = verifier.get_domain_verification_tag(
      appspot_domain)

  blobs_bucket = bucket_for("blobs")
  deployment_bucket = bucket_for("deployment")
  # Placeholder bucket names in the template configs -> real bucket names.
  bucket_replacements = (
      ("test-blobs-bucket", blobs_bucket),
      ("test-deployment-bucket", deployment_bucket),
      ("test-bigquery-bucket", bucket_for("bigquery")),
      ("test-backup-bucket", bucket_for("backup")),
      ("test-coverage-bucket", bucket_for("coverage")),
      ("test-fuzzer-logs-bucket", bucket_for("fuzzer-logs")),
      ("test-corpus-bucket", bucket_for("corpus")),
      ("test-quarantine-bucket", bucket_for("quarantine")),
      ("test-shared-corpus-bucket", bucket_for("shared-corpus")),
      ("test-fuzz-logs-bucket", bucket_for("fuzz-logs")),
      ("test-mutator-plugins-bucket", bucket_for("mutator-plugins")),
  )

  # Write new configs.
  create_new_config(
      gcloud,
      args.project_id,
      args.new_config_dir,
      domain_verification_tag,
      bucket_replacements,
      args.appengine_location,
      args.gce_zone,
      args.firebase_api_key,
  )
  original_dir = os.getcwd()
  os.chdir(args.new_config_dir)

  # Deploy App Engine and finish verification of domain.
  os.chdir(original_dir)
  deploy_appengine(
      gcloud, args.new_config_dir, appengine_location=args.appengine_location)
  verifier.verify(appspot_domain)

  # App Engine service account requires:
  # - Domain ownership to create domain namespaced GCS buckets
  # - Datastore export permission for periodic backups.
  # - Service account signing permission for GCS uploads.
  service_account = app_engine_service_account(args.project_id)
  verifier.add_owner(appspot_domain, service_account)
  for role in ("roles/datastore.importExportAdmin",
               "roles/iam.serviceAccountTokenCreator"):
    add_service_account_role(gcloud, args.project_id, service_account, role)

  # Create buckets now that domain is verified.
  create_buckets(args.project_id,
                 [pair[1] for pair in bucket_replacements])

  # Set CORS settings on the buckets.
  set_cors(args.new_config_dir, [blobs_bucket])

  # Set deployment bucket for the cloud project.
  gcloud.run("compute", "project-info", "add-metadata",
             "--metadata=deployment-bucket=" + deployment_bucket)

  # Deploy source zips.
  deploy_zips(args.new_config_dir)
def execute(args):
  """Deploy Clusterfuzz to Appengine.

  Validates the environment (config dir, gcloud, gsutil, clean git branch
  outside CI), builds templates, computes the deployment revision, packages
  source zips when requested, then runs the staging or production deployment
  helper.
  """
  # TODO(ochang): Remove once python3 deployment is fixed.
  os.environ["CLOUDSDK_PYTHON"] = "python2"

  os.environ["ROOT_DIR"] = "."
  if not os.path.exists(args.config_dir):
    print("Please provide a valid configuration directory.")
    sys.exit(1)
  os.environ["CONFIG_DIR_OVERRIDE"] = args.config_dir

  if not common.has_file_in_path("gcloud"):
    print("Please install gcloud.")
    sys.exit(1)

  is_ci = os.getenv("TEST_BOT_ENVIRONMENT")
  # Only developer checkouts must be clean; CI is exempt from this check.
  if not is_ci and common.is_git_dirty():
    print("Your branch is dirty. Please fix before deploying.")
    sys.exit(1)

  if not common.has_file_in_path("gsutil"):
    print("gsutil not found in PATH.")
    sys.exit(1)

  # Build templates before deployment.
  appengine.build_templates()

  # Outside CI, non-staging deployments warn (or abort) when the checkout
  # differs from origin/master.
  if not is_ci and not args.staging:
    if is_diff_origin_master():
      if args.force:
        # Print dots for ~3 seconds so the operator sees the warning.
        print("You are not on origin/master. --force is used. Continue.")
        for _ in range(3):
          print(".")
          time.sleep(1)
        print()
      else:
        print("You are not on origin/master. Please fix or use --force.")
        sys.exit(1)

  if args.staging:
    revision = common.compute_staging_revision()
    platforms = ["linux"]  # No other platforms required.
  elif args.prod:
    revision = common.compute_prod_revision()
    platforms = list(constants.PLATFORMS.keys())
  else:
    print("Please specify either --prod or --staging. For production "
          "deployments, you probably want to use deploy.sh from your "
          "configs directory instead.")
    sys.exit(1)

  # NOTE: these locals shadow module-level helpers of the same names used by
  # sibling commands; they are plain booleans here.
  deploy_zips = "zips" in args.targets
  deploy_appengine = "appengine" in args.targets

  package_zip_paths = []
  if deploy_zips:
    for platform_name in platforms:
      package_zip_paths.append(
          package.package(revision, platform_name=platform_name))
  else:
    # package.package calls these, so only set these up if we're not packaging,
    # since they can be fairly slow.
    appengine.symlink_dirs()
    common.install_dependencies("linux")
    with open(constants.PACKAGE_TARGET_MANIFEST_PATH, "w") as f:
      f.write("%s\n" % revision)

  # Abort before deploying anything if an appengine file exceeds the size
  # limit — it wouldn't be deployed (see message below).
  too_large_file_path = find_file_exceeding_limit("src/appengine",
                                                  APPENGINE_FILESIZE_LIMIT)
  if too_large_file_path:
    print(("%s is larger than %d bytes. It wouldn't be deployed to appengine."
           " Please fix.") % (too_large_file_path, APPENGINE_FILESIZE_LIMIT))
    sys.exit(1)

  deploy_go = args.with_go
  if args.staging:
    _staging_deployment_helper(deploy_go)
  else:
    _prod_deployment_helper(args.config_dir, package_zip_paths, deploy_go,
                            deploy_appengine)

  # Echo the revision recorded in the manifest.
  with open(constants.PACKAGE_TARGET_MANIFEST_PATH) as f:
    print("Source updated to %s" % f.read())

  if platforms[-1] != common.get_platform():
    # Make sure the installed dependencies are for the current platform.
    common.install_dependencies()