def execute(args):
  """Deploy Clusterfuzz to Appengine.

  Args:
    args: Parsed CLI arguments. Reads config_dir, staging, prod, force,
      targets and with_go.
  """
  os.environ['ROOT_DIR'] = '.'

  if not os.path.exists(args.config_dir):
    print('Please provide a valid configuration directory.')
    sys.exit(1)

  os.environ['CONFIG_DIR_OVERRIDE'] = args.config_dir

  # Deployment requires both gcloud and gsutil to be installed.
  if not common.has_file_in_path('gcloud'):
    print('Please install gcloud.')
    sys.exit(1)

  # CI bots are allowed to deploy from a dirty/non-master checkout.
  is_ci = os.getenv('TEST_BOT_ENVIRONMENT')
  if not is_ci and common.is_git_dirty():
    print('Your branch is dirty. Please fix before deploying.')
    sys.exit(1)

  if not common.has_file_in_path('gsutil'):
    print('gsutil not found in PATH.')
    sys.exit(1)

  # Build templates before deployment.
  appengine.build_templates()

  if not is_ci and not args.staging:
    if is_diff_origin_master():
      if args.force:
        # Give the operator a visible 3-second pause to abort.
        print('You are not on origin/master. --force is used. Continue.')
        for _ in range(3):
          print('.')
          time.sleep(1)
        print()
      else:
        print('You are not on origin/master. Please fix or use --force.')
        sys.exit(1)

  if args.staging:
    revision = common.compute_staging_revision()
    platforms = ['linux']  # No other platforms required.
  elif args.prod:
    revision = common.compute_prod_revision()
    platforms = list(constants.PLATFORMS.keys())
  else:
    print('Please specify either --prod or --staging. For production '
          'deployments, you probably want to use deploy.sh from your '
          'configs directory instead.')
    sys.exit(1)

  deploy_zips = 'zips' in args.targets
  deploy_appengine = 'appengine' in args.targets

  package_zip_paths = []
  if deploy_zips:
    for platform_name in platforms:
      package_zip_paths.append(
          package.package(revision, platform_name=platform_name))
  else:
    # package.package calls these, so only set these up if we're not packaging,
    # since they can be fairly slow.
    appengine.symlink_dirs()
    common.install_dependencies('linux')
    with open(constants.PACKAGE_TARGET_MANIFEST_PATH, 'w') as f:
      f.write('%s\n' % revision)

  # App Engine rejects files over the size limit; fail fast before deploying.
  too_large_file_path = find_file_exceeding_limit('src/appengine',
                                                  APPENGINE_FILESIZE_LIMIT)
  if too_large_file_path:
    print(('%s is larger than %d bytes. It wouldn\'t be deployed to appengine.'
           ' Please fix.') % (too_large_file_path, APPENGINE_FILESIZE_LIMIT))
    sys.exit(1)

  deploy_go = args.with_go
  if args.staging:
    _staging_deployment_helper(deploy_go)
  else:
    _prod_deployment_helper(args.config_dir, package_zip_paths, deploy_go,
                            deploy_appengine)

  with open(constants.PACKAGE_TARGET_MANIFEST_PATH) as f:
    print('Source updated to %s' % f.read())

  if platforms[-1] != common.get_platform():
    # Make sure the installed dependencies are for the current platform.
    common.install_dependencies()
def execute(args):
  """Run Python unit tests. For unittests involved appengine, sys.path needs
  certain modification.

  Args:
    args: Parsed CLI arguments. Reads target, config_dir, verbose, pattern
      and parallel.
  """
  os.environ['PY_UNITTESTS'] = 'True'
  # Force gcloud tooling to use Python 2 (legacy App Engine SDK requirement).
  os.environ['CLOUDSDK_PYTHON'] = 'python2'

  if os.getenv('INTEGRATION') or os.getenv('UNTRUSTED_RUNNER_TESTS'):
    # Set up per-user buckets used by integration tests.
    os.environ['CORPUS_BUCKET'] = common.test_bucket('TEST_CORPUS_BUCKET')
    os.environ['QUARANTINE_BUCKET'] = common.test_bucket(
        'TEST_QUARANTINE_BUCKET')
    os.environ['BACKUP_BUCKET'] = common.test_bucket('TEST_BACKUP_BUCKET')
    os.environ['COVERAGE_BUCKET'] = common.test_bucket(
        'TEST_COVERAGE_BUCKET')

  # Kill leftover instances of emulators and dev appserver.
  common.kill_leftover_emulators()

  # Don't use absolute paths to make it easier to compare results in tests.
  os.environ['CONFIG_DIR_OVERRIDE'] = os.path.join('.', 'configs', 'test')

  top_level_dir = os.path.join('src', 'python')
  if args.target == 'appengine':
    # Build template files.
    appengine.build_templates()

    test_directory = APPENGINE_TEST_DIRECTORY
    sys.path.insert(0, os.path.abspath(os.path.join('src', 'appengine')))

    for i, path in enumerate(sys.path):
      if 'third_party' in path:
        # Replace third_party with App Engine third_party/.
        sys.path[i] = os.path.abspath(
            os.path.join('src', 'appengine', 'third_party'))

    if sys.version_info.major == 2:
      # TODO(ochang): Remove once migrated to Python 3.
      appengine_sdk_path = appengine.find_sdk_path()
      sys.path.insert(0, appengine_sdk_path)

      # Get additional App Engine third party imports.
      import dev_appserver
      dev_appserver.fix_google_path()
      sys.path.extend(dev_appserver.EXTRA_PATHS)

    # Loading appengine_main from the current project ensures that any
    # changes to configuration there are available to all tests (e.g.
    # sys.path modifications, namespaces, etc.)
    try:
      from src.appengine import main as appengine_main
      (appengine_main)  # pylint: disable=pointless-statement
    except ImportError:
      print('Note: unable to import appengine_main.')

    # google.auth uses App Engine credentials based on importability of
    # google.appengine.api.app_identity.
    try:
      from google.auth import app_engine as auth_app_engine
      if auth_app_engine.app_identity:
        auth_app_engine.app_identity = None
    except ImportError:
      pass
  elif args.target == 'core':
    test_directory = CORE_TEST_DIRECTORY
  else:
    # Config module tests.
    os.environ['CONFIG_DIR_OVERRIDE'] = args.config_dir
    test_directory = os.path.join(args.config_dir, 'modules')
    # No fixed top level dir for config-module tests.
    top_level_dir = None

    # Modules may use libs from our App Engine directory.
    sys.path.insert(0, os.path.abspath(os.path.join('src', 'appengine')))

    # Fix paths again to get config modules added to the import path.
    from python.base import modules
    modules.fix_module_search_paths()

  # Set expected environment variables.
  local_config.ProjectConfig().set_environment()

  # Needed for NDB to work with cloud datastore emulator.
  os.environ['DATASTORE_USE_PROJECT_ID_AS_APP_ID'] = 'true'

  if args.verbose:
    # Force logging to console for this process and child processes.
    os.environ['LOG_TO_CONSOLE'] = 'True'
  else:
    # Disable logging.
    logging.disable(logging.CRITICAL)

  if args.pattern is None:
    args.pattern = '*_test.py'

  if args.parallel:
    # TODO(tanin): Support coverage.
    run_tests_parallel(args, test_directory, top_level_dir)
  else:
    run_tests_single_core(args, test_directory, top_level_dir)
def execute(args):
  """Run Python unit tests.

  For appengine-targeted unittests, sys.path needs certain modification
  before test modules are imported, so that setup happens here.

  Args:
    args: Parsed CLI arguments. Reads target, config_dir, verbose, pattern
      and parallel.
  """
  os.environ['PY_UNITTESTS'] = 'True'

  if os.getenv('INTEGRATION') or os.getenv('UNTRUSTED_RUNNER_TESTS'):
    # Integration tests use per-user GCS buckets; export each of them.
    for env_name, bucket_name in (
        ('CORPUS_BUCKET', 'TEST_CORPUS_BUCKET'),
        ('QUARANTINE_BUCKET', 'TEST_QUARANTINE_BUCKET'),
        ('BACKUP_BUCKET', 'TEST_BACKUP_BUCKET'),
        ('COVERAGE_BUCKET', 'TEST_COVERAGE_BUCKET')):
      os.environ[env_name] = common.test_bucket(bucket_name)

  # Kill leftover instances of emulators and dev appserver.
  common.kill_leftover_emulators()

  # A relative config path keeps test results comparable across machines.
  os.environ['CONFIG_DIR_OVERRIDE'] = os.path.join('.', 'configs', 'test')

  root_dir = os.path.join('src', 'python')
  if args.target == 'appengine':
    # Handler tests render templates, so build them first.
    appengine.build_templates()

    suite_dir = APPENGINE_TEST_DIRECTORY
    sys.path.insert(0, os.path.abspath(os.path.join('src', 'appengine')))

    # Pull in the additional App Engine third party import paths.
    import dev_appserver
    sys.path.extend(dev_appserver.EXTRA_PATHS)

    # Importing the project's appengine_config makes its configuration
    # (sys.path modifications, namespaces, etc.) visible to every test.
    try:
      from src.appengine import appengine_config
      (appengine_config)  # pylint: disable=pointless-statement
    except ImportError:
      print('Note: unable to import appengine_config.')
  elif args.target == 'core':
    suite_dir = CORE_TEST_DIRECTORY
  else:
    # Remaining target: the config's own module tests.
    os.environ['CONFIG_DIR_OVERRIDE'] = args.config_dir
    suite_dir = os.path.join(args.config_dir, 'modules')
    root_dir = None

    # Modules may use libs from our App Engine directory.
    sys.path.insert(0, os.path.abspath(os.path.join('src', 'appengine')))

    # Fix paths once more so config modules land on the import path.
    from python.base import modules
    modules.fix_module_search_paths()

  # Export the environment variables the project config expects.
  local_config.ProjectConfig().set_environment()

  # Needed for NDB to work with cloud datastore emulator.
  os.environ['DATASTORE_USE_PROJECT_ID_AS_APP_ID'] = 'true'

  if args.verbose:
    # Force logging to console for this process and child processes.
    os.environ['LOG_TO_CONSOLE'] = 'True'
  else:
    # Disable logging.
    logging.disable(logging.CRITICAL)

  if args.pattern is None:
    args.pattern = '*_test.py'

  if args.parallel:
    # TODO(tanin): Support coverage.
    run_tests_parallel(args, suite_dir, root_dir)
  else:
    run_tests_single_core(args, suite_dir, root_dir)
def execute(args):
  """Run Python unit tests.

  Appengine-targeted unittests need sys.path adjusted before the test
  modules load, so that import-path setup lives here.

  Args:
    args: Parsed CLI arguments. Reads target, verbose, pattern and parallel.
  """
  os.environ['PY_UNITTESTS'] = 'True'

  if os.getenv('INTEGRATION') or os.getenv('UNTRUSTED_RUNNER_TESTS'):
    # Integration tests use per-user GCS buckets; export each of them.
    for env_name, bucket_name in (
        ('CORPUS_BUCKET', 'TEST_CORPUS_BUCKET'),
        ('QUARANTINE_BUCKET', 'TEST_QUARANTINE_BUCKET'),
        ('BACKUP_BUCKET', 'TEST_BACKUP_BUCKET'),
        ('COVERAGE_BUCKET', 'TEST_COVERAGE_BUCKET')):
      os.environ[env_name] = common.test_bucket(bucket_name)

  # Kill leftover instances of emulators and dev appserver.
  common.kill_leftover_emulators()

  # A relative config path keeps test results comparable across machines.
  os.environ['CONFIG_DIR_OVERRIDE'] = os.path.join('.', 'configs', 'test')

  if args.target == 'appengine':
    # Handler tests render templates, so build them first.
    appengine.build_templates()

    suite_dir = APPENGINE_TEST_DIRECTORY
    sys.path.insert(0, os.path.abspath(os.path.join('src', 'appengine')))

    # Pull in the additional App Engine third party import paths.
    import dev_appserver
    sys.path.extend(dev_appserver.EXTRA_PATHS)

    # Importing the project's appengine_config makes its configuration
    # (sys.path modifications, namespaces, etc.) visible to every test.
    try:
      from src.appengine import appengine_config
      (appengine_config)  # pylint: disable=pointless-statement
    except ImportError:
      print('Note: unable to import appengine_config.')
  else:
    suite_dir = CORE_TEST_DIRECTORY

  # Export the environment variables the project config expects.
  local_config.ProjectConfig().set_environment()

  # Needed for NDB to work with cloud datastore emulator.
  os.environ['DATASTORE_USE_PROJECT_ID_AS_APP_ID'] = 'true'

  if args.verbose:
    logs.configure_for_tests()
  else:
    # Disable logging.
    logging.disable(logging.CRITICAL)

  # Coverage only makes sense for a full, unfiltered run.
  enable_coverage = args.pattern is None
  if enable_coverage:
    args.pattern = '*_test.py'

  if args.parallel:
    # TODO(tanin): Support coverage.
    run_tests_parallel(args, suite_dir)
  else:
    run_tests_single_core(args, suite_dir, enable_coverage)
def execute(args):
  """Run the server.

  Bootstraps local storage and emulators (pubsub, datastore, a custom Go GCS
  emulator), then serves the cron service and main app with gunicorn until
  interrupted with Ctrl-C.

  Args:
    args: Parsed CLI arguments. Reads skip_install_deps, bootstrap, clean
      and storage_path.
  """
  os.environ['LOCAL_DEVELOPMENT'] = 'True'
  common.kill_leftover_emulators()

  if not args.skip_install_deps:
    common.install_dependencies()

  # Do this everytime as a past deployment might have changed these.
  appengine.symlink_dirs()

  # Deploy all yaml files from test project for basic appengine deployment and
  # local testing to work. This needs to be called on every iteration as a past
  # deployment might have overwritten or deleted these config files.
  yaml_paths = local_config.GAEConfig().get_absolute_path('deployment.prod3')
  appengine.copy_yamls_and_preprocess(yaml_paths)

  # Build templates.
  appengine.build_templates()

  # Clean storage directory if needed.
  if args.bootstrap or args.clean:
    if os.path.exists(args.storage_path):
      print('Clearing local datastore by removing %s.' % args.storage_path)
      shutil.rmtree(args.storage_path)
  if not os.path.exists(args.storage_path):
    os.makedirs(args.storage_path)

  # Set up local GCS buckets and symlinks.
  bootstrap_gcs(args.storage_path)

  # Start pubsub emulator.
  pubsub_emulator = test_utils.start_cloud_emulator(
      'pubsub',
      args=['--host-port=' + constants.PUBSUB_EMULATOR_HOST],
      data_dir=args.storage_path)
  test_utils.setup_pubsub(constants.TEST_APP_ID)

  # Start Datastore emulator.
  datastore_emulator = test_utils.start_cloud_emulator(
      'datastore',
      args=['--host-port=' + constants.DATASTORE_EMULATOR_HOST],
      data_dir=args.storage_path,
      store_on_disk=True)

  # Start our custom GCS emulator.
  local_gcs = common.execute_async(
      'go run emulators/gcs.go -storage-path=' + args.storage_path,
      cwd='local')

  if args.bootstrap:
    bootstrap_db()

  start_cron_threads()

  os.environ['APPLICATION_ID'] = constants.TEST_APP_ID
  os.environ['LOCAL_DEVELOPMENT'] = 'True'
  os.environ['LOCAL_GCS_BUCKETS_PATH'] = 'local_gcs'
  os.environ['LOCAL_GCS_SERVER_HOST'] = constants.LOCAL_GCS_SERVER_HOST
  os.environ['DATASTORE_EMULATOR_HOST'] = constants.DATASTORE_EMULATOR_HOST
  os.environ['PUBSUB_EMULATOR_HOST'] = constants.PUBSUB_EMULATOR_HOST
  os.environ['GAE_ENV'] = 'dev'

  # Pre-bind so the KeyboardInterrupt handler below can safely check whether
  # the cron server was ever started. Previously, a Ctrl-C arriving during
  # execute_async() raised NameError on cron_server in the except block.
  cron_server = None
  try:
    cron_server = common.execute_async(
        'gunicorn -b :{port} main:app'.format(
            port=constants.CRON_SERVICE_PORT),
        cwd=os.path.join('src', 'appengine'))

    # Blocks until the main app server exits or the user interrupts.
    common.execute(
        'gunicorn -b :{port} main:app'.format(
            port=constants.DEV_APPSERVER_PORT),
        cwd=os.path.join('src', 'appengine'))
  except KeyboardInterrupt:
    print('Server has been stopped. Exit.')
    if cron_server is not None:
      cron_server.terminate()
    datastore_emulator.cleanup()
    pubsub_emulator.cleanup()
    local_gcs.terminate()
def execute(args):
  """Run the server.

  Bootstraps local storage, the pubsub emulator and a bazel-run GCS emulator,
  then serves the app via dev_appserver until interrupted with Ctrl-C.

  Args:
    args: Parsed CLI arguments. Reads skip_install_deps, bootstrap, clean,
      storage_path and log_level.
  """
  os.environ['LOCAL_DEVELOPMENT'] = 'True'
  common.kill_leftover_emulators()

  if not args.skip_install_deps:
    common.install_dependencies()

  # Do this everytime as a past deployment might have changed these.
  appengine.symlink_dirs()

  # Deploy all yaml files from test project for basic appengine deployment and
  # local testing to work. This needs to be called on every iteration as a past
  # deployment might have overwritten or deleted these config files.
  yaml_paths = local_config.GAEConfig().get_absolute_path('deployment.prod')
  appengine.copy_yamls_and_preprocess(yaml_paths)

  # Build templates.
  appengine.build_templates()

  # Clean storage directory if needed.
  if args.bootstrap or args.clean:
    if os.path.exists(args.storage_path):
      # Fixed: was a Python 2 `print` statement, which is a syntax error on
      # Python 3 and inconsistent with the rest of this file.
      print('Clearing local datastore by removing %s.' % args.storage_path)
      shutil.rmtree(args.storage_path)
  if not os.path.exists(args.storage_path):
    os.makedirs(args.storage_path)

  # Set up local GCS buckets and symlinks.
  bootstrap_gcs(args.storage_path)

  # Start pubsub emulator.
  pubsub_emulator = test_utils.start_cloud_emulator(
      'pubsub',
      args=['--host-port=' + constants.PUBSUB_EMULATOR_HOST],
      data_dir=args.storage_path)
  test_utils.setup_pubsub(constants.TEST_APP_ID)

  # Start our custom GCS emulator.
  local_gcs = common.execute_async(
      'bazel run //go/testing/gcs '
      '--sandbox_writable_path=$(pwd)/../local/storage/local_gcs '
      '-- -storage-path=$(pwd)/../local/storage/local_gcs',
      cwd='src')

  if args.bootstrap:
    bootstrap_db()

  start_cron_threads()

  try:
    # Blocks until dev_appserver exits or the user interrupts.
    common.execute(
        '{dev_appserver_path} -A {project} --skip_sdk_update_check=1 '
        '--storage_path={storage_path} --port={appserver_port} '
        '--admin_port={admin_port} '
        '--datastore_emulator_port={datastore_emulator_port} '
        '--require_indexes=true --log_level={log_level} '
        '--dev_appserver_log_level={log_level} '
        '--support_datastore_emulator=true '
        '--env_var LOCAL_DEVELOPMENT=True '
        '--env_var PUBSUB_EMULATOR_HOST={pubsub_emulator_host} '
        '--env_var LOCAL_GCS_BUCKETS_PATH=local_gcs '
        '--env_var LOCAL_GCS_SERVER_HOST={local_gcs_server_host} '
        'src/appengine src/appengine/cron-service.yaml'.format(
            dev_appserver_path=_dev_appserver_path(),
            project=constants.TEST_APP_ID,
            storage_path=args.storage_path,
            appserver_port=constants.DEV_APPSERVER_PORT,
            admin_port=constants.DEV_APPSERVER_ADMIN_PORT,
            datastore_emulator_port=constants.DATASTORE_EMULATOR_PORT,
            log_level=args.log_level,
            pubsub_emulator_host=constants.PUBSUB_EMULATOR_HOST,
            local_gcs_server_host=constants.LOCAL_GCS_SERVER_HOST))
  except KeyboardInterrupt:
    # Fixed: was a Python 2 `print` statement (see above).
    print('Server has been stopped. Exit.')
    pubsub_emulator.cleanup()
    local_gcs.terminate()
def execute(args):
  """Deploy Clusterfuzz to Appengine.

  Args:
    args: Parsed CLI arguments. Reads config_dir, staging, prod, force,
      targets and with_go.
  """
  # TODO(ochang): Remove once python3 deployment is fixed.
  os.environ["CLOUDSDK_PYTHON"] = "python2"

  os.environ["ROOT_DIR"] = "."

  if not os.path.exists(args.config_dir):
    print("Please provide a valid configuration directory.")
    sys.exit(1)

  os.environ["CONFIG_DIR_OVERRIDE"] = args.config_dir

  # Deployment requires both gcloud and gsutil to be installed.
  if not common.has_file_in_path("gcloud"):
    print("Please install gcloud.")
    sys.exit(1)

  # CI bots are allowed to deploy from a dirty/non-master checkout.
  is_ci = os.getenv("TEST_BOT_ENVIRONMENT")
  if not is_ci and common.is_git_dirty():
    print("Your branch is dirty. Please fix before deploying.")
    sys.exit(1)

  if not common.has_file_in_path("gsutil"):
    print("gsutil not found in PATH.")
    sys.exit(1)

  # Build templates before deployment.
  appengine.build_templates()

  if not is_ci and not args.staging:
    if is_diff_origin_master():
      if args.force:
        # Give the operator a visible 3-second pause to abort.
        print(
            "You are not on origin/master. --force is used. Continue.")
        for _ in range(3):
          print(".")
          time.sleep(1)
        print()
      else:
        print(
            "You are not on origin/master. Please fix or use --force.")
        sys.exit(1)

  if args.staging:
    revision = common.compute_staging_revision()
    platforms = ["linux"]  # No other platforms required.
  elif args.prod:
    revision = common.compute_prod_revision()
    platforms = list(constants.PLATFORMS.keys())
  else:
    print("Please specify either --prod or --staging. For production "
          "deployments, you probably want to use deploy.sh from your "
          "configs directory instead.")
    sys.exit(1)

  deploy_zips = "zips" in args.targets
  deploy_appengine = "appengine" in args.targets

  package_zip_paths = []
  if deploy_zips:
    for platform_name in platforms:
      package_zip_paths.append(
          package.package(revision, platform_name=platform_name))
  else:
    # package.package calls these, so only set these up if we're not packaging,
    # since they can be fairly slow.
    appengine.symlink_dirs()
    common.install_dependencies("linux")
    with open(constants.PACKAGE_TARGET_MANIFEST_PATH, "w") as f:
      f.write("%s\n" % revision)

  # App Engine rejects files over the size limit; fail fast before deploying.
  too_large_file_path = find_file_exceeding_limit("src/appengine",
                                                  APPENGINE_FILESIZE_LIMIT)
  if too_large_file_path:
    print((
        "%s is larger than %d bytes. It wouldn't be deployed to appengine."
        " Please fix.") % (too_large_file_path, APPENGINE_FILESIZE_LIMIT))
    sys.exit(1)

  deploy_go = args.with_go
  if args.staging:
    _staging_deployment_helper(deploy_go)
  else:
    _prod_deployment_helper(args.config_dir, package_zip_paths, deploy_go,
                            deploy_appengine)

  with open(constants.PACKAGE_TARGET_MANIFEST_PATH) as f:
    print("Source updated to %s" % f.read())

  if platforms[-1] != common.get_platform():
    # Make sure the installed dependencies are for the current platform.
    common.install_dependencies()