def managed_redis_server():
    """Context manager that runs a redis server and ends it gracefully.

    Raises:
        Exception. The host machine runs Windows, where the redis command
            line interface is not installed.

    Yields:
        psutil.Process. The redis server process.
    """
    if common.is_windows_os():
        raise Exception(
            'The redis command line interface is not installed because your '
            'machine is on the Windows operating system. The redis server '
            'cannot start.')

    # A redis dump file holds residual data from a previous run of the redis
    # server. Delete it, if present, so the server starts with a clean slate.
    if os.path.exists(common.REDIS_DUMP_PATH):
        os.remove(common.REDIS_DUMP_PATH)

    # OK to use shell=True here because we are passing string literals and
    # constants, so there is no risk of a shell-injection attack.
    with managed_process(
        [common.REDIS_SERVER_PATH, common.REDIS_CONF_PATH],
        human_readable_name='Redis Server', shell=True
    ) as redis_proc:
        common.wait_for_port_to_be_in_use(feconf.REDISPORT)
        try:
            yield redis_proc
        finally:
            # Ask the server to shut down without writing a new dump file.
            subprocess.check_call(
                [common.REDIS_CLI_PATH, 'shutdown', 'nosave'])
def managed_firebase_auth_emulator(recover_users=False):
    """Returns a context manager to manage the Firebase auth emulator.

    Args:
        recover_users: bool. Whether to recover users created by the previous
            instance of the Firebase auth emulator.

    Yields:
        psutil.Process. The Firebase emulator process.
    """
    emulator_args = [
        common.FIREBASE_PATH, 'emulators:start', '--only', 'auth',
        '--project', feconf.OPPIA_PROJECT_ID,
        '--config', feconf.FIREBASE_EMULATOR_CONFIG_PATH,
    ]
    if recover_users:
        # Import the users exported by the previous emulator instance, and
        # keep exporting on exit so the next run can recover them too.
        emulator_args.extend(
            ['--import', common.FIREBASE_EMULATOR_CACHE_DIR,
             '--export-on-exit'])
    else:
        emulator_args.extend(
            ['--export-on-exit', common.FIREBASE_EMULATOR_CACHE_DIR])

    # OK to use shell=True here because we are passing string literals and
    # constants, so there is no risk of a shell-injection attack.
    with managed_process(
        emulator_args, human_readable_name='Firebase Emulator', shell=True
    ) as emulator_proc:
        common.wait_for_port_to_be_in_use(feconf.FIREBASE_EMULATOR_PORT)
        yield emulator_proc
def managed_cloud_datastore_emulator(clear_datastore=False):
    """Returns a context manager for the Cloud Datastore emulator.

    Args:
        clear_datastore: bool. Whether to delete the datastore's config and
            data before starting the emulator.

    Yields:
        psutil.Process. The emulator process.
    """
    emulator_hostport = '%s:%d' % (
        feconf.CLOUD_DATASTORE_EMULATOR_HOST,
        feconf.CLOUD_DATASTORE_EMULATOR_PORT)
    emulator_args = [
        common.GCLOUD_PATH, 'beta', 'emulators', 'datastore', 'start',
        '--project', feconf.OPPIA_PROJECT_ID,
        '--data-dir', common.CLOUD_DATASTORE_EMULATOR_DATA_DIR,
        '--host-port', emulator_hostport,
        '--consistency=1.0',
        '--quiet',
    ]
    if clear_datastore:
        emulator_args.append('--no-store-on-disk')

    with contextlib.ExitStack() as stack:
        # Ensure the data directory exists, emptying it first when the caller
        # asked for a clean datastore.
        data_dir = common.CLOUD_DATASTORE_EMULATOR_DATA_DIR
        if os.path.exists(data_dir):
            if clear_datastore:
                # Replace it with an empty directory.
                shutil.rmtree(data_dir)
                os.makedirs(data_dir)
        else:
            os.makedirs(data_dir)

        # OK to use shell=True here because we are passing string literals and
        # constants, so there is no risk of a shell-injection attack.
        proc = stack.enter_context(managed_process(
            emulator_args, human_readable_name='Cloud Datastore Emulator',
            shell=True))

        common.wait_for_port_to_be_in_use(
            feconf.CLOUD_DATASTORE_EMULATOR_PORT)

        # Environment variables required to communicate with the emulator.
        # The swaps are undone by the ExitStack when the context exits.
        emulator_env_vars = [
            ('DATASTORE_DATASET', feconf.OPPIA_PROJECT_ID),
            ('DATASTORE_EMULATOR_HOST', emulator_hostport),
            ('DATASTORE_EMULATOR_HOST_PATH',
             '%s/datastore' % emulator_hostport),
            ('DATASTORE_HOST', 'http://%s' % emulator_hostport),
            ('DATASTORE_PROJECT_ID', feconf.OPPIA_PROJECT_ID),
            ('DATASTORE_USE_PROJECT_ID_AS_APP_ID', 'true'),
            ('GOOGLE_CLOUD_PROJECT', feconf.OPPIA_PROJECT_ID),
        ]
        for env_name, env_value in emulator_env_vars:
            stack.enter_context(common.swap_env(env_name, env_value))

        yield proc
def managed_elasticsearch_dev_server():
    """Returns a context manager for the ElasticSearch server used when
    running tests in development mode and running a local dev server.

    This is only required in a development environment.

    Yields:
        psutil.Process. The ElasticSearch server process.
    """
    # Clear previous data stored in the local cluster.
    if os.path.exists(common.ES_PATH_DATA_DIR):
        shutil.rmtree(common.ES_PATH_DATA_DIR)

    # -q is the quiet flag.
    elasticsearch_args = ['%s/bin/elasticsearch' % common.ES_PATH, '-q']
    # Override the default path to ElasticSearch config files.
    elasticsearch_env = {'ES_PATH_CONF': common.ES_PATH_CONFIG_DIR}

    # OK to use shell=True here because we are passing string literals and
    # constants, so there is no risk of a shell-injection attack.
    with managed_process(
        elasticsearch_args, human_readable_name='ElasticSearch Server',
        env=elasticsearch_env, shell=True
    ) as elasticsearch_proc:
        common.wait_for_port_to_be_in_use(feconf.ES_LOCALHOST_PORT)
        yield elasticsearch_proc
def managed_dev_appserver(
        app_yaml_path, env=None, log_level='info', host='0.0.0.0', port=8080,
        admin_host='0.0.0.0', admin_port=8000, clear_datastore=False,
        enable_console=False, enable_host_checking=True,
        automatic_restart=True, skip_sdk_update_check=False):
    """Returns a context manager to start up and shut down a GAE dev
    appserver.

    Args:
        app_yaml_path: str. Path to the app.yaml file which defines the
            structure of the server.
        env: dict(str: str) or None. Defines the environment variables for
            the new process.
        log_level: str. The lowest log level generated by the application
            code and the development server. Expected values are: debug,
            info, warning, error, critical.
        host: str. The host name to which the app server should bind.
        port: int. The lowest port to which application modules should bind.
        admin_host: str. The host name to which the admin server should bind.
        admin_port: int. The port to which the admin server should bind.
        clear_datastore: bool. Whether to clear the datastore on startup.
        enable_console: bool. Whether to enable interactive console in admin
            view.
        enable_host_checking: bool. Whether to enforce HTTP Host checking for
            application modules, API server, and admin server. Host checking
            protects against DNS rebinding attacks, so only disable after
            understanding the security implications.
        automatic_restart: bool. Whether to restart instances automatically
            when files relevant to their module are changed.
        skip_sdk_update_check: bool. Whether to skip checking for SDK
            updates. If false, uses .appcfg_nag to decide.

    Yields:
        psutil.Process. The dev_appserver process.
    """
    def as_flag(boolean_value):
        # dev_appserver expects the literal strings 'true'/'false' for its
        # boolean flags.
        return 'true' if boolean_value else 'false'

    dev_appserver_args = [
        common.CURRENT_PYTHON_BIN, common.DEV_APPSERVER_PATH,
        '--host', host,
        '--port', port,
        '--admin_host', admin_host,
        '--admin_port', admin_port,
        '--clear_datastore', as_flag(clear_datastore),
        '--enable_console', as_flag(enable_console),
        '--enable_host_checking', as_flag(enable_host_checking),
        '--automatic_restart', as_flag(automatic_restart),
        '--skip_sdk_update_check', as_flag(skip_sdk_update_check),
        '--log_level', log_level,
        '--dev_appserver_log_level', log_level,
        app_yaml_path,
    ]

    # OK to use shell=True here because we are not passing anything that came
    # from an untrusted user, only other callers of the script, so there's no
    # risk of shell-injection attacks.
    with managed_process(
        dev_appserver_args, human_readable_name='GAE Development Server',
        shell=True, env=env
    ) as appserver_proc:
        common.wait_for_port_to_be_in_use(port)
        yield appserver_proc
def main(args=None):
    """Runs lighthouse checks and deletes reports.

    Args:
        args: list(str)|None. Command-line arguments to parse, or None to
            use sys.argv.

    Raises:
        Exception. The mode argument is neither 'accessibility' nor
            'performance'.
    """
    parsed_args = _PARSER.parse_args(args=args)

    if parsed_args.mode == LIGHTHOUSE_MODE_ACCESSIBILITY:
        lighthouse_mode = LIGHTHOUSE_MODE_ACCESSIBILITY
        server_mode = SERVER_MODE_DEV
    elif parsed_args.mode == LIGHTHOUSE_MODE_PERFORMANCE:
        lighthouse_mode = LIGHTHOUSE_MODE_PERFORMANCE
        server_mode = SERVER_MODE_PROD
    else:
        # Fix: the original message was missing a space between "choose" and
        # "from", printing 'please choosefrom ...'.
        raise Exception(
            'Invalid parameter passed in: \'%s\', please choose '
            'from \'accessibility\' or \'performance\'' % parsed_args.mode)

    enable_webpages()
    atexit.register(cleanup)

    if lighthouse_mode == LIGHTHOUSE_MODE_PERFORMANCE:
        python_utils.PRINT('Building files in production mode.')
        # We are using --source_maps here, so that we have at least one CI
        # check that builds using source maps in prod env. This is to ensure
        # that there are no issues while deploying oppia.
        build.main(args=['--prod_env', '--source_maps'])
    elif lighthouse_mode == LIGHTHOUSE_MODE_ACCESSIBILITY:
        build.main(args=[])
        run_webpack_compilation()
    else:
        # Unreachable given the validation above; kept as a defensive guard.
        # Fix: same missing-space defect as the first message.
        raise Exception(
            'Invalid lighthouse mode: \'%s\', please choose '
            'from \'accessibility\' or \'performance\'' % lighthouse_mode)

    common.start_redis_server()

    # TODO(#11549): Move this to top of the file.
    import contextlib2

    # Renamed from `managed_dev_appserver` to avoid shadowing
    # common.managed_dev_appserver within this function.
    dev_appserver_context = common.managed_dev_appserver(
        APP_YAML_FILENAMES[server_mode], port=GOOGLE_APP_ENGINE_PORT,
        clear_datastore=True, log_level='critical',
        skip_sdk_update_check=True)

    with contextlib2.ExitStack() as stack:
        stack.enter_context(common.managed_elasticsearch_dev_server())
        if constants.EMULATOR_MODE:
            stack.enter_context(common.managed_firebase_auth_emulator())
        stack.enter_context(dev_appserver_context)

        # Wait for the servers to come up.
        common.wait_for_port_to_be_in_use(feconf.ES_LOCALHOST_PORT)
        common.wait_for_port_to_be_in_use(GOOGLE_APP_ENGINE_PORT)

        run_lighthouse_puppeteer_script()
        run_lighthouse_checks(lighthouse_mode)
def test_wait_for_port_to_be_in_use_when_port_failed_to_open(self):
    """When the port never opens, the wait loop sleeps until its limit
    and then calls sys.exit.
    """
    sleep_counter = self.exit_stack.enter_context(
        self.swap_with_call_counter(time, 'sleep'))
    # The port never reports as in use.
    self.exit_stack.enter_context(
        self.swap(common, 'is_port_in_use', lambda _: False))
    # Swallow the sys.exit call so the test can inspect the sleep count.
    self.exit_stack.enter_context(
        self.swap_with_checks(sys, 'exit', lambda _: None))

    common.wait_for_port_to_be_in_use(1)

    self.assertEqual(
        sleep_counter.times_called,
        common.MAX_WAIT_TIME_FOR_PORT_TO_OPEN_SECS)
def test_wait_for_port_to_be_in_use_when_port_successfully_opened(self):
    """The port reports as open on the 11th poll, after 10 sleeps."""
    poll_count = [0]

    def fake_is_port_in_use(unused_port):
        # Report the port as closed for the first 10 polls, open afterwards.
        poll_count[0] += 1
        return poll_count[0] > 10

    sleep_counter = self.exit_stack.enter_context(
        self.swap_with_call_counter(time, 'sleep'))
    self.exit_stack.enter_context(self.swap_with_checks(
        common, 'is_port_in_use', fake_is_port_in_use))

    common.wait_for_port_to_be_in_use(1)

    self.assertEqual(poll_count[0], 11)
    self.assertEqual(sleep_counter.times_called, 10)
def managed_webdriver_server(chrome_version=None):
    """Returns context manager to start/stop the Webdriver server gracefully.

    This context manager updates Google Chrome before starting the server.

    Args:
        chrome_version: str|None. The version of Google Chrome to run the
            tests on. If None, then the currently-installed version of Google
            Chrome is used instead.

    Yields:
        psutil.Process. The Webdriver process.
    """
    if chrome_version is None:
        # Although there are spaces between Google and Chrome in the path, we
        # don't need to escape them for Popen (as opposed to on the terminal,
        # in which case we would need to escape them for the command to run).
        chrome_command = (
            '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'
            if common.is_mac_os() else 'google-chrome')
        try:
            output = subprocess.check_output([chrome_command, '--version'])
        except OSError:
            # For the error message on macOS, we need to add the backslashes
            # in. This is because it is likely that a user will try to run
            # the command on their terminal and, as mentioned above, the
            # macOS chrome version command has spaces in the path which need
            # to be escaped for successful terminal use.
            raise Exception(
                'Failed to execute "%s --version" command. This is used to '
                'determine the chromedriver version to use. Please set the '
                'chromedriver version manually using the '
                '--chrome_driver_version flag. To determine the '
                'chromedriver version to be used, please follow the '
                'instructions mentioned in the following URL:\n'
                'https://chromedriver.chromium.org/downloads/version-selection'
                % chrome_command.replace(' ', r'\ '))

        # Keep only digits and dots from the version output (e.g.
        # b'Google Chrome 96.0.4664.45'), then drop the last component to get
        # the major.minor.build prefix used by the LATEST_RELEASE_* endpoint.
        installed_version_parts = b''.join(re.findall(rb'[0-9.]', output))
        installed_version = '.'.join(
            installed_version_parts.decode('utf-8').split('.')[:-1])
        # Ask the chromedriver storage bucket which driver release matches
        # the installed Chrome version prefix.
        response = utils.url_open(
            'https://chromedriver.storage.googleapis.com/LATEST_RELEASE_%s' % (
                installed_version))
        chrome_version = response.read().decode('utf-8')

    print('\n\nCHROME VERSION: %s' % chrome_version)
    # Download the matching chromedriver before starting the server.
    subprocess.check_call([
        common.NODE_BIN_PATH, common.WEBDRIVER_MANAGER_BIN_PATH, 'update',
        '--versions.chrome', chrome_version,
    ])

    with contextlib.ExitStack() as exit_stack:
        if common.is_windows_os():
            # NOTE: webdriver-manager (version 13.0.0) uses `os.arch()` to
            # determine the architecture of the operating system, however,
            # this function can only be used to determine the architecture of
            # the machine that compiled `node`. In the case of Windows, we
            # are using the portable version, which was compiled on `ia32`
            # machine so that is the value returned by this `os.arch`
            # function. Unfortunately, webdriver-manager seems to assume that
            # Windows wouldn't run on the ia32 architecture, so its help
            # function used to determine download link returns null for
            # this, which means that the application has no idea about where
            # to download the correct version.
            #
            # https://github.com/angular/webdriver-manager/blob/b7539a5a3897a8a76abae7245f0de8175718b142/lib/provider/chromedriver.ts#L16
            # https://github.com/angular/webdriver-manager/blob/b7539a5a3897a8a76abae7245f0de8175718b142/lib/provider/geckodriver.ts#L21
            # https://github.com/angular/webdriver-manager/blob/b7539a5a3897a8a76abae7245f0de8175718b142/lib/provider/chromedriver.ts#L167
            # https://github.com/nodejs/node/issues/17036
            #
            # Work around this by patching the provider sources to hard-code
            # the real architecture; the replacements are undone when the
            # ExitStack unwinds.
            regex_pattern = re.escape('this.osArch = os.arch();')
            arch = 'x64' if common.is_x64_architecture() else 'x86'
            replacement_string = 'this.osArch = "%s";' % arch
            exit_stack.enter_context(common.inplace_replace_file_context(
                common.CHROME_PROVIDER_FILE_PATH, regex_pattern,
                replacement_string))
            exit_stack.enter_context(common.inplace_replace_file_context(
                common.GECKO_PROVIDER_FILE_PATH, regex_pattern,
                replacement_string))

        # OK to use shell=True here because we are passing string literals
        # and constants, so there is no risk of a shell-injection attack.
        proc = exit_stack.enter_context(managed_process([
            common.NODE_BIN_PATH, common.WEBDRIVER_MANAGER_BIN_PATH, 'start',
            '--versions.chrome', chrome_version, '--quiet', '--standalone',
        ], human_readable_name='Webdriver manager', shell=True))

        # 4444 is presumably the standard Selenium/Webdriver server port —
        # TODO confirm against the webdriver-manager configuration.
        common.wait_for_port_to_be_in_use(4444)
        yield proc
def run_tests(args):
    """Run the scripts to start end-to-end tests.

    Args:
        args: argparse.Namespace. Parsed command-line arguments; this
            function reads skip_install, prod_env, skip_build,
            deparallelize_terser, source_maps, chrome_driver_version,
            server_log_level, debug_mode, sharding_instances and suite.

    Returns:
        tuple(list(str), int). The lines printed by the e2e test process and
        its exit code.
    """
    # Refuse to run if another Oppia server instance is already up, since the
    # servers started below would conflict with it.
    oppia_instance_is_already_running = is_oppia_server_already_running()
    if oppia_instance_is_already_running:
        sys.exit(1)
    setup_and_install_dependencies(args.skip_install)

    common.start_redis_server()
    atexit.register(cleanup)

    dev_mode = not args.prod_env

    if args.skip_build:
        build.modify_constants(prod_env=args.prod_env)
    else:
        build_js_files(
            dev_mode, deparallelize_terser=args.deparallelize_terser,
            source_maps=args.source_maps)
    # Prefer the explicitly requested chromedriver version; otherwise detect
    # one from the installed Chrome.
    version = args.chrome_driver_version or get_chrome_driver_version()
    python_utils.PRINT('\n\nCHROMEDRIVER VERSION: %s\n\n' % version)
    start_webdriver_manager(version)

    # TODO(#11549): Move this to top of the file.
    import contextlib2
    managed_dev_appserver = common.managed_dev_appserver(
        'app.yaml' if args.prod_env else 'app_dev.yaml',
        port=GOOGLE_APP_ENGINE_PORT,
        log_level=args.server_log_level,
        clear_datastore=True,
        skip_sdk_update_check=True,
        env={'PORTSERVER_ADDRESS': PORTSERVER_SOCKET_FILEPATH})

    with contextlib2.ExitStack() as stack:
        # The ExitStack shuts all of these servers down when the block exits.
        stack.enter_context(common.managed_elasticsearch_dev_server())
        if constants.EMULATOR_MODE:
            stack.enter_context(common.managed_firebase_auth_emulator())
        stack.enter_context(managed_dev_appserver)

        python_utils.PRINT('Waiting for servers to come up...')
        # Wait for the servers to come up.
        common.wait_for_port_to_be_in_use(feconf.ES_LOCALHOST_PORT)
        common.wait_for_port_to_be_in_use(WEB_DRIVER_PORT)
        common.wait_for_port_to_be_in_use(GOOGLE_APP_ENGINE_PORT)
        python_utils.PRINT('Servers have come up.')
        python_utils.PRINT(
            'Note: If ADD_SCREENSHOT_REPORTER is set to true in '
            'core/tests/protractor.conf.js, you can view screenshots '
            'of the failed tests in ../protractor-screenshots/')

        # Build the node command line that launches Protractor.
        commands = [common.NODE_BIN_PATH]
        if args.debug_mode:
            commands.append('--inspect-brk')
        # This flag ensures tests fail if waitFor calls time out.
        commands.append('--unhandled-rejections=strict')
        commands.append(PROTRACTOR_BIN_PATH)
        commands.extend(
            get_e2e_test_parameters(
                args.sharding_instances, args.suite, dev_mode))

        # Stream the test runner's stdout line by line, both echoing it and
        # collecting it for the caller.
        p = subprocess.Popen(commands, stdout=subprocess.PIPE)
        output_lines = []
        while True:
            nextline = p.stdout.readline()
            # readline() returns b'' only at EOF; also require the process to
            # have exited before stopping.
            if len(nextline) == 0 and p.poll() is not None:
                break
            if isinstance(nextline, str):
                # This is a failsafe line in case we get non-unicode input,
                # but the tests provide all strings as unicode.
                nextline = nextline.decode('utf-8')  # pragma: nocover
            output_lines.append(nextline.rstrip())
            # Replaces non-ASCII characters with '?'.
            # NOTE(review): this writes bytes to sys.stdout, which is
            # Python-2 behavior; under Python 3 this would need
            # sys.stdout.buffer — confirm the target runtime.
            sys.stdout.write(nextline.encode('ascii', errors='replace'))

        return output_lines, p.returncode