def main(tokens=None):
    """Check if a PR is low-risk."""
    parser = argparse.ArgumentParser()
    parser.add_argument('pr_url', help='The URL of the pull request.')
    args = parser.parse_args(args=tokens)
    parsed_url = parse_pr_url(args.pr_url)
    if not parsed_url:
        raise RuntimeError('Failed to parse PR URL %s' % args.pr_url)
    owner, repo, number = parsed_url
    pr = lookup_pr(owner, repo, number)
    if not pr:
        raise RuntimeError('Failed to load PR from GitHub API')
    base_repo_url = pr['base']['repo']['clone_url']
    common.run_cmd(['git', 'remote', 'add', UPSTREAM_REMOTE, base_repo_url])
    base_branch = pr['base']['ref']
    common.run_cmd(['git', 'fetch', UPSTREAM_REMOTE, base_branch])
    diff_files, file_diffs = load_diff(pr['base']['ref'])
    if not diff_files:
        raise RuntimeError('Failed to load PR diff')
    for low_risk_type, low_risk_checker in LOW_RISK_CHECKERS:
        reason_not_low_risk = low_risk_checker(pr, diff_files, file_diffs)
        if reason_not_low_risk:
            python_utils.PRINT(
                'PR is not a low-risk PR of type %s because: %s'
                % (low_risk_type, reason_not_low_risk))
        else:
            python_utils.PRINT('PR is low-risk. Skipping some CI checks.')
            return 0
    python_utils.PRINT('PR is not low-risk. Running all CI checks.')
    return 1
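
# A minimal sketch (not part of the actual script) of the checker contract
# implied above: LOW_RISK_CHECKERS is assumed to be a sequence of
# (type_name, checker) pairs, where each checker receives the PR JSON, the
# changed files, and the per-file diffs, and returns an empty string when the
# PR qualifies as low-risk, or a human-readable reason otherwise. The checker
# below is hypothetical.
def _example_docs_only_checker(unused_pr, diff_files, unused_file_diffs):
    """Returns '' if every changed file is documentation, else a reason."""
    for filename in diff_files:
        if not filename.endswith('.md'):
            return 'File %s is not a documentation file' % filename
    return ''


_EXAMPLE_LOW_RISK_CHECKERS = (
    ('docs-only', _example_docs_only_checker),
)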
def check_for_backend_python_library_inconsistencies():
    """Checks the state of the 'third_party/python_libs' folder and compares
    it to the required libraries specified in 'requirements.txt'.
    If any inconsistencies are found, the script displays the inconsistencies
    and exits.
    """
    mismatches = install_backend_python_libs.get_mismatches()

    if mismatches:
        python_utils.PRINT(
            'Your currently installed python libraries do not match the\n'
            'libraries listed in your "requirements.txt" file. Here is a\n'
            'full list of library/version discrepancies:\n')

        python_utils.PRINT(
            '{:<35} |{:<25}|{:<25}'.format(
                'Library', 'Requirements Version',
                'Currently Installed Version'))
        for library_name, version_strings in mismatches.items():
            python_utils.PRINT('{!s:<35} |{!s:<25}|{!s:<25}'.format(
                library_name, version_strings[0], version_strings[1]))
        python_utils.PRINT('\n')
        common.print_each_string_after_two_new_lines([
            'Please fix these discrepancies by editing the `requirements.in`\n'
            'file or running `scripts.install_third_party` to regenerate\n'
            'the `third_party/python_libs` directory.\n'])
        sys.exit(1)
    else:
        python_utils.PRINT(
            'Python dependencies consistency check succeeded.')
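
# For illustration only (inferred from the table-printing code above, not
# from the get_mismatches() implementation): the mismatches dict is assumed
# to map a library name to a (requirements_version, installed_version) pair,
# with a missing entry on either side possibly represented as None.
_EXAMPLE_MISMATCHES = {
    'flask': ('1.1.4', '1.0.2'),
    'pyyaml': ('5.4.1', None),  # Listed in requirements.txt, not installed.
}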
def install_redis_cli():
    """This installs the redis-cli to the local oppia third_party directory
    so that development servers and backend tests can make use of a local
    redis cache. Redis-cli installed here (redis-cli-6.0.6) is different from
    the redis package installed in dependencies.json (redis-3.5.3). The
    redis-3.5.3 package detailed in dependencies.json is the Python library
    that allows users to communicate with any Redis cache using Python. The
    redis-cli-6.0.6 package installed in this function contains C source code
    for the redis-cli and redis-server programs detailed below.

    The redis-cli program is the command line interface that serves up an
    interpreter that allows users to connect to a redis database cache and
    query the cache using the Redis CLI API. It also contains functionality
    to shut down the redis server.

    We need to install redis-cli separately from the default installation of
    backend libraries since it is a system program and we need to build the
    program files after the library is untarred.

    The redis-server starts a Redis database on the local machine that can be
    queried using either the Python redis library or the redis-cli
    interpreter.
    """
    try:
        subprocess.call(
            [common.REDIS_SERVER_PATH, '--version'],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        python_utils.PRINT('Redis-cli is already installed.')
    except OSError:
        # The redis-cli is not installed, run the script to install it.
        # NOTE: We do the installation here since we need to use make.
        python_utils.PRINT('Installing redis-cli...')

        download_and_untar_files(
            'https://download.redis.io/releases/redis-%s.tar.gz'
            % common.REDIS_CLI_VERSION,
            TARGET_DOWNLOAD_DIRS['oppiaTools'],
            'redis-%s' % common.REDIS_CLI_VERSION,
            'redis-cli-%s' % common.REDIS_CLI_VERSION)

        # Temporarily change the working directory to redis-cli-6.0.6 so we
        # can build the source code.
        with common.CD(
            os.path.join(
                TARGET_DOWNLOAD_DIRS['oppiaTools'],
                'redis-cli-%s' % common.REDIS_CLI_VERSION)):
            # Build the scripts necessary to start the redis server.
            # The make command only builds the C files in the src/ folder
            # without modifying anything outside of the oppia root directory.
            # It will build the redis-cli and redis-server files so that we
            # can run the server from inside the oppia folder by executing
            # the scripts src/redis-cli and src/redis-server.
            subprocess.call(['make'])

        # Make the scripts executable.
        subprocess.call(['chmod', '+x', common.REDIS_SERVER_PATH])
        subprocess.call(['chmod', '+x', common.REDIS_CLI_PATH])

        python_utils.PRINT('Redis-cli installed successfully.')
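
# common.CD is used above as a context manager that temporarily switches the
# working directory. The real helper lives in scripts/common.py; the
# standalone sketch below is illustrative only and shows the standard way to
# build such a helper:
import contextlib
import os


@contextlib.contextmanager
def _example_cd(new_dir):
    """Temporarily changes the working directory, restoring it on exit."""
    original_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield
    finally:
        # Restore the original directory even if the body raised.
        os.chdir(original_dir)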
def update_developer_names(release_summary_lines):
    """Updates about-page.constants.ts file.

    Args:
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.
    """
    python_utils.PRINT('Updating about-page file...')
    new_developer_names = get_new_contributors(
        release_summary_lines, return_only_names=True)

    with python_utils.open_file(
        ABOUT_PAGE_CONSTANTS_FILEPATH, 'r') as about_page_file:
        about_page_lines = about_page_file.readlines()
        start_index = about_page_lines.index(CREDITS_START_LINE) + 1
        end_index = about_page_lines[start_index:].index(
            CREDITS_END_LINE) + 1
        all_developer_names = about_page_lines[start_index:end_index]
        for name in new_developer_names:
            all_developer_names.append(
                '%s\'%s\',\n' % (CREDITS_INDENT, name))
        all_developer_names = sorted(
            list(set(all_developer_names)), key=lambda s: s.lower())

        about_page_lines[start_index:end_index] = all_developer_names

    with python_utils.open_file(
        ABOUT_PAGE_CONSTANTS_FILEPATH, 'w') as about_page_file:
        for line in about_page_lines:
            about_page_file.write(str(line))
    python_utils.PRINT('Updated about-page file!')
def run_webpack_compilation(source_maps=False):
    """Runs webpack compilation.

    Args:
        source_maps: bool. Whether to compile with source maps.
    """
    max_tries = 5
    webpack_bundles_dir_name = 'webpack_bundles'
    for _ in range(max_tries):
        try:
            managed_webpack_compiler = (
                servers.managed_webpack_compiler(use_source_maps=source_maps))
            with managed_webpack_compiler as proc:
                proc.wait()
        except subprocess.CalledProcessError as error:
            python_utils.PRINT(error.output)
            sys.exit(error.returncode)
        if os.path.isdir(webpack_bundles_dir_name):
            break
    else:
        # We didn't break out of the loop, meaning all attempts have failed.
        python_utils.PRINT(
            'Failed to complete webpack compilation, exiting...')
        sys.exit(1)
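
# The retry loop above relies on Python's for/else: the else block runs only
# when the loop finishes without hitting a break. A self-contained sketch of
# that pattern (the names here are illustrative, not from the script):
def _example_retry(action, succeeded, max_tries=5):
    """Runs action() up to max_tries times, until succeeded() is True."""
    for _ in range(max_tries):
        action()
        if succeeded():
            break
    else:
        # Reached only if no attempt succeeded (the loop never broke).
        raise RuntimeError('All %d attempts failed' % max_tries)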
def is_order_of_sections_valid(release_summary_lines):
    """Checks that the ordering of sections in release_summary file matches
    the expected ordering.

    This is required to ensure that automatic updates to changelog and
    credits are correct.

    Args:
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.

    Returns:
        bool. Whether the ordering is correct.
    """
    sections = [
        line for line in release_summary_lines if line.startswith('###')
    ]
    for section, next_section in EXPECTED_ORDERING_DICT.items():
        if section not in sections:
            python_utils.PRINT(
                'Expected release_summary to have %s section to ensure '
                'that automatic updates to changelog and credits are '
                'correct.' % section.strip())
            return False
        index = sections.index(section)
        if index + 1 >= len(sections) or sections[index + 1] != next_section:
            python_utils.PRINT(
                'Expected %s section to be followed by %s section in '
                'release_summary to ensure that automatic updates to '
                'changelog and credits are correct.' % (
                    section.strip(), next_section.strip()))
            return False
    return True
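
# Inferred from the loop above (the real constant is defined elsewhere in the
# script): EXPECTED_ORDERING_DICT is assumed to map each '###' section header
# to the header that must immediately follow it. The section names below are
# hypothetical placeholders.
_EXAMPLE_EXPECTED_ORDERING_DICT = {
    '### Changelog:\n': '### Commit History:\n',
    '### New Authors:\n': '### New Contributors:\n',
}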
def download_files(source_url_root, target_dir, source_filenames):
    """Downloads a group of files and saves them to a given directory.

    Each file is downloaded only if it does not already exist.

    Args:
        source_url_root: str. The URL to prepend to all the filenames.
        target_dir: str. The directory to save the files to.
        source_filenames: list(str). Each filename is appended to the end of
            the source_url_root in order to give the URL from which to
            download the file. The downloaded file is then placed in
            target_dir, and retains the same filename.
    """
    assert isinstance(source_filenames, list), (
        'Expected list of filenames, got \'%s\'' % source_filenames)
    common.ensure_directory_exists(target_dir)
    for filename in source_filenames:
        if not os.path.exists(os.path.join(target_dir, filename)):
            python_utils.PRINT(
                'Downloading file %s to %s ...' % (filename, target_dir))
            urlrequest.urlretrieve(
                '%s/%s' % (source_url_root, filename),
                filename=os.path.join(target_dir, filename))
            python_utils.PRINT('Download of %s succeeded.' % filename)
def _get_file_extensions(file_extensions_to_lint):
    """Returns the file extensions which need to be linted and checked.

    Args:
        file_extensions_to_lint: list(str). The list of file extensions to
            be linted and checked.

    Returns:
        list(str)|set(str). The file extensions to be linted and checked.
    """
    all_file_extensions_type = ['js', 'py', 'html', 'css', 'other']

    if file_extensions_to_lint:
        # Check if 'js' and 'ts' both are present in file_extensions_to_lint.
        js_and_ts_is_present = 'js' in file_extensions_to_lint and (
            'ts' in file_extensions_to_lint)

        if js_and_ts_is_present:
            python_utils.PRINT(
                'Please use only one of "js" or "ts", as we do not have '
                'separate linters for JS and TS files. If both these options '
                'are used together, then the JS/TS linter will be run twice.')
            python_utils.PRINT('Exiting...')
            sys.exit(1)

        return set(file_extensions_to_lint)

    return all_file_extensions_type
def main(args=None):
    """Main method for pre-commit hook that checks files added/modified in
    a commit.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--install', action='store_true', default=False,
        help='Install pre_commit_hook to the .git/hooks dir')
    args = parser.parse_args(args=args)

    if args.install:
        install_hook()
        return

    python_utils.PRINT('Running pre-commit check for feconf and constants ...')
    check_changes_in_config()
    python_utils.PRINT('Running pre-commit check for package-lock.json ...')
    if does_diff_include_package_lock_file() and (
            does_current_folder_contain_have_package_lock_file()):
        # The following message is necessary since git commit aborts
        # quietly when the status is non-zero.
        python_utils.PRINT('-----------COMMIT ABORTED-----------')
        python_utils.PRINT(
            'Oppia uses Yarn to manage node packages. Please delete '
            'package-lock.json, revert the changes in package.json, and use '
            'yarn to add, update, or delete the packages. For more '
            'information on how to use yarn, see '
            'https://yarnpkg.com/en/docs/usage.')
        sys.exit(1)
    return
def compile_protobuf_files(proto_files_paths):
    """Compiles protobuf files using buf.

    Args:
        proto_files_paths: list(str). Paths to the proto files; each path is
            passed to a separate `buf generate` invocation.

    Raises:
        Exception. If there is any error in compiling the proto files.
    """
    proto_env = os.environ.copy()
    proto_env['PATH'] += '%s%s/bin' % (os.pathsep, PROTOC_DIR)
    proto_env['PATH'] += '%s%s/bin' % (os.pathsep, PROTOC_GEN_TS_PATH)
    buf_path = os.path.join(
        BUF_DIR,
        BUF_DARWIN_FILES[0] if common.is_mac_os() else BUF_LINUX_FILES[0])
    for path in proto_files_paths:
        command = [buf_path, 'generate', path]
        process = subprocess.Popen(
            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            env=proto_env)
        stdout, stderr = process.communicate()
        if process.returncode == 0:
            python_utils.PRINT(stdout)
        else:
            python_utils.PRINT(stderr)
            raise Exception('Error compiling proto files at %s' % path)

    # Since there is no simple configuration for imports when using protobuf
    # to generate Python files, we need to manually fix the imports.
    # See: https://github.com/protocolbuffers/protobuf/issues/1491
    compiled_protobuf_dir = pathlib.Path(
        os.path.join(common.CURR_DIR, 'proto_files'))
    for p in compiled_protobuf_dir.iterdir():
        if p.suffix == '.py':
            common.inplace_replace_file(
                p.absolute(),
                r'^import (\w*_pb2 as)',
                r'from proto_files import \1')
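
# A standalone sketch of the import rewrite above, assuming that
# common.inplace_replace_file performs a per-line regex substitution (the
# exact helper semantics live in scripts/common.py). This turns
# 'import foo_pb2 as foo' into 'from proto_files import foo_pb2 as foo':
import re


def _example_fix_pb2_imports(source_text):
    """Rewrites bare *_pb2 imports to package-qualified imports."""
    return re.sub(
        r'^import (\w*_pb2 as)', r'from proto_files import \1',
        source_text, flags=re.MULTILINE)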
def download_and_untar_files(
        source_url, target_parent_dir, tar_root_name, target_root_name):
    """Downloads a tar file, untars it, and saves the result in a given dir.

    The download occurs only if the target directory that the tar file untars
    to does not exist.

    NB: This function assumes that the root level of the tar file has exactly
    one folder.

    Args:
        source_url: str. The URL from which to download the tar file.
        target_parent_dir: str. The directory to save the contents of the
            tar file to.
        tar_root_name: str. The name of the top-level folder in the tar
            directory.
        target_root_name: str. The name that the top-level folder should be
            renamed to in the local directory.
    """
    if not os.path.exists(os.path.join(target_parent_dir, target_root_name)):
        python_utils.PRINT(
            'Downloading and untarring file %s to %s ...' % (
                tar_root_name, target_parent_dir))
        common.ensure_directory_exists(target_parent_dir)

        python_utils.url_retrieve(source_url, filename=TMP_UNZIP_PATH)
        with contextlib.closing(
            tarfile.open(name=TMP_UNZIP_PATH, mode='r:gz')) as tfile:
            tfile.extractall(target_parent_dir)
        os.remove(TMP_UNZIP_PATH)

        # Rename the target directory.
        os.rename(
            os.path.join(target_parent_dir, tar_root_name),
            os.path.join(target_parent_dir, target_root_name))

        python_utils.PRINT('Download of %s succeeded.' % tar_root_name)
def _get_filepaths_from_path(input_path, namespace=None):
    """Get paths to all lintable files recursively under a path.

    This function applies some ignore rules (from .eslintignore) but not
    all.

    Args:
        input_path: str. Path to look for files under.
        namespace: multiprocessing.Namespace. Namespace in which to execute
            this function.

    Returns:
        list. Paths to lintable files.
    """
    namespace.files = FileCache()
    file_cache = namespace.files
    input_path = os.path.join(os.getcwd(), input_path)
    if not os.path.exists(input_path):
        python_utils.PRINT(
            'Could not locate file or directory %s. Exiting.' % input_path)
        python_utils.PRINT('----------------------------------------')
        sys.exit(1)
    if os.path.isfile(input_path):
        return [input_path]
    else:
        eslintignore_path = os.path.join(os.getcwd(), '.eslintignore')
        excluded_glob_patterns = file_cache.readlines(eslintignore_path)
        return _get_all_files_in_directory(
            input_path, excluded_glob_patterns)
def _run_pip_command(cmd_parts):
    """Runs a pip command with some flags and configs. If it fails, tries to
    rerun it with additional flags; if that also fails, raises an exception.

    Args:
        cmd_parts: list(str). List of cmd parts to be run with pip.

    Raises:
        Exception. Error installing package.
    """
    # The call to python -m is used to ensure that Python and Pip versions
    # are compatible.
    command = [sys.executable, '-m', 'pip'] + cmd_parts
    process = subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        encoding='utf-8')
    stdout, stderr = process.communicate()
    if process.returncode == 0:
        python_utils.PRINT(stdout)
    elif 'can\'t combine user with prefix' in stderr:
        python_utils.PRINT('Trying by setting --user and --prefix flags.')
        subprocess.check_call(
            command + ['--user', '--prefix=', '--system'])
    else:
        python_utils.PRINT(stderr)
        python_utils.PRINT(
            'Refer to https://github.com/oppia/oppia/wiki/Troubleshooting')
        raise Exception('Error installing package')
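
# Why `sys.executable -m pip` rather than a bare `pip`: it guarantees that
# the pip which runs belongs to the same interpreter executing this script,
# even when several Pythons are installed. A minimal illustration (standard
# library only):
import subprocess
import sys


def _example_pip_version():
    """Prints the version of the pip bound to the current interpreter."""
    subprocess.check_call([sys.executable, '-m', 'pip', '--version'])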
def compile_and_check_typescript(config_path):
    """Compiles typescript files and checks the compilation errors.

    Args:
        config_path: str. The config that should be used to run the
            typescript checks.
    """
    node_path = common.NODE_PATH
    os.environ['PATH'] = '%s/bin:' % node_path + os.environ['PATH']

    validate_compiled_js_dir()

    if os.path.exists(COMPILED_JS_DIR):
        shutil.rmtree(COMPILED_JS_DIR)

    python_utils.PRINT('Compiling and testing typescript...')
    cmd = ['./node_modules/typescript/bin/tsc', '--project', config_path]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, encoding='utf-8')
    error_messages = []
    for line in iter(process.stdout.readline, ''):
        if not line.startswith('node_modules'):
            error_messages.append(line)
    if os.path.exists(COMPILED_JS_DIR):
        shutil.rmtree(COMPILED_JS_DIR)
    if error_messages:
        python_utils.PRINT('Errors found during compilation\n')
        for message in error_messages:
            python_utils.PRINT(message, end='')
        sys.exit(1)
    else:
        python_utils.PRINT('Compilation successful!')
def main():
    """Tests that the CI config files and protractor.conf.js contain the
    same e2e test suites.
    """
    python_utils.PRINT(
        'Checking that all e2e test files are captured '
        'in protractor.conf.js...')
    protractor_test_suite_files = get_e2e_test_filenames_from_protractor_dir()
    protractor_conf_test_suites = (
        get_e2e_test_filenames_from_protractor_conf_file())
    if not protractor_test_suite_files == protractor_conf_test_suites:
        raise Exception(
            'One or more test files from the protractor or '
            'protractor_desktop directory are missing from '
            'protractor.conf.js')
    python_utils.PRINT('Done!')

    python_utils.PRINT(
        'Checking that e2e tests are captured in CI config files...')
    protractor_test_suites = get_e2e_suite_names_from_protractor_file()
    ci_suite_names = get_e2e_suite_names_from_ci_config_file()
    for excluded_test in TEST_SUITES_NOT_RUN_IN_CI:
        protractor_test_suites.remove(excluded_test)
    if not ci_suite_names:
        raise Exception(
            'The e2e test suites that have been extracted from '
            'the script section of the CI config files are empty.')
    if not protractor_test_suites:
        raise Exception(
            'The e2e test suites that have been extracted from '
            'protractor.conf.js are empty.')
    if SAMPLE_TEST_SUITE_THAT_IS_KNOWN_TO_EXIST not in ci_suite_names:
        raise Exception(
            '{} is expected to be in the e2e test suites '
            'extracted from the script section of CI config '
            'files, but it is missing.'
            .format(SAMPLE_TEST_SUITE_THAT_IS_KNOWN_TO_EXIST))
    if SAMPLE_TEST_SUITE_THAT_IS_KNOWN_TO_EXIST not in protractor_test_suites:
        raise Exception(
            '{} is expected to be in the e2e test suites '
            'extracted from the protractor.conf.js file, '
            'but it is missing.'
            .format(SAMPLE_TEST_SUITE_THAT_IS_KNOWN_TO_EXIST))
    if set(protractor_test_suites) != set(ci_suite_names):
        raise Exception(
            'Protractor test suites and CI test suites are not in sync. '
            'The following suites are not in sync: {}'.format(
                utils.compute_list_difference(
                    protractor_test_suites, ci_suite_names)))
    python_utils.PRINT('Done!')
def log(message, show_time=False):
    """Logs a message to the terminal.

    If show_time is True, prefixes the message with the current time.
    """
    with LOG_LOCK:
        if show_time:
            python_utils.PRINT(
                datetime.datetime.utcnow().strftime('%H:%M:%S'), message)
        else:
            python_utils.PRINT(message)
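
# LOG_LOCK is presumably a threading.Lock shared by concurrent workers, so
# that log lines from different threads do not interleave mid-line. A
# minimal self-contained version of the same pattern:
import datetime
import threading

_EXAMPLE_LOG_LOCK = threading.Lock()


def _example_log(message):
    """Prints a timestamped message; safe to call from multiple threads."""
    with _EXAMPLE_LOG_LOCK:
        print(datetime.datetime.utcnow().strftime('%H:%M:%S'), message)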
def update_authors(release_summary_lines):
    """Updates AUTHORS file.

    Args:
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.
    """
    python_utils.PRINT('Updating AUTHORS file...')
    new_authors = get_new_authors(release_summary_lines)
    update_sorted_file(AUTHORS_FILEPATH, new_authors)
    python_utils.PRINT('Updated AUTHORS file!')
def update_contributors(release_summary_lines):
    """Updates CONTRIBUTORS file.

    Args:
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.
    """
    python_utils.PRINT('Updating CONTRIBUTORS file...')
    new_contributors = get_new_contributors(release_summary_lines)
    update_sorted_file(CONTRIBUTORS_FILEPATH, new_contributors)
    python_utils.PRINT('Updated CONTRIBUTORS file!')
def main(args=None):
    """Runs the script to set up GAE."""
    unused_parsed_args = _PARSER.parse_args(args=args)
    sys.path.append('.')
    sys.path.append(common.GOOGLE_APP_ENGINE_SDK_HOME)
    sys.path.append(os.path.join(common.OPPIA_TOOLS_DIR, 'webtest-2.0.35'))

    # Delete old *.pyc files.
    for directory, _, files in os.walk('.'):
        for file_name in files:
            if file_name.endswith('.pyc'):
                filepath = os.path.join(directory, file_name)
                os.remove(filepath)

    python_utils.PRINT(
        'Checking whether google-cloud-sdk is installed in %s'
        % common.GOOGLE_CLOUD_SDK_HOME)
    if not os.path.exists(common.GOOGLE_CLOUD_SDK_HOME):
        python_utils.PRINT(
            'Downloading Google Cloud SDK (this may take a little while)...')
        os.makedirs(common.GOOGLE_CLOUD_SDK_HOME)
        try:
            # If the google cloud version is updated here, the corresponding
            # lines (GAE_DIR and GCLOUD_PATH) in
            # assets/release_constants.json should also be updated.
            urlrequest.urlretrieve(
                'https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/'
                'google-cloud-sdk-335.0.0-linux-x86_64.tar.gz',
                filename='gcloud-sdk.tar.gz')
        except Exception:
            python_utils.PRINT('Error downloading Google Cloud SDK. Exiting.')
            raise Exception('Error downloading Google Cloud SDK.')
        python_utils.PRINT('Download complete. Installing Google Cloud SDK...')
        tar = tarfile.open(name='gcloud-sdk.tar.gz')
        tar.extractall(
            path=os.path.join(
                common.OPPIA_TOOLS_DIR, 'google-cloud-sdk-335.0.0/'))
        tar.close()

        os.remove('gcloud-sdk.tar.gz')

    # This command installs specific google cloud components for the google
    # cloud sdk to prevent the need for developers to install them
    # themselves when the app engine development server starts up. The
    # --quiet parameter specifically tells the gcloud program to autofill
    # all prompts with default values. In this case, that means accepting
    # all installations of gcloud packages.
    subprocess.call([
        common.GCLOUD_PATH,
        'components', 'install', 'beta', 'cloud-datastore-emulator',
        'app-engine-python', 'app-engine-python-extras', '--quiet'])
def download_and_unzip_files(
        source_url, target_parent_dir, zip_root_name, target_root_name):
    """Downloads a zip file, unzips it, and saves the result in a given dir.

    The download occurs only if the target directory that the zip file
    unzips to does not exist.

    NB: This function assumes that the root level of the zip file has exactly
    one folder.

    Args:
        source_url: str. The URL from which to download the zip file.
        target_parent_dir: str. The directory to save the contents of the
            zip file to.
        zip_root_name: str. The name of the top-level folder in the zip
            directory.
        target_root_name: str. The name that the top-level folder should be
            renamed to in the local directory.
    """
    if not os.path.exists(os.path.join(target_parent_dir, target_root_name)):
        python_utils.PRINT('Downloading and unzipping file %s to %s ...' % (
            zip_root_name, target_parent_dir))
        common.ensure_directory_exists(target_parent_dir)

        urlrequest.urlretrieve(source_url, filename=TMP_UNZIP_PATH)

        try:
            with zipfile.ZipFile(TMP_UNZIP_PATH, 'r') as zfile:
                zfile.extractall(path=target_parent_dir)
            os.remove(TMP_UNZIP_PATH)
        except Exception:
            if os.path.exists(TMP_UNZIP_PATH):
                os.remove(TMP_UNZIP_PATH)

            # Some downloads (like jqueryui-themes) may require a user-agent.
            req = python_utils.url_request(source_url, None, {})
            req.add_header('User-agent', 'python')
            # This is needed to get a seekable filestream that can be used
            # by zipfile.ZipFile.
            file_stream = python_utils.string_io(
                buffer_value=python_utils.url_open(req).read())
            with zipfile.ZipFile(file_stream, 'r') as zfile:
                zfile.extractall(path=target_parent_dir)

        # Rename the target directory.
        os.rename(
            os.path.join(target_parent_dir, zip_root_name),
            os.path.join(target_parent_dir, target_root_name))

        python_utils.PRINT('Download of %s succeeded.' % zip_root_name)
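
# zipfile.ZipFile needs a seekable file object because the zip central
# directory lives at the end of the archive; a raw HTTP response stream is
# not seekable, so the fallback above buffers the whole payload first. A
# standalone sketch of the same idea using only the standard library:
import io
import urllib.request
import zipfile


def _example_unzip_from_url(source_url, target_dir):
    """Downloads a zip into memory and extracts it to target_dir."""
    req = urllib.request.Request(source_url, headers={'User-agent': 'python'})
    payload = urllib.request.urlopen(req).read()
    # io.BytesIO wraps the downloaded bytes in a seekable stream.
    with zipfile.ZipFile(io.BytesIO(payload), 'r') as zfile:
        zfile.extractall(path=target_dir)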
def run_lighthouse_puppeteer_script():
    """Runs puppeteer script to collect dynamic urls."""
    puppeteer_path = os.path.join(
        'core', 'tests', 'puppeteer', 'lighthouse_setup.js')
    bash_command = [common.NODE_BIN_PATH, puppeteer_path]

    process = subprocess.Popen(
        bash_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    if process.returncode == 0:
        python_utils.PRINT(stdout)
        for line in stdout.split(b'\n'):
            # Standard output is in bytes, we need to decode the line to
            # print it.
            export_url(line.decode('utf-8'))
        python_utils.PRINT('Puppeteer script completed successfully.')
    else:
        python_utils.PRINT('Return code: %s' % process.returncode)
        python_utils.PRINT('OUTPUT:')
        # Standard output is in bytes, we need to decode the line to
        # print it.
        python_utils.PRINT(stdout.decode('utf-8'))
        python_utils.PRINT('ERROR:')
        # Error output is in bytes, we need to decode the line to
        # print it.
        python_utils.PRINT(stderr.decode('utf-8'))
        python_utils.PRINT(
            'Puppeteer script failed. More details can be found above.')
        sys.exit(1)
def install_npm_library(library_name, version, path):
    """Installs the npm library after ensuring it is not already installed.

    Args:
        library_name: str. The library name.
        version: str. The library version.
        path: str. The installation path for the library.
    """
    python_utils.PRINT(
        'Checking whether %s is installed in %s' % (library_name, path))
    if not os.path.exists(os.path.join(NODE_MODULES_PATH, library_name)):
        python_utils.PRINT('Installing %s' % library_name)
        subprocess.check_call(
            ['yarn', 'add', '%s@%s' % (library_name, version)])
def update_changelog(
        branch_name, release_summary_lines, current_release_version_number):
    """Updates CHANGELOG file.

    Args:
        branch_name: str. The name of the current branch.
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.
        current_release_version_number: str. The version of current release.
    """
    python_utils.PRINT('Updating Changelog...')
    start_index = release_summary_lines.index(
        constants.release_constants.CHANGELOG_HEADER) + 1
    end_index = release_summary_lines.index(
        constants.release_constants.COMMIT_HISTORY_HEADER)
    release_version_changelog = [
        u'v%s (%s)\n' % (current_release_version_number, CURRENT_DATE),
        u'------------------------\n'] + release_summary_lines[
            start_index:end_index]
    changelog_lines = []
    with python_utils.open_file(CHANGELOG_FILEPATH, 'r') as changelog_file:
        changelog_lines = changelog_file.readlines()

    if constants.release_constants.BRANCH_TYPE_HOTFIX in branch_name:
        previous_release_version = get_previous_release_version(
            constants.release_constants.BRANCH_TYPE_HOTFIX,
            current_release_version_number)
        changelog_lines = remove_repetition_from_changelog(
            current_release_version_number, previous_release_version,
            changelog_lines)
    else:
        previous_release_version = get_previous_release_version(
            constants.release_constants.BRANCH_TYPE_RELEASE,
            current_release_version_number)
        # Update only if changelog is generated before and contains info for
        # current version.
        if any(
                line.startswith(
                    'v%s' % current_release_version_number
                ) for line in changelog_lines):
            changelog_lines = remove_repetition_from_changelog(
                current_release_version_number, previous_release_version,
                changelog_lines)

    changelog_lines[2:2] = release_version_changelog
    with python_utils.open_file(CHANGELOG_FILEPATH, 'w') as changelog_file:
        for line in changelog_lines:
            changelog_file.write(line)
    python_utils.PRINT('Updated Changelog!')
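
# The `changelog_lines[2:2] = release_version_changelog` statement above uses
# an empty-slice assignment, which inserts a list in place (here, right after
# the changelog title and blank line) without replacing anything. A quick
# illustration:
#
#     lines = ['# CHANGELOG\n', '\n', 'v1.0 (old)\n']
#     lines[2:2] = ['v1.1 (new)\n', '----\n']
#     # lines is now:
#     # ['# CHANGELOG\n', '\n', 'v1.1 (new)\n', '----\n', 'v1.0 (old)\n']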
def ask_user_to_confirm(message):
    """Asks user to perform a task and confirm once they are done.

    Args:
        message: str. The message which specifies the task user has to do.
    """
    while True:
        python_utils.PRINT(
            '******************************************************')
        python_utils.PRINT(message)
        python_utils.PRINT(
            'Confirm once you are done by entering y/ye/yes.\n')
        answer = python_utils.INPUT().lower()
        if answer in AFFIRMATIVE_CONFIRMATIONS:
            return
def ensure_pip_library_is_installed(package, version, path):
    """Installs the pip library after ensuring it is not already installed.

    Args:
        package: str. The package name.
        version: str. The package version.
        path: str. The installation path for the package.
    """
    python_utils.PRINT(
        'Checking if %s is installed in %s' % (package, path))

    exact_lib_path = os.path.join(path, '%s-%s' % (package, version))
    if not os.path.exists(exact_lib_path):
        python_utils.PRINT('Installing %s' % package)
        install_backend_python_libs.pip_install(
            '%s==%s' % (package, version), exact_lib_path)
def main():
    """Compares the state of the current 'third_party/python_libs'
    directory to the libraries listed in the 'requirements.txt' file. If
    there are mismatches, regenerates the 'requirements.txt' file and
    corrects the mismatches.
    """
    verify_pip_is_installed()
    python_utils.PRINT('Regenerating "requirements.txt" file...')
    # Calls the script to regenerate requirements. The reason we cannot
    # call the regenerate requirements functionality inline is because the
    # python script that regenerates the file is a command-line interface
    # (CLI). Once the CLI finishes execution, it forces itself and any
    # python scripts in the current callstack to exit.
    # Therefore, in order to allow continued execution after the
    # requirements file is generated, we must call it as a separate process.
    # The option --no-emit-index-url is specified to prevent pip compile
    # from generating index configuration line(s) in requirements.txt when
    # the local pip configuration uses one or more custom index servers.
    subprocess.check_call([
        'python', '-m', 'scripts.regenerate_requirements',
        '--no-emit-index-url',
    ], stdin=subprocess.PIPE, stdout=subprocess.PIPE)

    # Adds a note to the beginning of the 'requirements.txt' file to make
    # sure developers understand that they should not append or change this
    # autogenerated file.
    with python_utils.open_file(
        common.COMPILED_REQUIREMENTS_FILE_PATH, 'r+') as f:
        content = f.read()
        f.seek(0, 0)
        f.write(
            '# Developers: Please do not modify this auto-generated file. If\n'
            '# you want to add, remove, upgrade, or downgrade libraries,\n'
            '# please change the `requirements.in` file, and then follow\n'
            '# the instructions there to regenerate this file.\n' + content)

    mismatches = get_mismatches()
    if mismatches:
        _rectify_third_party_directory(mismatches)
        validate_metadata_directories()
    else:
        python_utils.PRINT(
            'All third-party Python libraries are already installed '
            'correctly.')
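
# The read/seek(0, 0)/write sequence above is the standard way to prepend to
# an existing file: read everything, rewind to the start, then write the
# header followed by the original content. A standalone sketch:
def _example_prepend_header(filepath, header):
    """Prepends header to the file at filepath."""
    with open(filepath, 'r+') as f:
        content = f.read()
        f.seek(0, 0)
        f.write(header + content)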
def get_refs():
    """Returns the ref list taken from STDIN."""
    # Git provides refs in STDIN.
    ref_list = [GitRef(*ref_str.split()) for ref_str in sys.stdin]
    if ref_list:
        python_utils.PRINT('ref_list:')
        pprint.pprint(ref_list)
    return ref_list
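
# For a pre-push hook, git writes one line per ref being pushed to stdin, in
# the form '<local ref> <local sha1> <remote ref> <remote sha1>'. GitRef is
# presumably a namedtuple over those four fields; a sketch of that
# assumption:
import collections

_ExampleGitRef = collections.namedtuple(
    '_ExampleGitRef',
    ['local_ref', 'local_sha1', 'remote_ref', 'remote_sha1'])

# e.g.:
# _ExampleGitRef(*'refs/heads/main abc123 refs/heads/main def456'.split())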
def print_success_message(success_message):
    """Prints the given success_message in green color.

    Args:
        success_message: str. The success message to print.
    """
    # \033[92m is the ANSI escape sequence for green color.
    python_utils.PRINT('\033[92m' + success_message + '\033[0m')
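
# For reference, the ANSI SGR escape sequences used for terminal colors wrap
# the text between a color code and the reset code '\033[0m':
#
#     print('\033[91m' + 'failure text in red' + '\033[0m')
#     print('\033[92m' + 'success text in green' + '\033[0m')
#     print('\033[93m' + 'warning text in yellow' + '\033[0m')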
def print_each_string_after_two_new_lines(strings):
    """Prints the given strings, separating adjacent strings with two
    newlines.

    Args:
        strings: list(str). The strings to print.
    """
    for string in strings:
        python_utils.PRINT('%s\n' % string)
def run_webpack_compilation():
    """Runs webpack compilation."""
    max_tries = 5
    webpack_bundles_dir_name = 'webpack_bundles'
    for _ in range(max_tries):
        try:
            with servers.managed_webpack_compiler() as proc:
                proc.wait()
        except subprocess.CalledProcessError as error:
            python_utils.PRINT(error.output)
            sys.exit(error.returncode)
        if os.path.isdir(webpack_bundles_dir_name):
            break
    if not os.path.isdir(webpack_bundles_dir_name):
        python_utils.PRINT(
            'Failed to complete webpack compilation, exiting...')
        sys.exit(1)