def setUp(self):
    """Prepares fake manifest/package file streams, a fake typings
    directory listing, and the corresponding swaps for the tests.
    """
    super(CustomLintChecksManagerTests, self).setUp()
    self.verbose_mode_enabled = False
    self.manifest_file = python_utils.string_io(
        buffer_value='{\"dependencies\":{\"frontend\":{\"guppy\":'
        '{\"version\": \"0.1\"},\"skulpt-dist\":{\"version\": \"0.2\"}'
        ',\"midiJs\":{\"version\": \"0.4\"}}}}')
    self.package_file = python_utils.string_io(
        buffer_value='{\"dependencies\":{\"nerdamer\":\"^0.6\"}}')
    self.files_in_typings_dir = [
        'guppy-defs-0.1.d.ts',
        'skulpt-defs-0.2.d.ts',
        'midi-defs-0.4.d.ts',
        'nerdamer-defs-0.6.d.ts'
    ]

    def mock_open_file(path, unused_permissions):
        # Serve the fake stream that corresponds to the requested path;
        # any other path falls through to None, as in a plain dict miss.
        if path == other_files_linter.MANIFEST_JSON_FILE_PATH:
            return self.manifest_file
        if path == other_files_linter.PACKAGE_JSON_FILE_PATH:
            return self.package_file
        return None

    def mock_listdir(unused_path):
        # Pretend the typings directory contains exactly these files.
        return self.files_in_typings_dir

    self.open_file_swap = self.swap(
        python_utils, 'open_file', mock_open_file)
    self.listdir_swap = self.swap(os, 'listdir', mock_listdir)
def check_import_order(self):
    """This function is used to check that each file has imports placed
    in alphabetical order.

    Returns:
        TaskResult. A TaskResult object representing the result of the
        lint check.
    """
    name = 'Import order'
    error_messages = []
    captured_output = python_utils.string_io()
    with linter_utils.redirect_stdout(captured_output):
        # isort prints the diff for every mis-ordered file and returns
        # False for files that need changes. A list comprehension (not a
        # generator) is used so that every file is checked and every
        # diff is printed, even after the first failure.
        per_file_failures = [
            not isort.api.check_file(filepath, show_diff=True)
            for filepath in self.all_filepaths
        ]
    failed = any(per_file_failures)
    if failed:
        error_messages.append(captured_output.getvalue())
    return concurrent_task_utils.TaskResult(
        name, failed, error_messages, error_messages)
def test_process_html(self):
    """Test process_html removes whitespaces."""
    base_html_source_path = os.path.join(
        MOCK_TEMPLATES_DEV_DIR, 'base.html')
    build._ensure_files_exist([base_html_source_path])  # pylint: disable=protected-access

    # Prepare a file_stream object from python_utils.string_io().
    minified_html_file_stream = python_utils.string_io()

    # Assert that base.html has white spaces before minification.
    with python_utils.open_file(
        base_html_source_path, 'r') as source_base_file:
        source_base_file_content = source_base_file.read()
    self.assertRegexpMatches(
        source_base_file_content, r'\s{2,}',
        msg='No white spaces detected in %s unexpectedly'
        % base_html_source_path)

    # Build base.html file.
    with python_utils.open_file(
        base_html_source_path, 'r') as source_base_file:
        build.process_html(source_base_file, minified_html_file_stream)

    # All runs of two-or-more whitespace characters must be gone.
    minified_html_file_content = minified_html_file_stream.getvalue()
    self.assertNotRegexpMatches(
        minified_html_file_content, r'\s{2,}',
        msg='All white spaces must be removed from %s'
        % base_html_source_path)
def test_print_each_string_after_two_new_lines(self):
    """Checks that each given string is printed followed by two newlines.

    Uses contextlib.redirect_stdout (already used elsewhere in this
    file) instead of the previous hand-rolled stdout-swapping context
    manager, which duplicated the standard library's behavior.
    """
    target_stdout = python_utils.string_io()
    with contextlib.redirect_stdout(target_stdout):
        common.print_each_string_after_two_new_lines(
            ['These', 'are', 'sample', 'strings.'])
    # Each string is followed by exactly two newline characters.
    self.assertEqual(
        target_stdout.getvalue(),
        'These\n\nare\n\nsample\n\nstrings.\n\n')
def download_and_unzip_files(
        source_url, target_parent_dir, zip_root_name, target_root_name):
    """Downloads a zip file, unzips it, and saves the result in a given dir.

    The download occurs only if the target directory that the zip file unzips
    to does not exist.

    NB: This function assumes that the root level of the zip file has exactly
    one folder.

    Args:
        source_url: str. The URL from which to download the zip file.
        target_parent_dir: str. The directory to save the contents of the zip
            file to.
        zip_root_name: str. The name of the top-level folder in the zip
            directory.
        target_root_name: str. The name that the top-level folder should be
            renamed to in the local directory.
    """
    # Skip the download entirely if the renamed target already exists.
    if not os.path.exists(os.path.join(target_parent_dir, target_root_name)):
        python_utils.PRINT('Downloading and unzipping file %s to %s ...' % (
            zip_root_name, target_parent_dir))
        common.ensure_directory_exists(target_parent_dir)

        # First attempt: download to a temp file on disk, then extract.
        urlrequest.urlretrieve(source_url, filename=TMP_UNZIP_PATH)

        try:
            with zipfile.ZipFile(TMP_UNZIP_PATH, 'r') as zfile:
                zfile.extractall(path=target_parent_dir)
            os.remove(TMP_UNZIP_PATH)
        except Exception:
            # Deliberate broad catch: any failure of the first attempt
            # (e.g. a server rejecting the default urlretrieve client)
            # falls back to an in-memory download with a User-agent header.
            if os.path.exists(TMP_UNZIP_PATH):
                os.remove(TMP_UNZIP_PATH)

            # Some downloads (like jqueryui-themes) may require a user-agent.
            req = python_utils.url_request(source_url, None, {})
            req.add_header('User-agent', 'python')
            # This is needed to get a seekable filestream that can be used
            # by zipfile.ZipFile.
            # NOTE(review): assumes python_utils.string_io accepts the raw
            # (bytes) response body as buffer_value — confirm against
            # python_utils.
            file_stream = python_utils.string_io(
                buffer_value=python_utils.url_open(req).read())
            with zipfile.ZipFile(file_stream, 'r') as zfile:
                zfile.extractall(path=target_parent_dir)

        # Rename the target directory (the zip is assumed to contain a
        # single top-level folder named zip_root_name).
        os.rename(
            os.path.join(target_parent_dir, zip_root_name),
            os.path.join(target_parent_dir, target_root_name))

        python_utils.PRINT('Download of %s succeeded.' % zip_root_name)
def test_managed_webpack_compiler_in_watch_mode_raises_when_not_built(
        self):
    """The watch-mode compiler must raise when no build ever completes."""
    # NOTE: The 'Built at: ' message is never printed.
    self.exit_stack.enter_context(
        self.swap_popen(outputs=[b'abc', b'def']))
    captured_stdout = python_utils.string_io()
    self.exit_stack.enter_context(
        contextlib.redirect_stdout(captured_stdout))

    self.assertRaisesRegexp(
        IOError, 'First build never completed',
        lambda: self.exit_stack.enter_context(
            servers.managed_webpack_compiler(watch_mode=True)))

    printed_lines = captured_stdout.getvalue().strip().split('\n')
    self.assert_matches_regexps(printed_lines, [
        'Starting new Webpack Compiler',
        'abc',
        'def',
        'Stopping Webpack Compiler',
    ])
def get_topic_similarities_as_csv():
    """Downloads all similarities corresponding to the current topics as a
    string which contains the contents of a csv file.

    The first line is a list of the current topics. The next lines are an
    adjacency matrix of similarities.
    """
    output_stream = python_utils.string_io()
    csv_writer = csv.writer(output_stream)

    # Header row: the list of current topics.
    csv_writer.writerow(RECOMMENDATION_CATEGORIES)

    similarities = get_topic_similarities_dict()
    for topic in RECOMMENDATION_CATEGORIES:
        # Sort by the inner topic name so every row has a stable column
        # order, then keep only the similarity values.
        sorted_pairs = sorted(similarities[topic].items())
        csv_writer.writerow([value for _, value in sorted_pairs])

    return output_stream.getvalue()
def test_join_files(self):
    """Determine third_party.js contains the content of the first 10 JS
    files in /third_party/static.
    """
    # Prepare a file_stream object from python_utils.string_io().
    third_party_js_stream = python_utils.string_io()
    # Get all filepaths from dependencies.json.
    dependency_filepaths = build.get_dependencies_filepaths()
    # Join and write all JS files in /third_party/static to file_stream.
    build._join_files(dependency_filepaths['js'], third_party_js_stream)  # pylint: disable=protected-access

    # Materialize the joined output once; the previous version called
    # getvalue() for every line of every file, rebuilding the whole
    # string each time (accidentally quadratic).
    joined_content = third_party_js_stream.getvalue()

    # Only checking first 10 files; a slice replaces the manual counter.
    for js_filepath in dependency_filepaths['js'][:10]:
        with python_utils.open_file(js_filepath, 'r') as js_file:
            # Assert that each line is copied over to file_stream object.
            for line in js_file:
                self.assertIn(line, joined_content)
def test_managed_webpack_compiler_in_watch_mode_when_build_succeeds(self):
    """A successful first build lets the watch-mode compiler start up."""
    popen_calls = self.exit_stack.enter_context(
        self.swap_popen(outputs=[b'abc', b'Built at: 123', b'def']))
    captured_stdout = python_utils.string_io()
    self.exit_stack.enter_context(
        contextlib.redirect_stdout(captured_stdout))
    logs = self.exit_stack.enter_context(self.capture_logging())

    proc = self.exit_stack.enter_context(
        servers.managed_webpack_compiler(watch_mode=True))
    self.exit_stack.close()

    self.assert_proc_was_managed_as_expected(logs, proc.pid)
    # Exactly one compiler process is launched, with all watch-mode flags.
    self.assertEqual(len(popen_calls), 1)
    for flag in ('--color', '--watch', '--progress'):
        self.assertIn(flag, popen_calls[0].program_args)

    printed_lines = captured_stdout.getvalue().strip().split('\n')
    self.assert_matches_regexps(printed_lines, [
        'Starting new Webpack Compiler',
        'abc',
        'Built at: 123',
        'def',
        'Stopping Webpack Compiler',
    ])
def lint_py_files(self):
    """Prints a list of lint errors in the given list of Python files.

    Returns:
        TaskResult. A TaskResult object representing the result of the
        lint check.
    """
    pylintrc_path = os.path.join(os.getcwd(), '.pylintrc')
    config_pylint = '--rcfile=%s' % pylintrc_path
    config_pycodestyle = os.path.join(os.getcwd(), 'tox.ini')
    files_to_lint = self.all_filepaths
    errors_found = False
    error_messages = []
    full_error_messages = []
    name = 'Pylint'

    _batch_size = 50
    current_batch_start_index = 0

    while current_batch_start_index < len(files_to_lint):
        # Note that this index is an exclusive upper bound -- i.e.,
        # the current batch of files ranges from 'start_index' to
        # 'end_index - 1'.
        current_batch_end_index = min(
            current_batch_start_index + _batch_size, len(files_to_lint))
        current_files_to_lint = files_to_lint[
            current_batch_start_index:current_batch_end_index]

        pylint_report = python_utils.string_io()
        pylinter = lint.Run(
            current_files_to_lint + [config_pylint],
            reporter=text.TextReporter(pylint_report),
            exit=False).linter

        if pylinter.msg_status != 0:
            lint_message = pylint_report.getvalue()
            full_error_messages.append(lint_message)
            pylint_error_messages = (
                self.get_trimmed_error_output(lint_message))
            error_messages.append(pylint_error_messages)
            errors_found = True

        # A fresh buffer is created for each batch: a single buffer
        # created before the loop would accumulate output across batches,
        # so later batches would re-report earlier batches' errors.
        # python_utils.string_io() is used (instead of io.StringIO) for
        # consistency with the rest of this file.
        stdout = python_utils.string_io()
        with linter_utils.redirect_stdout(stdout):
            # These lines invoke Pycodestyle and print its output
            # to the target stdout.
            style_guide = pycodestyle.StyleGuide(
                config_file=config_pycodestyle)
            pycodestyle_report = style_guide.check_files(
                paths=current_files_to_lint)
            if pycodestyle_report.get_count() != 0:
                error_message = stdout.getvalue()
                full_error_messages.append(error_message)
                error_messages.append(error_message)
                errors_found = True

        current_batch_start_index = current_batch_end_index

    return concurrent_task_utils.TaskResult(
        name, errors_found, error_messages, full_error_messages)
def test_string_io(self):
    """Verify that python_utils.string_io() produces an io.StringIO."""
    stream = python_utils.string_io()
    self.assertIsInstance(stream, io.StringIO)