def check_ruby(self):
    """Return True if a usable 'ruby' executable is available, else False.

    Probes by running 'ruby --version'; an OSError (binary missing or not
    executable) means ruby is unavailable.
    """
    executive = Executive()
    try:
        executive.run_command(['ruby', '--version'])
    except OSError:
        # 'ruby' is not installed or not on PATH.
        return False
    # BUG FIX: the original fell off the end here and implicitly returned
    # None (falsy), so a *successful* probe still reported ruby as missing.
    return True
def run_bad_command():
    """Run a command that does not exist, swallowing the launch error."""
    bogus_command = ["foo_bar_command_blah"]
    Executive().run_command(
        bogus_command,
        error_handler=Executive.ignore_error,
        return_exit_code=True)
def move(self, apply_only=None):
    """Move Blink source files.

    Args:
        apply_only: If it's None, move all affected files. Otherwise, it
            should be a set of file paths and this function moves only the
            files in |apply_only|.
    """
    _log.info('Planning renaming ...')
    file_pairs = plan_blink_move(self._fs, [])
    if apply_only:
        # Restrict the plan to the requested subset; paths in |apply_only|
        # are repo-relative with forward slashes.
        file_pairs = [(src, dest) for (src, dest) in file_pairs
                      if 'third_party/WebKit/' + src.replace('\\', '/') in apply_only]
    _log.info('Will move %d files', len(file_pairs))

    git = self._create_git()
    files_set = self._get_checked_in_files(git)
    for i, (src, dest) in enumerate(file_pairs):
        src_from_repo = self._fs.join('third_party', 'WebKit', src)
        # Skip files the plan knows about but git does not track.
        if src_from_repo.replace('\\', '/') not in files_set:
            _log.info('%s is not in the repository', src)
            continue
        dest_from_repo = self._fs.join('third_party', 'blink', dest)
        self._fs.maybe_make_directory(self._repo_root, 'third_party',
                                      'blink', self._fs.dirname(dest))
        if self._options.run_git:
            # Let git record the rename so history follows the file.
            git.move(src_from_repo, dest_from_repo)
            _log.info('[%d/%d] Git moved %s', i + 1, len(file_pairs), src)
        else:
            # Plain filesystem move (dry-run style, no git metadata).
            self._fs.move(self._fs.join(self._repo_root, src_from_repo),
                          self._fs.join(self._repo_root, dest_from_repo))
            _log.info('[%d/%d] Moved %s', i + 1, len(file_pairs), src)
    if apply_only:
        # Partial application: skip the landmine, bindings-test reset and
        # local commit; those happen only on the full move.
        return

    # Add a build landmine so everyone's output directory is clobbered
    # after this mass rename lands.
    self._update_single_file_content('build/get_landmines.py', [(
        '\ndef main',
        ' print \'The Great Blink mv for source files (crbug.com/768828)\'\n\ndef main'
    )])

    _log.info('Run run-bindings-tests ...')
    Executive().run_command([
        'python',
        self._fs.join(get_scripts_dir(), 'run-bindings-tests'),
        '--reset-results'
    ], cwd=self._repo_root)

    if self._options.run_git:
        _log.info('Make a local commit ...')
        git.commit_locally_with_message(
            """The Great Blink mv for source files, part 2.

Move and rename files.

NOAUTOREVERT=true
NOPRESUBMIT=true
NOTREECHECKS=true
Bug: 768828
""")
def __init__(self, platforminfo=None):
    """Create a User, building a default PlatformInfo when none is injected."""
    # We cannot get the PlatformInfo object from a SystemHost because
    # User is part of SystemHost itself.
    if platforminfo:
        self._platforminfo = platforminfo
    else:
        self._platforminfo = PlatformInfo(sys, platform, Executive())
def __init__(self, port_name=None, options=None, executive=None, user=None,
             filesystem=None, config=None, **kwargs):
    """Initialize a Port; every dependency is injectable for testing."""
    # These are default values that should be overridden in a subclasses.
    self._name = port_name or self.port_name
    # Subclasses may append a -VERSION (like mac-leopard) or other qualifiers.
    self._operating_system = 'mac'
    self._version = ''
    self._architecture = 'x86'
    self._graphics_type = 'cpu'

    # FIXME: Ideally we'd have a package-wide way to get a
    # well-formed options object that had all of the necessary
    # options defined on it.
    self.options = options or DummyOptions()

    # Fall back to real system objects when no test doubles were injected.
    self.executive = executive or Executive()
    self.user = user or User()
    self.filesystem = filesystem or system.filesystem.FileSystem()
    self.config = config or port_config.Config(self.executive, self.filesystem)

    # FIXME: Remove all of the old "protected" versions when we can.
    self._options = self.options
    self._executive = self.executive
    self._filesystem = self.filesystem
    self._user = self.user
    self._config = self.config

    # Lazily-created server/helper handles; populated on first use.
    self._helper = None
    self._http_server = None
    self._websocket_server = None
    self._http_lock = None

    # FIXME: Why does this live on the port object?
    # Python's Popen has a bug that causes any pipes opened to a
    # process that can't be executed to be leaked. Since this
    # code is specifically designed to tolerate exec failures
    # to gracefully handle cases where wdiff is not installed,
    # the bug results in a massive file descriptor leak. As a
    # workaround, if an exec failure is ever experienced for
    # wdiff, assume it's not available. This will leak one
    # file descriptor but that's better than leaking each time
    # wdiff would be run.
    #
    # http://mail.python.org/pipermail/python-list/
    # 2008-August/505753.html
    # http://bugs.python.org/issue3210
    self._wdiff_available = None

    # FIXME: prettypatch.py knows this path, why is it copied here?
    self._pretty_patch_path = self.path_from_webkit_base(
        "Websites", "bugs.webkit.org", "PrettyPatch", "prettify.rb")
    self._pretty_patch_available = None

    self.set_option_default('configuration', self.default_configuration())
    self._test_configuration = None
    # multiprocessing may be None on platforms/Python builds without it.
    self._multiprocessing_is_available = (multiprocessing is not None)
    self._results_directory = None
def __init__(self, port, image_path, driver_options, executive=None):
    """Initialize the driver.

    Args:
        port: the owning Port object.
        image_path: where pixel results should be written.
        driver_options: unused (see FIXME below).
        executive: optional Executive, kept for interface compatibility;
            it is not used by this constructor. The previous signature's
            default of `executive=Executive()` was evaluated once at class
            definition time, so all callers relying on the default shared
            a single instance — a classic eager-default pitfall, fixed by
            the None sentinel.
    """
    self._port = port
    # FIXME: driver_options is never used.
    self._image_path = image_path
def run_command(*args, **kwargs):
    """Module-level convenience wrapper around Executive.run_command."""
    # FIXME: This should not be a global static.
    # New code should use Executive.run_command directly instead
    executive = Executive()
    return executive.run_command(*args, **kwargs)
def test_check_running_pid(self):
    """check_running_pid is true for our own pid, false for an unused pid."""
    executive = Executive()
    own_pid = os.getpid()
    self.assertTrue(executive.check_running_pid(own_pid))
    # Maximum pid number on Linux is 32768 by default, so 100000 is free.
    self.assertFalse(executive.check_running_pid(100000))
def __init__(self, cwd, executive=None, filesystem=None):
    """Locate the checkout root from |cwd| and wire up injectable helpers."""
    self.cwd = cwd
    self.checkout_root = self.find_checkout_root(self.cwd)
    self.dryrun = False
    # Build real helpers only when test doubles were not supplied.
    self._executive = Executive() if not executive else executive
    self._filesystem = FileSystem() if not filesystem else filesystem
def bindings_tests(output_directory, verbose): executive = Executive() def diff(filename1, filename2): # Python's difflib module is too slow, especially on long output, so # run external diff(1) command cmd = [ 'diff', '-u', # unified format '-N', # treat absent files as empty filename1, filename2 ] # Return output and don't raise exception, even though diff(1) has # non-zero exit if files differ. return executive.run_command(cmd, error_handler=lambda x: None) def delete_cache_files(): # FIXME: Instead of deleting cache files, don't generate them. cache_files = [ os.path.join(output_directory, output_file) for output_file in os.listdir(output_directory) if (output_file in ( 'lextab.py', # PLY lex 'lextab.pyc', 'parsetab.pickle') or # PLY yacc output_file.endswith('.cache')) ] # Jinja for cache_file in cache_files: os.remove(cache_file) def identical_file(reference_filename, output_filename): reference_basename = os.path.basename(reference_filename) if not os.path.isfile(reference_filename): print 'Missing reference file!' 
print '(if adding new test, update reference files)' print reference_basename print return False if not filecmp.cmp(reference_filename, output_filename): # cmp is much faster than diff, and usual case is "no differance", # so only run diff if cmp detects a difference print 'FAIL: %s' % reference_basename print diff(reference_filename, output_filename) return False if verbose: print 'PASS: %s' % reference_basename return True def identical_output_files(): file_pairs = [(os.path.join(reference_directory, output_file), os.path.join(output_directory, output_file)) for output_file in os.listdir(output_directory)] return all([ identical_file(reference_filename, output_filename) for (reference_filename, output_filename) in file_pairs ]) def no_excess_files(): generated_files = set(os.listdir(output_directory)) generated_files.add('.svn') # Subversion working copy directory excess_files = [ output_file for output_file in os.listdir(reference_directory) if output_file not in generated_files ] if excess_files: print( 'Excess reference files! ' '(probably cruft from renaming or deleting):\n' + '\n'.join(excess_files)) return False return True try: generate_interface_dependencies() idl_compiler = IdlCompilerV8(output_directory, interfaces_info=interfaces_info, only_if_changed=True) idl_basenames = [ filename for filename in os.listdir(test_input_directory) if (filename.endswith('.idl') and # Dependencies aren't built # (they are used by the dependent) filename not in DEPENDENCY_IDL_FILES) ] for idl_basename in idl_basenames: idl_path = os.path.realpath( os.path.join(test_input_directory, idl_basename)) idl_compiler.compile_file(idl_path) if verbose: print 'Compiled: %s' % filename finally: delete_cache_files() # Detect all changes passed = identical_output_files() passed &= no_excess_files() if passed: if verbose: print print PASS_MESSAGE return 0 print print FAIL_MESSAGE return 1
def __init__(self, host, git_prefix=None, executive=None, cwd=None):
    """Initialize with an optional git config prefix and working directory.

    Args:
        host: the Host providing platform services.
        git_prefix: optional prefix for git config keys ("<prefix>.<key>").
        executive: optional Executive for running commands.
        cwd: working directory for commands; defaults to the current
            working directory *at call time*. (The previous signature used
            `cwd=os.getcwd()`, which was evaluated once at import time and
            silently pinned every later instance to whatever directory the
            module happened to be imported from.)
    """
    self.host = host
    self.git_prefix = "%s." % git_prefix if git_prefix else ""
    self.executive = executive or Executive()
    self.cwd = cwd or os.getcwd()
def bindings_tests(output_directory, verbose):
    """Generate bindings for all test IDL files and compare with references.

    Args:
        output_directory: directory to generate bindings into, with one
            subdirectory per component (core, modules).
        verbose: if true, print per-file PASS lines and the final verdict.

    Returns:
        0 if every generated file matches its reference and no stale
        reference files remain; 1 otherwise.
    """
    executive = Executive()

    def list_files(directory):
        # Collect all files under the known component subdirectories;
        # other entries directly in |directory| are ignored.
        files = []
        for component in os.listdir(directory):
            if component not in COMPONENT_DIRECTORY:
                continue
            directory_with_component = os.path.join(directory, component)
            for filename in os.listdir(directory_with_component):
                files.append(os.path.join(directory_with_component, filename))
        return files

    def diff(filename1, filename2):
        # Python's difflib module is too slow, especially on long output, so
        # run external diff(1) command
        cmd = ['diff',
               '-u',  # unified format
               '-N',  # treat absent files as empty
               filename1,
               filename2]
        # Return output and don't raise exception, even though diff(1) has
        # non-zero exit if files differ.
        return executive.run_command(cmd, error_handler=lambda x: None)

    def is_cache_file(filename):
        # Jinja template caches end with '.cache'.
        return filename.endswith('.cache')

    def delete_cache_files():
        # FIXME: Instead of deleting cache files, don't generate them.
        cache_files = [path for path in list_files(output_directory)
                       if is_cache_file(os.path.basename(path))]
        for cache_file in cache_files:
            os.remove(cache_file)

    def identical_file(reference_filename, output_filename):
        reference_basename = os.path.basename(reference_filename)
        if not os.path.isfile(reference_filename):
            print 'Missing reference file!'
            print '(if adding new test, update reference files)'
            print reference_basename
            print
            return False
        if not filecmp.cmp(reference_filename, output_filename):
            # cmp is much faster than diff, and usual case is "no difference",
            # so only run diff if cmp detects a difference
            print 'FAIL: %s' % reference_basename
            print diff(reference_filename, output_filename)
            return False
        if verbose:
            print 'PASS: %s' % reference_basename
        return True

    def identical_output_files(output_files):
        # Pair each generated file with the reference file at the same
        # component-relative path.
        reference_files = [os.path.join(REFERENCE_DIRECTORY,
                                        os.path.relpath(path, output_directory))
                           for path in output_files]
        return all([identical_file(reference_filename, output_filename)
                    for (reference_filename, output_filename) in zip(
                        reference_files, output_files)])

    def no_excess_files(output_files):
        generated_files = set([os.path.relpath(path, output_directory)
                               for path in output_files])
        # Add subversion working copy directories in core and modules.
        for component in COMPONENT_DIRECTORY:
            generated_files.add(os.path.join(component, '.svn'))
        excess_files = []
        for path in list_files(REFERENCE_DIRECTORY):
            relpath = os.path.relpath(path, REFERENCE_DIRECTORY)
            if relpath not in generated_files:
                excess_files.append(relpath)
        if excess_files:
            print('Excess reference files! '
                  '(probably cruft from renaming or deleting):\n' +
                  '\n'.join(excess_files))
            return False
        return True

    try:
        generate_interface_dependencies()
        for component in COMPONENT_DIRECTORY:
            output_dir = os.path.join(output_directory, component)
            if not os.path.exists(output_dir):
                os.makedirs(output_dir)

            options = IdlCompilerOptions(output_directory=output_dir,
                                         impl_output_directory=output_dir,
                                         cache_directory=None,
                                         target_component=component)

            if component == 'core':
                # Core partial interfaces with cross-component dependencies
                # are additionally generated into the modules directory.
                partial_interface_output_dir = os.path.join(output_directory,
                                                            'modules')
                if not os.path.exists(partial_interface_output_dir):
                    os.makedirs(partial_interface_output_dir)
                partial_interface_options = IdlCompilerOptions(
                    output_directory=partial_interface_output_dir,
                    impl_output_directory=None,
                    cache_directory=None,
                    target_component='modules')

            idl_filenames = []
            dictionary_impl_filenames = []
            partial_interface_filenames = []
            input_directory = os.path.join(TEST_INPUT_DIRECTORY, component)
            for filename in os.listdir(input_directory):
                if (filename.endswith('.idl') and
                        # Dependencies aren't built
                        # (they are used by the dependent)
                        filename not in DEPENDENCY_IDL_FILES):
                    idl_path = os.path.realpath(
                        os.path.join(input_directory, filename))
                    idl_filenames.append(idl_path)
                    idl_basename = os.path.basename(idl_path)
                    definition_name, _ = os.path.splitext(idl_basename)
                    # Route dictionaries and cross-component partial
                    # interfaces to their dedicated generation passes.
                    if definition_name in interfaces_info:
                        interface_info = interfaces_info[definition_name]
                        if interface_info['is_dictionary']:
                            dictionary_impl_filenames.append(idl_path)
                        if component == 'core' and interface_info[
                                'dependencies_other_component_full_paths']:
                            partial_interface_filenames.append(idl_path)

            info_provider = component_info_providers[component]
            partial_interface_info_provider = component_info_providers[
                'modules']

            generate_union_type_containers(CodeGeneratorUnionType,
                                           info_provider, options)
            generate_callback_function_impl(CodeGeneratorCallbackFunction,
                                            info_provider, options)
            generate_bindings(CodeGeneratorV8, info_provider, options,
                              idl_filenames)
            generate_bindings(CodeGeneratorWebAgentAPI, info_provider,
                              options, idl_filenames)
            generate_bindings(CodeGeneratorV8,
                              partial_interface_info_provider,
                              partial_interface_options,
                              partial_interface_filenames)
            generate_dictionary_impl(CodeGeneratorDictionaryImpl,
                                     info_provider, options,
                                     dictionary_impl_filenames)
    finally:
        delete_cache_files()

    # Detect all changes
    output_files = list_files(output_directory)
    passed = identical_output_files(output_files)
    passed &= no_excess_files(output_files)

    if passed:
        if verbose:
            print
            print PASS_MESSAGE
        return 0
    print
    print FAIL_MESSAGE
    return 1
def __init__(self, tests, is_debug):
    """Record the test list and environment for a run."""
    # The final entry in |tests| is the one we expect to fail.
    self.tests = tests
    self.expected_failure = tests[-1]
    self.is_debug = is_debug
    self.executive = Executive()
    self.webkit_finder = WebKitFinder(FileSystem())
def test_run_command_args_type(self):
    """run_command requires a sequence of arguments, never a bare string."""
    executive = Executive()
    for bad_command in ["echo", u"echo"]:
        self.assertRaises(AssertionError, executive.run_command, bad_command)
    # Both lists and tuples are acceptable argument containers.
    executive.run_command(command_line('echo', 'foo'))
    executive.run_command(tuple(command_line('echo', 'foo')))
def setUp(self):
    """Build helpers and capture the starting working directory."""
    self.fs = FileSystem()
    # Remember where we started so tests can chdir freely and restore it.
    self.original_cwd = self.fs.getcwd()
    self.executive = Executive()
def _assert_windows_image_name(self, name, expected_windows_name):
    """Check that _windows_image_name maps |name| to the expected value."""
    actual_windows_name = Executive()._windows_image_name(name)
    self.assertEqual(actual_windows_name, expected_windows_name)
def test_auto_stringify_args(self):
    """Non-string arguments (the int 1) should be stringified automatically."""
    executive = Executive()
    executive.run_command(command_line('echo', 1))
    pipe = executive.popen(command_line('echo', 1), stdout=executive.PIPE)
    pipe.wait()
    self.assertEqual('echo 1', executive.command_for_printing(['echo', 1]))
def bindings_tests(output_directory, verbose):
    """Compile all test IDL files and diff the output against references.

    Args:
        output_directory: directory to generate bindings into, with one
            subdirectory per component (core, modules).
        verbose: if true, print per-file PASS/Compiled lines and the final
            verdict.

    Returns:
        0 if every generated file matches its reference and no stale
        reference files remain; 1 otherwise.
    """
    executive = Executive()

    def list_files(directory):
        # Collect all files under the known component subdirectories;
        # other entries directly in |directory| are ignored.
        files = []
        for component in os.listdir(directory):
            if component not in COMPONENT_DIRECTORY:
                continue
            directory_with_component = os.path.join(directory, component)
            for filename in os.listdir(directory_with_component):
                files.append(os.path.join(directory_with_component, filename))
        return files

    def diff(filename1, filename2):
        # Python's difflib module is too slow, especially on long output, so
        # run external diff(1) command
        cmd = ['diff',
               '-u',  # unified format
               '-N',  # treat absent files as empty
               filename1,
               filename2]
        # Return output and don't raise exception, even though diff(1) has
        # non-zero exit if files differ.
        return executive.run_command(cmd, error_handler=lambda x: None)

    def is_cache_file(filename):
        if filename in PLY_LEX_YACC_FILES:
            return True
        if filename.endswith('.cache'):  # Jinja
            return True
        return False

    def delete_cache_files():
        # FIXME: Instead of deleting cache files, don't generate them.
        cache_files = [path for path in list_files(output_directory)
                       if is_cache_file(os.path.basename(path))]
        for cache_file in cache_files:
            os.remove(cache_file)

    def identical_file(reference_filename, output_filename):
        reference_basename = os.path.basename(reference_filename)
        if not os.path.isfile(reference_filename):
            print 'Missing reference file!'
            print '(if adding new test, update reference files)'
            print reference_basename
            print
            return False
        if not filecmp.cmp(reference_filename, output_filename):
            # cmp is much faster than diff, and usual case is "no difference",
            # so only run diff if cmp detects a difference
            print 'FAIL: %s' % reference_basename
            print diff(reference_filename, output_filename)
            return False
        if verbose:
            print 'PASS: %s' % reference_basename
        return True

    def identical_output_files(output_files):
        # Pair each generated file with the reference file at the same
        # component-relative path.
        reference_files = [os.path.join(reference_directory,
                                        os.path.relpath(path, output_directory))
                           for path in output_files]
        return all([identical_file(reference_filename, output_filename)
                    for (reference_filename, output_filename) in zip(
                        reference_files, output_files)])

    def no_excess_files(output_files):
        generated_files = set([os.path.relpath(path, output_directory)
                               for path in output_files])
        # Add subversion working copy directories in core and modules.
        for component in COMPONENT_DIRECTORY:
            generated_files.add(os.path.join(component, '.svn'))
        excess_files = []
        for path in list_files(reference_directory):
            relpath = os.path.relpath(path, reference_directory)
            if relpath not in generated_files:
                excess_files.append(relpath)
        if excess_files:
            print('Excess reference files! '
                  '(probably cruft from renaming or deleting):\n' +
                  '\n'.join(excess_files))
            return False
        return True

    try:
        generate_interface_dependencies()
        for component in COMPONENT_DIRECTORY:
            output_dir = os.path.join(output_directory, component)
            if not os.path.exists(output_dir):
                os.makedirs(output_dir)
            idl_compiler = IdlCompilerV8(output_dir,
                                         interfaces_info=interfaces_info,
                                         only_if_changed=True)
            # Dictionaries get a second, separate implementation-file pass.
            dictionary_impl_compiler = IdlCompilerDictionaryImpl(
                output_dir, interfaces_info=interfaces_info,
                only_if_changed=True)

            idl_filenames = []
            input_directory = os.path.join(test_input_directory, component)
            for filename in os.listdir(input_directory):
                if (filename.endswith('.idl') and
                        # Dependencies aren't built
                        # (they are used by the dependent)
                        filename not in DEPENDENCY_IDL_FILES):
                    idl_filenames.append(
                        os.path.realpath(
                            os.path.join(input_directory, filename)))
            for idl_path in idl_filenames:
                idl_basename = os.path.basename(idl_path)
                idl_compiler.compile_file(idl_path)
                definition_name, _ = os.path.splitext(idl_basename)
                if (definition_name in interfaces_info and
                        interfaces_info[definition_name]['is_dictionary']):
                    dictionary_impl_compiler.compile_file(idl_path)
                if verbose:
                    print 'Compiled: %s' % idl_path
    finally:
        delete_cache_files()

    # Detect all changes
    output_files = list_files(output_directory)
    passed = identical_output_files(output_files)
    passed &= no_excess_files(output_files)

    if passed:
        if verbose:
            print
            print PASS_MESSAGE
        return 0
    print
    print FAIL_MESSAGE
    return 1
def test_popen_args(self):
    """Passing 'args' by keyword should work the same as positionally."""
    executive = Executive()
    # Explicitly naming the 'args' argument should not throw an exception.
    child = executive.popen(args=command_line('echo', 1),
                            stdout=executive.PIPE)
    child.wait()
def makeArgs(self):
    """Return the --makeargs flag for the build.

    Honors the MAKEFLAGS environment variable when it is set; otherwise
    defaults to a parallel build across all detected CPUs.
    """
    # 'in os.environ' replaces the Python-2-only os.environ.has_key().
    # Checking the override first also avoids creating an Executive (and
    # probing the CPU count) when the result would be discarded anyway.
    if 'MAKEFLAGS' in os.environ:
        return '--makeargs="%s"' % os.environ['MAKEFLAGS']
    # FIXME: This shouldn't use a static Executive().
    return '--makeargs="-j%s"' % Executive().cpu_count()
def test_running_pids(self):
    """Our own process must appear in the list of running pids."""
    current_pid = os.getpid()
    all_pids = Executive().running_pids()
    self.assertIn(current_pid, all_pids)
def detect_scm_system(path, patch_directories=None):
    """Module-level convenience wrapper around SCMDetector.detect_scm_system."""
    detector = SCMDetector(FileSystem(), Executive())
    return detector.detect_scm_system(path, patch_directories)
def test_run_in_parallel_assert_nonempty(self):
    """An empty command list is a programming error and should assert."""
    executive = Executive()
    self.assertRaises(AssertionError, executive.run_in_parallel, [])
def timestamp_of_revision(self, path, revision):
    """Return the commit timestamp string of |revision| from 'svn log'."""
    # We use --xml to get timestamps like 2013-02-08T08:18:04.964409Z
    repository_root = self.value_from_svn_info(self.checkout_root,
                                               'Repository Root')
    log_command = [self.executable_name, 'log', '-r', revision, '--xml',
                   repository_root]
    log_xml = Executive().run_command(log_command, cwd=path).rstrip()
    # The <date> line may carry a trailing \r on Windows-style output.
    date_match = re.search(r"^<date>(?P<value>.+)</date>\r?$", log_xml,
                           re.MULTILINE)
    return date_match.group('value')
def test_run_command_args_type(self):
    """A bare string (str or unicode) must be rejected; sequences are fine."""
    executive = Executive()
    for bad_args in ("echo", u"echo"):
        self.assertRaises(AssertionError, executive.run_command, bad_args)
    # Lists and tuples are both accepted.
    executive.run_command(["echo", "foo"])
    executive.run_command(("echo", "foo"))