def find_plugins(self, plugin_names):
    """Returns a map from plugin name to plugin jar.

    Scans every jar in self.plugin_jars() (the universe of all possible plugins
    and their transitive deps) for a scalac plugin descriptor and selects the
    ones whose names appear in `plugin_names`.

    :param plugin_names: iterable of plugin names to resolve.
    :returns: dict mapping plugin name -> jar path relative to the build root.
    :raises TaskError: on a malformed descriptor, a duplicate plugin definition,
      or any requested plugin that cannot be found.
    """
    plugin_names = set(plugin_names)
    plugins = {}
    buildroot = get_buildroot()
    # plugin_jars is the universe of all possible plugins and their transitive deps.
    # Here we select the ones to actually use.
    for jar in self.plugin_jars():
        with open_jar(jar, 'r') as jarfile:
            try:
                with closing(jarfile.open(_PLUGIN_INFO_FILE, 'r')) as plugin_info_file:
                    plugin_info = ElementTree.parse(plugin_info_file).getroot()
                    if plugin_info.tag != 'plugin':
                        raise TaskError(
                            'File %s in %s is not a valid scalac plugin descriptor' % (_PLUGIN_INFO_FILE, jar))
                    name = plugin_info.find('name').text
                    if name in plugin_names:
                        if name in plugins:
                            raise TaskError('Plugin %s defined in %s and in %s' % (name, plugins[name], jar))
                        # It's important to use relative paths, as the compiler flags get embedded in the zinc
                        # analysis file, and we port those between systems via the artifact cache.
                        plugins[name] = os.path.relpath(jar, buildroot)
            except KeyError:
                # ZipFile.open raises KeyError when the jar has no descriptor entry;
                # such jars are simply not plugins, so skip them.
                pass
    unresolved_plugins = plugin_names - set(plugins.keys())
    if unresolved_plugins:
        raise TaskError('Could not find requested plugins: %s' % list(unresolved_plugins))
    return plugins
def genlang(self, lang, targets):
    """Run protoc over the sources of `targets` to generate `lang` code.

    :param lang: output language; 'java' or 'python'.
    :param targets: protobuf targets whose sources are compiled.
    :raises TaskError: for an unrecognized lang or a non-zero protoc exit.
    """
    bases, sources = self._calculate_sources(targets)
    if lang == 'java':
        output_dir = self.java_out
        gen_flag = '--java_out'
    elif lang == 'python':
        output_dir = self.py_out
        gen_flag = '--python_out'
    else:
        raise TaskError('Unrecognized protobuf gen lang: %s' % lang)
    safe_mkdir(output_dir)
    gen = '%s=%s' % (gen_flag, output_dir)
    args = [self.protobuf_binary, gen]
    if self.plugins:
        for plugin in self.plugins:
            # TODO(Eric Ayers) Is it a good assumption that the generated source output dir is
            # acceptable for all plugins?
            args.append("--%s_protobuf_out=%s" % (plugin, output_dir))
    for base in bases:
        # Each source base becomes a protoc include root for import resolution.
        args.append('--proto_path=%s' % base)
    args.extend(sources)
    log.debug('Executing: %s' % ' '.join(args))
    process = subprocess.Popen(args)
    result = process.wait()
    if result != 0:
        raise TaskError('%s ... exited non-zero (%i)' % (self.protobuf_binary, result))
def genlang(self, lang, targets):
    """Run the thrift compiler over the sources of `targets` for `lang`.

    Launches one thrift process per thrift root (each with a unique session
    output dir), then waits on them in order, merging each successful session's
    output into the combined dir.

    :param lang: output language; 'java' or 'python'.
    :param targets: thrift targets to generate from.
    :raises TaskError: for an unrecognized lang or a non-zero thrift exit.
    """
    bases, sources = calculate_compile_roots(targets, self.is_gentarget)
    if lang == 'java':
        gen = self.gen_java.gen
    elif lang == 'python':
        gen = self.gen_python.gen
    else:
        raise TaskError('Unrecognized thrift gen lang: %s' % lang)
    args = [
        self.thrift_binary,
        '--gen', gen,
        '-recurse',
    ]
    if self.strict:
        args.append('-strict')
    if self.verbose:
        args.append('-verbose')
    for base in bases:
        args.extend(('-I', base))
    sessions = []
    for source in sources:
        self.context.log.info('Generating thrift for %s\n' % source)
        # Create a unique session dir for this thrift root. Sources may be full paths but we only
        # need the path relative to the build root to ensure uniqueness.
        # TODO(John Sirois): file paths should be normalized early on and uniformly, fix the need to
        # relpath here at all.
        relsource = os.path.relpath(source, get_buildroot())
        outdir = os.path.join(self.session_dir, '.'.join(relsource.split(os.path.sep)))
        safe_mkdir(outdir)
        cmd = args[:]
        cmd.extend(('-o', outdir))
        cmd.append(source)
        log.debug('Executing: %s' % ' '.join(cmd))
        sessions.append(self.ThriftSession(outdir, cmd, subprocess.Popen(cmd)))
    result = 0
    for session in sessions:
        if result != 0:
            # An earlier session already failed; kill the remainder instead of waiting.
            session.process.kill()
        else:
            result = session.process.wait()
            if result != 0:
                self.context.log.error('Failed: %s' % ' '.join(session.cmd))
            else:
                _copytree(session.outdir, self.combined_dir)
    if result != 0:
        raise TaskError('%s ... exited non-zero (%i)' % (self.thrift_binary, result))
def _get_target(spec, build_graph):
    """Resolve a target address spec against `build_graph`.

    :param spec: the target address spec string to parse.
    :param build_graph: graph used to look up the parsed address.
    :returns: the matching target.
    :raises TaskError: if the spec fails to parse or matches no target.
    """
    try:
        address = SyntheticAddress(spec)
    except IOError as e:
        # Fix: report the raw spec here. The original message referenced `address`,
        # which is unbound when SyntheticAddress(spec) raises, causing a NameError
        # that masked the real parse failure.
        raise TaskError('Failed to parse address: %s: %s' % (spec, e))
    match = build_graph.get_target(address)
    if not match:
        raise TaskError('Invalid target address: %s' % address)
    return match
def console_output(self, unused_method_argument):
    """Yield dependency listing lines for each root target (JVM or Python).

    :raises TaskError: if internal/external-only filtering is requested for a
      Python target (those filters only apply to JVM targets).
    """
    for target in self.context.target_roots:
        if self._is_jvm(target):
            for line in self._jvm_dependencies_list(target):
                yield line
        elif target.is_python:
            # The internal/external-only options are JVM-specific; reject them here.
            if self.is_internal_only:
                raise TaskError('Unsupported option for Python target: is_internal_only: %s' % self.is_internal_only)
            if self.is_external_only:
                raise TaskError('Unsupported option for Python target: is_external_only: %s' % self.is_external_only)
            for line in self._python_dependencies_list(target):
                yield line
def compile(self, opts, classpath, sources, output_dir, analysis_file, upstream_analysis_files): args = list(opts) # Make a copy args.extend(self._plugin_args()) if upstream_analysis_files: args.extend([ '-analysis-map', ','.join( ['%s:%s' % kv for kv in upstream_analysis_files.items()]) ]) args.extend([ '-analysis-cache', analysis_file, # We add compiler_classpath to ensure the scala-library jar is on the classpath. # TODO: This also adds the compiler jar to the classpath, which compiled code shouldn't # usually need. Be more selective? '-classpath', ':'.join(self._compiler_classpath + classpath), '-d', output_dir ]) args.extend(sources) self.log_zinc_file(analysis_file) if self._run_zinc(args, workunit_labels=[WorkUnit.COMPILER]): raise TaskError('Zinc compile failed.')
def test_execute_code(self):
    """A TaskError's exit_code should become the engine's result, with all phases attempted."""
    engine = self.RecordingEngine(action=self._throw(TaskError(exit_code=42)))
    result = engine.execute(self.context, self.as_phases('four', 'five', 'six'))
    self.assertEqual(42, result)
    self.assert_attempt(engine, 'four', 'five', 'six')
def __init__(self, context, workdir):
    """Set up the JUnit run task: require classpath products and pick a runner.

    Chooses a coverage-instrumenting runner (currently only 'emma') when
    coverage options are set, otherwise a plain JUnit runner.

    :raises TaskError: if an unknown coverage processor is configured.
    """
    super(JUnitRun, self).__init__(context, workdir)
    self._context = context
    context.products.require_data('exclusives_groups')

    # List of FQCN, FQCN#method, sourcefile or sourcefile#method.
    self.context.products.require_data('classes_by_target')
    self.context.products.require_data('classes_by_source')

    # Bundle the task capabilities the runners need, so they don't hold the whole task.
    task_exports = _TaskExports(classpath=self.classpath,
                                get_base_classpath_for_target=self.get_base_classpath_for_target,
                                register_jvm_tool=self.register_jvm_tool,
                                tool_classpath=self.tool_classpath,
                                workdir=self.workdir)

    options = self._context.options
    if options.junit_run_coverage or options.junit_run_coverage_html_open:
        if options.junit_coverage_processor == 'emma':
            self._runner = Emma(task_exports, self._context)
        else:
            raise TaskError('unknown coverage processor %s' % context.options.junit_coverage_processor)
    else:
        self._runner = _JUnitRunner(task_exports, self._context)
def _get_sources_package(self, target): parents = set([os.path.dirname(source) for source in target.sources]) if len(parents) != 1: raise TaskError( 'Antlr sources in multiple directories, cannot infer package.' 'Please set package member in antlr target.') return parents.pop().replace('/', '.')
def compile(self, args, classpath, sources, classes_output_dir, analysis_file):
    """Run jmake (wrapping javac) over `sources`, writing analysis to `analysis_file`.

    NOTE(review): the incoming `args` parameter is immediately overwritten below
    and `classes_output_dir` is never read (self._classes_dir is used instead) --
    confirm with callers whether that is intended.

    :raises TaskError: mapped from jmake's non-zero return codes.
    """
    jmake_classpath = self.tool_classpath(self._jmake_bootstrap_key)
    args = [
        '-classpath', ':'.join(classpath + [self._classes_dir]),
        '-d', self._classes_dir,
        '-pdb', analysis_file,
        '-pdb-text-format',
    ]
    compiler_classpath = self.tool_classpath(self._compiler_bootstrap_key)
    args.extend([
        '-jcpath', ':'.join(compiler_classpath),
        '-jcmainclass', 'com.twitter.common.tools.Compiler',
    ])
    # Forward each javac option through jmake via its -C prefix convention.
    args.extend(map(lambda arg: '-C%s' % arg, self._javac_opts))
    args.extend(self._args)
    args.extend(sources)
    result = self.runjava(classpath=jmake_classpath, main=JavaCompile._JMAKE_MAIN,
                          jvm_options=self._jvm_options, args=args, workunit_name='jmake',
                          workunit_labels=[WorkUnit.COMPILER])
    if result:
        default_message = 'Unexpected error - JMake returned %d' % result
        raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))
def execute(self, targets):
    """Run the caliper benchmark runner with the allocation-instrumentation agent.

    :raises TaskError: if the caliper JVM exits non-zero.
    """
    # For rewriting JDK classes to work, the JAR file has to be listed specifically in
    # the JAR manifest as something that goes in the bootclasspath.
    # The MANIFEST list a jar 'allocation.jar' this is why we have to rename it
    agent_tools_classpath = self.tool_classpath(self._agent_bootstrap_key)
    agent_jar = agent_tools_classpath[0]
    allocation_jar = os.path.join(os.path.dirname(agent_jar), "allocation.jar")

    # TODO(Steve Gury): Find a solution to avoid copying the jar every run and being resilient
    # to version upgrade
    shutil.copyfile(agent_jar, allocation_jar)
    # The instrumentation agent locates its jar through this environment variable.
    os.environ['ALLOCATION_JAR'] = str(allocation_jar)

    benchmark_tools_classpath = self.tool_classpath(self._benchmark_bootstrap_key)

    classpath = self.classpath(benchmark_tools_classpath,
                               confs=self.confs,
                               exclusives_classpath=self.get_base_classpath_for_target(targets[0]))

    caliper_main = 'com.google.caliper.Runner'
    exit_code = execute_java(classpath=classpath,
                             main=caliper_main,
                             jvm_options=self.jvm_args,
                             args=self.caliper_args,
                             workunit_factory=self.context.new_workunit,
                             workunit_name='caliper')
    if exit_code != 0:
        raise TaskError('java %s ... exited non-zero (%i)' % (caliper_main, exit_code))
def console_output(self, targets):
    """Yield the dependency map (tree or digraph) lines for each JVM root target.

    :raises TaskError: when no roots are given, or for non-JVM targets.
    """
    if not self.context.target_roots:
        raise TaskError("One or more target addresses are required.")
    for root in self.context.target_roots:
        if not self._is_jvm(root):
            if root.is_python:
                raise TaskError('Unsupported for Python targets')
            raise TaskError('Unsupported for target %s' % root)
        renderer = self._output_digraph if self.is_graph else self._output_dependency_tree
        for line in renderer(root):
            yield line
def execute(self, targets):
    """Run scalastyle over the scala sources of `targets`, honoring excludes.

    :raises TaskError: if scalastyle exits non-zero.
    """
    if self.context.options.scalastyle_skip:
        # Fix: the skip message said 'checkstyle' but this is the scalastyle task.
        self.context.log.debug('Skipping scalastyle.')
        return
    check_targets = list()
    for target in targets:
        for tgt in target.resolve():
            if isinstance(tgt, Target) and tgt.has_sources('.scala'):
                check_targets.append(tgt)

    def filter_excludes(filename):
        # Keep a file unless it matches any configured exclude pattern.
        if self._excludes:
            for exclude in self._excludes:
                if exclude.match(filename):
                    return False
        return True

    scala_sources = list()
    for target in check_targets:
        def collect(filename):
            if filename.endswith('.scala'):
                scala_sources.append(os.path.join(target.target_base, filename))
        # map() drives collect for its side effect; collect reads `target` at call
        # time, which is safe because it is consumed within this same iteration.
        map(collect, filter(filter_excludes, target.sources))

    if scala_sources:
        def call(srcs):
            cp = self.tool_classpath(self._scalastyle_bootstrap_key)
            return self.runjava(classpath=cp, main=Scalastyle._MAIN,
                                args=['-c', self._scalastyle_config] + srcs)
        # Xargs splits the source list to stay under command-line length limits.
        result = Xargs(call).execute(scala_sources)
        if result != 0:
            raise TaskError('java %s ... exited non-zero (%i)' % (Scalastyle._MAIN, result))
def createtarget(self, lang, gentarget, dependees):
    """Create the synthetic codegen target for `gentarget` in the given language.

    :raises TaskError: for an unrecognized lang.
    """
    factories = {
        'java': self._create_java_target,
        'python': self._create_python_target,
    }
    try:
        factory = factories[lang]
    except KeyError:
        raise TaskError('Unrecognized thrift gen lang: %s' % lang)
    return factory(gentarget, dependees)
def check_clean_master(self, commit=False): if commit: if self.restrict_push_branches: branch = self.scm.branch_name if branch not in self.restrict_push_branches: raise TaskError( 'Can only push from %s, currently on branch: %s' % (' '.join(sorted( self.restrict_push_branches)), branch)) changed_files = self.scm.changed_files() if changed_files: raise TaskError( 'Can only push from a clean branch, found : %s' % ' '.join(changed_files)) else: print('Skipping check for a clean %s in test mode.' % self.scm.branch_name)
def filter_for_type(name):
    """Return a predicate matching targets of the type named by `name`.

    `name` may be a fully qualified 'pkg.module.Type' path for custom types, or
    a pants-provided target alias.

    :returns: a one-argument callable testing isinstance against the resolved type.
    :raises TaskError: if the name resolves to nothing or to a non-Target type.
    """
    # FIXME(pl): This should be a standard function provided by the plugin/BuildFileParser
    # machinery
    try:
        # Try to do a fully qualified import 1st for filtering on custom types.
        from_list, module, type_name = name.rsplit('.', 2)
        module = __import__('%s.%s' % (from_list, module), fromlist=[from_list])
        target_type = getattr(module, type_name)
    except (ImportError, ValueError):
        # Fall back on pants provided target types.
        # (ValueError covers names without enough dots to rsplit into three parts.)
        if name not in pants.base.build_file_aliases.target_aliases:
            raise TaskError('Invalid type name: %s' % name)
        target_type = pants.base.build_file_aliases.target_aliases[name]
    if not issubclass(target_type, Target):
        raise TaskError('Not a Target type: %s' % name)
    return lambda target: isinstance(target, target_type)
def __init__(self, context, workdir, **kwargs):
    """Validate the mutually-exclusive internal/external flags and cache them.

    :raises TaskError: if both flags are selected at once.
    """
    super(Dependencies, self).__init__(context, workdir, **kwargs)
    internal = self.context.options.dependencies_is_internal_only
    external = self.context.options.dependencies_is_external_only
    if internal and external:
        raise TaskError("At most one of %s or %s can be selected." % (self.internal_only_flag,
                                                                      self.external_only_flag))
    self.is_internal_only = internal
    self.is_external_only = external
def genlang(self, lang, targets):
    """Run the appropriate ANTLR tool (v3 or v4 per target) to generate java sources.

    :param lang: must be 'java'; ANTLR generation only supports java here.
    :param targets: antlr targets to generate from.
    :raises TaskError: for a non-java lang, an unknown compiler, or a non-zero
      ANTLR exit.
    """
    if lang != 'java':
        raise TaskError('Unrecognized antlr gen lang: %s' % lang)

    # TODO: Instead of running the compiler for each target, collect the targets
    # by type and invoke it twice, once for antlr3 and once for antlr4.

    for target in targets:
        java_out = self._java_out(target)
        safe_mkdir(java_out)

        antlr_classpath = self._classpath_by_compiler[target.compiler]
        args = ["-o", java_out]

        if target.compiler == 'antlr3':
            java_main = 'org.antlr.Tool'
        elif target.compiler == 'antlr4':
            args.append('-visitor')  # Generate Parse Tree Vistor As Well
            # Note that this assumes that there is no package set in the antlr file itself,
            # which is considered an ANTLR best practice.
            args.append('-package')
            if target.package is None:
                # Infer the package from the single source directory when unset.
                args.append(self._get_sources_package(target))
            else:
                args.append(target.package)
            java_main = 'org.antlr.v4.Tool'
        else:
            raise TaskError('Unknown ANTLR compiler: {}'.format(target.compiler))

        sources = self._calculate_sources([target])
        args.extend(sources)
        result = self.runjava(classpath=antlr_classpath, main=java_main,
                              args=args, workunit_name='antlr')
        if result != 0:
            raise TaskError('java %s ... exited non-zero (%i)' % (java_main, result))
def report(self, targets, tests, junit_classpath):
    """Generate emma coverage reports (console/xml/html) from collected coverage data.

    :param targets: targets walked to collect source bases for the report.
    :param tests: unused here; part of the runner's report interface.
    :param junit_classpath: unused here; part of the runner's report interface.
    :raises TaskError: if the emma report JVM exits non-zero.
    """
    emma_classpath = self._task_exports.tool_classpath(self._emma_bootstrap_key)
    args = [
        'report',
        '-in', self._coverage_metadata_file,
        '-in', self._coverage_file,
        '-exit'
    ]
    source_bases = set()

    def collect_source_base(target):
        if self.is_coverage_target(target):
            source_bases.add(target.target_base)

    for target in self._test_target_candidates(targets):
        target.walk(collect_source_base)
    for source_base in source_bases:
        args.extend(['-sp', source_base])

    sorting = ['-Dreport.sort', '+name,+class,+method,+block']
    if self._coverage_report_console:
        args.extend(['-r', 'txt',
                     '-Dreport.txt.out.file=%s' % self._coverage_console_file] + sorting)
    if self._coverage_report_xml:
        args.extend(['-r', 'xml', '-Dreport.xml.out.file=%s' % self._coverage_xml_file])
    if self._coverage_report_html:
        args.extend(['-r', 'html',
                     '-Dreport.html.out.file=%s' % self._coverage_html_file,
                     '-Dreport.out.encoding=UTF-8'] + sorting)

    main = 'emma'
    result = execute_java(classpath=emma_classpath, main=main, args=args,
                          workunit_factory=self._context.new_workunit,
                          workunit_name='emma-report')
    if result != 0:
        raise TaskError("java %s ... exited non-zero (%i)"
                        " 'failed to generate code coverage reports'" % (main, result))

    if self._coverage_report_console:
        # Echo the text report to stdout for immediate feedback.
        with safe_open(self._coverage_console_file) as console_report:
            sys.stdout.write(console_report.read())
    if self._coverage_report_html_open:
        binary_util.ui_open(self._coverage_html_file)
def is_gentarget(self, target):
    """Return True when `target` is a thrift library this task can generate from.

    :raises TaskError: if the configured compiler cannot emit the configured language.
    """
    if not isinstance(target, JavaThriftLibrary):
        return False
    compiler = self.defaults.get_compiler(target)
    if compiler not in self.compiler_for_name:
        return False
    language = self.defaults.get_language(target)
    if language in self.compiler_for_name[compiler].langs:
        return True
    raise TaskError('%s can not generate %s' % (compiler, language))
def map_internal_jars(self, targets):
    """Copy each target's built jar (and source jar, when present) into the
    project's internal lib dirs and register them as classpath entries.

    :param targets: targets whose 'jars'/'source_jars' products are mapped.
    :raises TaskError: if a target maps to more than one jar or source jar.
    """
    internal_jar_dir = os.path.join(self.gen_project_workdir, 'internal-libs')
    safe_mkdir(internal_jar_dir, clean=True)

    internal_source_jar_dir = os.path.join(self.gen_project_workdir, 'internal-libsources')
    safe_mkdir(internal_source_jar_dir, clean=True)

    internal_jars = self.context.products.get('jars')
    internal_source_jars = self.context.products.get('source_jars')
    for target in targets:
        mappings = internal_jars.get(target)
        if mappings:
            for base, jars in mappings.items():
                if len(jars) != 1:
                    raise TaskError('Unexpected mapping, multiple jars for %s: %s' % (target, jars))
                jar = jars[0]
                cp_jar = os.path.join(internal_jar_dir, jar)
                shutil.copy(os.path.join(base, jar), cp_jar)

                # Look for an accompanying source jar for IDE navigation; optional.
                cp_source_jar = None
                mappings = internal_source_jars.get(target)
                if mappings:
                    for base, jars in mappings.items():
                        if len(jars) != 1:
                            raise TaskError(
                                'Unexpected mapping, multiple source jars for %s: %s' % (target, jars))
                        jar = jars[0]
                        cp_source_jar = os.path.join(internal_source_jar_dir, jar)
                        shutil.copy(os.path.join(base, jar), cp_source_jar)

                self._project.internal_jars.add(ClasspathEntry(cp_jar, source_jar=cp_source_jar))
def execute(self, targets):
    """Run checkstyle over the sources of checkable targets that are invalid (stale).

    :raises TaskError: if checkstyle exits non-zero.
    """
    if self.context.options.checkstyle_skip:
        return
    checked = filter(Checkstyle._is_checked, targets)
    with self.invalidated(checked) as invalidation_check:
        stale_targets = []
        for vt in invalidation_check.invalid_vts:
            stale_targets.extend(vt.targets)
        sources = self.calculate_sources(stale_targets)
        if not sources:
            return
        result = self.checkstyle(sources, stale_targets)
        if result != 0:
            raise TaskError('java %s ... exited non-zero (%i)' % (CHECKSTYLE_MAIN, result))
def __init__(self, context, workdir):
    """Validate mutually-exclusive flags and capture the depmap options.

    :raises TaskError: if both internal-only and external-only are selected.
    """
    super(Depmap, self).__init__(context, workdir)
    options = self.context.options
    if options.depmap_is_internal_only and options.depmap_is_external_only:
        cls = self.__class__
        raise TaskError("At most one of %s or %s can be selected." % (cls.internal_only_flag,
                                                                      cls.external_only_flag))
    self.is_internal_only = options.depmap_is_internal_only
    self.is_external_only = options.depmap_is_external_only
    self.is_minimal = options.depmap_is_minimal
    self.is_graph = options.depmap_is_graph
    self.separator = options.depmap_separator
def identify_jars(names, jars):
    """Map each name to the first jar whose basename starts with that name.

    :param names: jar name prefixes to look up.
    :param jars: candidate jar paths, searched in order.
    :returns: dict of name -> matching jar path.
    :raises TaskError: when some name matches no jar.
    """
    candidates = [(jar, os.path.basename(jar)) for jar in jars]
    jars_by_name = {}
    for name in names:
        for jar, basename in candidates:
            if basename.startswith(name):
                jars_by_name[name] = jar
                break
        else:
            # No candidate basename matched this name.
            raise TaskError('Couldn\'t find jar named %s' % name)
    return jars_by_name
def _run_tests(self, tests, classpath, main, jvm_args=None):
    """Run `tests` in batches through the JVM entry point `main`, tallying failures.

    :param tests: test specs to run.
    :param classpath: JVM classpath for the run.
    :param main: fully qualified main class of the test runner.
    :param jvm_args: extra JVM options prepended to the task's own.
    :raises TaskError: if any batch exits non-zero.
    """
    # TODO(John Sirois): Integrated batching with the test runner. As things stand we get
    # results summaries for example for each batch but no overall summary.
    # http://jira.local.twitter.com/browse/AWESOME-1114
    result = 0
    for batch in self._partition(tests):
        with binary_util.safe_args(batch) as batch_tests:
            # abs() folds negative (signal) exit codes into the cumulative failure tally.
            result += abs(execute_java(
                classpath=classpath,
                main=main,
                jvm_options=(jvm_args or []) + self._jvm_args,
                args=self._opts + batch_tests,
                workunit_factory=self._context.new_workunit,
                workunit_name='run',
                workunit_labels=[WorkUnit.TEST]))
            if result != 0 and self._fail_fast:
                break
    if result != 0:
        raise TaskError('java %s ... exited non-zero (%i)' % (main, result))
def _is_conflicts(self, jar_paths, binary_target):
    """Return True if the given jars contain conflicting (duplicate) entries.

    Scans every non-directory, non-manifest entry of each jar and groups jars
    by entry path; any entry appearing in more than one jar is a conflict.

    :param jar_paths: paths of the jars to scan.
    :param binary_target: the binary target being checked (for logging/errors).
    :raises TaskError: when conflicts exist and fail_fast is set.
    """
    artifacts_by_file_name = defaultdict(set)
    for jarpath in jar_paths:
        self.context.log.debug('  scanning %s' % jarpath)
        # Hoisted out of the entry loop: the jar's basename is invariant per jar.
        jar_name = os.path.basename(jarpath)
        # Fix: renamed the handle (the original shadowed the builtin `zip`) and
        # dropped the redundant explicit close() -- closing() already closes it.
        with closing(ZipFile(jarpath)) as zf:
            for file_name in zf.namelist():
                if (not self._isdir(file_name)) and Manifest.PATH != file_name:
                    artifacts_by_file_name[file_name].add(jar_name)

    conflicts_by_artifacts = self._get_conflicts_by_artifacts(artifacts_by_file_name)
    if len(conflicts_by_artifacts) > 0:
        self._log_conflicts(conflicts_by_artifacts, binary_target)
        if self.fail_fast:
            raise TaskError('Failing build for target %s.' % binary_target)
        return True
    return False
def instrument(self, targets, tests, junit_classpath):
    """Run emma's 'instr' to instrument classes for coverage collection.

    :param targets: targets whose coverage patterns select classes to instrument.
    :param tests: unused here; part of the runner's instrument interface.
    :param junit_classpath: classpath searched for the classes to instrument.
    :raises TaskError: if the emma instrumentation JVM exits non-zero.
    """
    safe_mkdir(self._coverage_instrument_dir, clean=True)
    emma_classpath = self._task_exports.tool_classpath(self._emma_bootstrap_key)
    with binary_util.safe_args(self.get_coverage_patterns(targets)) as patterns:
        args = [
            'instr',
            '-out', self._coverage_metadata_file,
            '-d', self._coverage_instrument_dir,
            '-cp', os.pathsep.join(junit_classpath),
            '-exit'
        ]
        for pattern in patterns:
            # Each pattern narrows which classes emma instruments.
            args.extend(['-filter', pattern])
        main = 'emma'
        result = execute_java(classpath=emma_classpath, main=main, args=args,
                              workunit_factory=self._context.new_workunit,
                              workunit_name='emma-instrument')
        if result != 0:
            raise TaskError("java %s ... exited non-zero (%i)"
                            " 'failed to instrument'" % (main, result))
def execute(self, targets):
    """Run the first binary target's main class in a JVM (or just record the command line).

    :raises TaskError: with the JVM's exit code when the run fails.
    """
    # The called binary may block for a while, allow concurrent pants activity during this pants
    # idle period.
    #
    # TODO(John Sirois): refactor lock so that I can do:
    # with self.context.lock.yield():
    #   - blocking code
    #
    # Currently re-acquiring the lock requires a path argument that was set up by the goal
    # execution engine.  I do not want task code to learn the lock location.
    # http://jira.local.twitter.com/browse/AWESOME-1317
    self.context.lock.release()

    # Run the first target that is a binary.
    binaries = filter(is_binary, targets)
    if len(binaries) > 0:  # We only run the first one.
        main = binaries[0].main
        egroups = self.context.products.get_data('exclusives_groups')
        group_key = egroups.get_group_key_for_target(binaries[0])
        group_classpath = egroups.get_classpath_for_group(group_key)

        # When only writing the command line, capture it instead of executing.
        executor = CommandLineGrabber() if self.only_write_cmd_line else None
        result = execute_java(
            classpath=(self.classpath(confs=self.confs, exclusives_classpath=group_classpath)),
            main=main,
            executor=executor,
            jvm_options=self.jvm_args,
            args=self.args,
            workunit_factory=self.context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnit.RUN]
        )

        if self.only_write_cmd_line:
            with safe_open(self.only_write_cmd_line, 'w') as outfile:
                outfile.write(' '.join(executor.cmd))
        elif result != 0:
            raise TaskError('java %s ... exited non-zero (%i)' % (main, result), exit_code=result)
def _validate(defaults, targets):
    """Verify every thrift dependency tree uses one uniform compiler configuration.

    Walks each JavaThriftLibrary's thrift dependencies and collects any whose
    (language, rpc_style) differs from the root's.

    :param defaults: provides get_language/get_rpc_style per target.
    :param targets: targets to validate (non-thrift targets are ignored).
    :raises TaskError: listing every root with mismatched dependencies.
    """
    ValidateCompilerConfig = namedtuple('ValidateCompilerConfig', ['language', 'rpc_style'])

    def compiler_config(tgt):
        # Note compiler is not present in this signature. At this time
        # Scrooge and the Apache thrift generators produce identical
        # java sources, and the Apache generator does not produce scala
        # sources. As there's no permutation allowing the creation of
        # incompatible sources with the same language+rpc_style we omit
        # the compiler from the signature at this time.
        return ValidateCompilerConfig(language=defaults.get_language(tgt),
                                      rpc_style=defaults.get_rpc_style(tgt))

    mismatched_compiler_configs = defaultdict(set)

    for target in filter(lambda t: isinstance(t, JavaThriftLibrary), targets):
        mycompilerconfig = compiler_config(target)

        def collect(dep):
            # `mycompilerconfig` is read at call time; safe because collect is
            # consumed by target.walk within this same iteration.
            if mycompilerconfig != compiler_config(dep):
                mismatched_compiler_configs[target].add(dep)

        target.walk(collect, predicate=lambda t: isinstance(t, JavaThriftLibrary))

    if mismatched_compiler_configs:
        msg = ['Thrift dependency trees must be generated with a uniform compiler configuration.\n\n']
        for tgt in sorted(mismatched_compiler_configs.keys()):
            msg.append('%s - %s\n' % (tgt, compiler_config(tgt)))
            for dep in mismatched_compiler_configs[tgt]:
                msg.append('    %s - %s\n' % (dep, compiler_config(dep)))
        raise TaskError(''.join(msg))
def run_tests(tests):
    """Run the given specs via the ExplicitSpecsRunnerMain JVM entry point.

    Closure: reads self/targets from the enclosing scope.

    :param tests: spec class names passed via --specs.
    :raises TaskError: if the specs JVM exits non-zero.
    """
    args = ['--color'] if self.color else []
    args.append('--specs=%s' % ','.join(tests))
    specs_runner_main = 'com.twitter.common.testing.ExplicitSpecsRunnerMain'

    bootstrapped_cp = self.tool_classpath(self._specs_bootstrap_key)
    classpath = self.classpath(
        bootstrapped_cp,
        confs=self.confs,
        exclusives_classpath=self.get_base_classpath_for_target(targets[0]))

    result = execute_java(
        classpath=classpath,
        main=specs_runner_main,
        jvm_options=self._jvm_options,
        args=args,
        workunit_factory=self.context.new_workunit,
        workunit_name='specs',
        workunit_labels=[WorkUnit.TEST])
    if result != 0:
        raise TaskError('java %s ... exited non-zero (%i)' % (specs_runner_main, result))