def execution_order(phases):
  """Yields goals in execution order for the given phases.

  Does not account for goals run multiple times due to grouping.

  Raises:
    ValueError: if the remaining goals can make no progress (a dependency
      cycle among the registered goals), instead of looping forever.
  """
  dependencies_by_goal = OrderedDict()

  def populate_dependencies(phases):
    # Post-order registration: a goal's transitive dependencies are entered
    # into dependencies_by_goal before the goal itself, so dict order is a
    # valid topological order for acyclic graphs.
    for phase in phases:
      for goal in phase.goals():
        if goal not in dependencies_by_goal:
          populate_dependencies(goal.dependencies)
          # A plain set suffices: deps is only tested for emptiness and
          # membership, so iteration order never affects the yielded order.
          deps = set()
          for phasedep in goal.dependencies:
            deps.update(phasedep.goals())
          dependencies_by_goal[goal] = deps

  populate_dependencies(phases)

  # Kahn-style topological sort: repeatedly emit goals whose dependency set
  # has drained, preserving registration order among ready goals.
  while dependencies_by_goal:
    made_progress = False
    # Snapshot the items: we pop from the dict inside the loop, which would
    # break dict-view iteration on modern Python.
    for goal, deps in list(dependencies_by_goal.items()):
      if not deps:
        dependencies_by_goal.pop(goal)
        for remaining in dependencies_by_goal.values():
          remaining.discard(goal)
        made_progress = True
        yield goal
    if not made_progress:
      # Guard against spinning forever when no goal can ever become ready.
      raise ValueError('Unsatisfiable goal dependencies: %r'
                       % list(dependencies_by_goal.keys()))
def execution_order(phases):
  """Yields goals in execution order for the given phases.

  Does not account for goals run multiple times due to grouping.

  Raises:
    ValueError: when no remaining goal is free of dependencies (a cycle),
      rather than iterating forever.
  """
  outstanding = OrderedDict()

  def register(phases):
    # Depth-first so every dependency is registered ahead of its dependents;
    # dict insertion order is then a valid topological order when acyclic.
    for phase in phases:
      for goal in phase.goals():
        if goal in outstanding:
          continue
        register(goal.dependencies)
        # Only emptiness/membership of the dep set is ever consulted, so a
        # builtin set is equivalent to an ordered set here.
        goal_deps = set()
        for dep_phase in goal.dependencies:
          goal_deps.update(dep_phase.goals())
        outstanding[goal] = goal_deps

  register(phases)

  while outstanding:
    emitted = False
    # Iterate a snapshot because entries are deleted mid-loop; relying on the
    # dict view while mutating would raise on modern Python.
    for goal, goal_deps in list(outstanding.items()):
      if goal_deps:
        continue
      del outstanding[goal]
      for other in outstanding.values():
        other.discard(goal)
      emitted = True
      yield goal
    if not emitted:
      # A full pass with no emission means the remaining goals form a cycle.
      raise ValueError('Unsatisfiable goal dependencies: %r'
                       % list(outstanding.keys()))
def compile(self, classpath, sources, output_dir, analysis_cache, upstream_analysis_caches, depfile):
  """Invokes zinc over the given scala sources and returns run_zinc's result.

  Wires upstream analysis caches in via -analysis-map and mirrors analysis out.
  """
  # Scalac options are forwarded by zinc when prefixed with -S.
  args = ['-S%s' % opt for opt in self._scalac_args]

  def analysis_cache_full_path(analysis_cache_product):
    # The product is expected to be { analysis_cache_dir: [analysis_cache_file] }
    # with exactly one dir mapping to exactly one file.
    if len(analysis_cache_product) != 1:
      raise TaskError('There can only be one analysis cache file per output directory')
    cache_dir, cache_files = analysis_cache_product.iteritems().next()
    if len(cache_files) != 1:
      raise TaskError('There can only be one analysis cache file per output directory')
    return os.path.join(cache_dir, cache_files[0])

  # <output dir> -> <full path to the analysis cache for the classes in that dir>.
  analysis_map = OrderedDict(
    (out_dir, analysis_cache_full_path(product))
    for out_dir, product in upstream_analysis_caches.itermappings())
  if analysis_map:
    args.append('-analysis-map')
    args.append(','.join('%s:%s' % entry for entry in analysis_map.items()))

  args += [
    '-analysis-cache', analysis_cache,
    '-classpath', ':'.join(self._zinc_classpath + classpath),
    '-output-products', depfile,
    '-mirror-analysis',
    '-d', output_dir,
  ]
  args += sources
  return self.run_zinc(args)
def compile(self, classpath, sources, output_dir, analysis_cache, upstream_analysis_caches, depfile):
  """Compiles scala sources via zinc, resolving the compiler and zinc jars from profiles."""
  compiler_classpath = nailgun_profile_classpath(self, self._compile_profile)

  # Scalac options are forwarded by zinc when prefixed with -S.
  args = ['-S%s' % opt for opt in self._args]

  def analysis_cache_full_path(analysis_cache_product):
    # The product is expected to be { analysis_cache_dir: [analysis_cache_file] }
    # with exactly one dir mapping to exactly one file.
    if len(analysis_cache_product) != 1:
      raise TaskError(
          'There can only be one analysis cache file per output directory'
      )
    cache_dir, cache_files = analysis_cache_product.iteritems().next()
    if len(cache_files) != 1:
      raise TaskError(
          'There can only be one analysis cache file per output directory'
      )
    return os.path.join(cache_dir, cache_files[0])

  # <output dir> -> <full path to the analysis cache for the classes in that dir>.
  analysis_map = OrderedDict(
    (out_dir, analysis_cache_full_path(product))
    for out_dir, product in upstream_analysis_caches.itermappings())
  if analysis_map:
    args.append('-analysis-map')
    args.append(','.join('%s:%s' % entry for entry in analysis_map.items()))

  zinc_classpath = nailgun_profile_classpath(self, self._zinc_profile)
  # The zinc jar names double as the flag names zinc expects.
  zinc_jars = ScalaCompile.identify_zinc_jars(compiler_classpath, zinc_classpath)
  for name, jarpath in zinc_jars.items():
    args.append('-%s' % name)
    args.append(jarpath)

  args += [
    '-analysis-cache', analysis_cache,
    '-log-level', self.context.options.log_level or 'info',
    '-classpath', ':'.join(zinc_classpath + classpath),
    '-output-products', depfile,
    '-d', output_dir,
  ]
  if not self._color:
    args.append('-no-color')
  args += sources

  self.context.log.debug('Executing: %s %s' % (self._main, ' '.join(args)))
  return self.runjava(self._main, classpath=zinc_classpath, args=args, jvmargs=self._jvm_args)
def compile(self, classpath, sources, output_dir, analysis_cache, upstream_analysis_caches, depfile):
  """Compiles scala sources with zinc, emitting a source -> class depfile via the depemitter plugin."""
  safe_mkdir(output_dir)

  compiler_classpath = nailgun_profile_classpath(self, self._compile_profile)

  # TODO(John Sirois): separate compiler profile from runtime profile
  # The depemitter scalac plugin writes the source -> class dependency file.
  compiler_args = [
    '-Xplugin:%s' % self.get_depemitter_plugin(),
    '-P:depemitter:file:%s' % depfile,
  ]
  compiler_args += self._args

  # Scalac options are forwarded by zinc when prefixed with -S.
  args = ['-S%s' % opt for opt in compiler_args]

  def analysis_cache_full_path(analysis_cache_product):
    # The product is expected to be { analysis_cache_dir: [analysis_cache_file] }
    # with exactly one dir mapping to exactly one file.
    if len(analysis_cache_product) != 1:
      raise TaskError('There can only be one analysis cache file per output directory')
    cache_dir, cache_files = analysis_cache_product.iteritems().next()
    if len(cache_files) != 1:
      raise TaskError('There can only be one analysis cache file per output directory')
    return os.path.join(cache_dir, cache_files[0])

  # <output dir> -> <full path to the analysis cache for the classes in that dir>.
  analysis_map = OrderedDict(
    (out_dir, analysis_cache_full_path(product))
    for out_dir, product in upstream_analysis_caches.itermappings())
  if analysis_map:
    args.append('-analysis-map')
    args.append(','.join('%s:%s' % entry for entry in analysis_map.items()))
  upstream_classes_dirs = analysis_map.keys()

  zinc_classpath = nailgun_profile_classpath(self, self._zinc_profile)
  # The zinc jar names double as the flag names zinc expects.
  zinc_jars = ScalaCompile.identify_zinc_jars(compiler_classpath, zinc_classpath)
  for name, jarpath in zinc_jars.items():
    args.append('-%s' % name)
    args.append(jarpath)

  args += [
    '-analysis-cache', analysis_cache,
    '-log-level', self.context.options.log_level or 'info',
    # Upstream classes dirs go on the compile classpath alongside zinc + deps.
    '-classpath', ':'.join(zinc_classpath + classpath + upstream_classes_dirs),
    '-d', output_dir,
  ]
  if not self._color:
    args.append('-no-color')
  args += sources

  self.context.log.debug('Executing: %s %s' % (self._main, ' '.join(args)))
  return self.runjava(self._main, classpath=zinc_classpath, args=args, jvmargs=self._jvm_args)
def compile(self, classpath, sources, output_dir, analysis_cache, upstream_analysis_caches, depfile):
  """Runs zinc over the given sources, mirroring analysis and writing output products to depfile."""
  # Scalac options are forwarded by zinc when prefixed with -S.
  args = ['-S%s' % opt for opt in self._args]

  def analysis_cache_full_path(analysis_cache_product):
    # The product is expected to be { analysis_cache_dir: [analysis_cache_file] }
    # with exactly one dir mapping to exactly one file.
    if len(analysis_cache_product) != 1:
      raise TaskError('There can only be one analysis cache file per output directory')
    cache_dir, cache_files = analysis_cache_product.iteritems().next()
    if len(cache_files) != 1:
      raise TaskError('There can only be one analysis cache file per output directory')
    return os.path.join(cache_dir, cache_files[0])

  # <output dir> -> <full path to the analysis cache for the classes in that dir>.
  analysis_map = OrderedDict(
    (out_dir, analysis_cache_full_path(product))
    for out_dir, product in upstream_analysis_caches.itermappings())

  # Pre-resolved zinc jar flags (flag name + jar path pairs).
  args += self._zinc_jar_args
  if analysis_map:
    args.append('-analysis-map')
    args.append(','.join('%s:%s' % entry for entry in analysis_map.items()))

  args += [
    '-analysis-cache', analysis_cache,
    '-log-level', self.context.options.log_level or 'info',
    '-classpath', ':'.join(self._zinc_classpath + classpath),
    '-output-products', depfile,
    '-mirror-analysis',
    '-d', output_dir,
  ]
  if not self._color:
    args.append('-no-color')
  args += sources

  self.context.log.debug('Executing: %s %s' % (self._main, ' '.join(args)))
  return self.runjava(self._main, classpath=self._zinc_classpath, args=args, jvmargs=self._jvm_args)