class ScalaCompile(JvmCompile):
  """Compiles Scala sources, delegating the zinc invocation to ZincUtils.

  NOTE(review): reformatted from a whitespace-collapsed chunk; 2-space indent
  per the surrounding project's apparent convention.
  """

  _language = 'scala'
  _file_suffix = '.scala'
  _config_section = 'scala-compile'

  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    """Register the legacy-style --plugins flag."""
    super(ScalaCompile, cls).setup_parser(option_group, args, mkflag)
    option_group.add_option(mkflag('plugins'), dest='plugins', default=None, action='append',
                            help='Use these scalac plugins. Default is set in pants.ini.')

  def __init__(self, context, workdir):
    # jdk=False: presumably this task does not need tools.jar on the zinc classpath — confirm.
    super(ScalaCompile, self).__init__(context, workdir, jdk=False)
    # Set up the zinc utils.
    color = not context.options.no_color
    self._zinc_utils = ZincUtils(context=context, nailgun_task=self,
                                 jvm_options=self._jvm_options, color=color)

  @property
  def config_section(self):
    return self._config_section

  def create_analysis_tools(self):
    """Build the zinc analysis parsing/merging helpers for this task."""
    return AnalysisTools(self.context, ZincAnalysisParser(self._classes_dir), ZincAnalysis)

  def extra_compile_time_classpath_elements(self):
    # Classpath entries necessary for our compiler plugins.
    return self._zinc_utils.plugin_jars()

  # Invalidate caches if the toolchain changes.
  def invalidate_for(self):
    """Return a fingerprint list: zinc toolchain key plus any -S-target: scalac arg."""
    zinc_invalidation_key = self._zinc_utils.invalidate_for()
    jvm_target_version = None
    # Check scalac args for jvm target version. Last matching arg wins.
    for arg in self._args:
      if arg.strip().startswith("-S-target:"):
        jvm_target_version = arg.strip()
    return [jvm_target_version, zinc_invalidation_key]

  def extra_products(self, target):
    """For scalac-plugin targets, emit the generated plugin descriptor file."""
    ret = []
    if target.is_scalac_plugin and target.classname:
      root, plugin_info_file = ZincUtils.write_plugin_info(self._resources_dir, target)
      ret.append((root, [plugin_info_file]))
    return ret

  def compile(self, args, classpath, sources, classes_output_dir, analysis_file):
    # We have to treat our output dir as an upstream element, so zinc can find valid
    # analysis for previous partitions. We use the global valid analysis for the upstream.
    upstream = ({classes_output_dir: self._analysis_file}
                if os.path.exists(self._analysis_file) else {})
    return self._zinc_utils.compile(args, classpath + [self._classes_dir], sources,
                                    classes_output_dir, analysis_file, upstream)
def register_options(cls, register): super(ScalaCompile, cls).register_options(register) # Note: Used in ZincUtils. # TODO: Revisit this. It's unintuitive for ZincUtils to reach back into the task for options. register('--plugins', action='append', help='Use these scalac plugins.') ZincUtils.register_options(register, cls.register_jvm_tool)
def __init__(self, context, workdir):
  """Create the task and its ZincUtils helper.

  :param context: the pants run context (provides options and logging).
  :param workdir: scratch directory for this task.
  """
  super(ScalaCompile, self).__init__(context, workdir, jdk=False)
  # Zinc output is colorized unless the user disabled color globally.
  use_color = not context.options.no_color
  self._zinc_utils = ZincUtils(context=context,
                               nailgun_task=self,
                               jvm_options=self._jvm_options,
                               color=use_color)
def register_options(cls, register): super(ScalaCompile, cls).register_options(register) # Note: Used in ZincUtils. # TODO: Revisit this. It's unintuitive for ZincUtils to reach back into the task for options. register('--plugins', action='append', help='Use these scalac plugins.') register('--plugin-args', advanced=True, type=Options.dict, default={}, help='Map from plugin name to list of arguments for that plugin.') register('--name-hashing', action='store_true', default=False, help='Use zinc name hashing.') ZincUtils.register_options(register, cls.register_jvm_tool)
def __init__(self, *args, **kwargs):
  """Create the task and its ZincUtils helper, wired to the task's options."""
  super(ScalaCompile, self).__init__(*args, **kwargs)
  # ZincUtils drives the actual zinc invocations; color and log level come
  # straight from this task's options.
  opts = self.get_options()
  self._zinc_utils = ZincUtils(context=self.context,
                               nailgun_task=self,
                               jvm_options=self._jvm_options,
                               color=opts.colors,
                               log_level=opts.level)
def __init__(self, *args, **kwargs):
  """Create the task, its ZincUtils helper, and the default scalac args."""
  super(ScalaCompile, self).__init__(*args, **kwargs)
  # Honor the global no-color flag when configuring zinc output.
  self._zinc_utils = ZincUtils(context=self.context,
                               nailgun_task=self,
                               jvm_options=self._jvm_options,
                               color=not self.context.options.no_color)
  # NOTE(review): warning and no-warning defaults both reference
  # _SCALA_COMPILE_WARNING_ARGS_DEFAULT here — looks suspicious, but every
  # sibling version in this file does the same, so it is preserved as-is.
  self.configure_args(args_defaults=_SCALA_COMPILE_ARGS_DEFAULT,
                      warning_defaults=_SCALA_COMPILE_WARNING_ARGS_DEFAULT,
                      no_warning_defaults=_SCALA_COMPILE_WARNING_ARGS_DEFAULT)
def extra_products(self, target):
  """Return (root, [files]) pairs for the scalac plugin descriptor, if any."""
  # Only scalac-plugin targets with a declared classname produce a descriptor.
  if not (target.is_scalac_plugin and target.classname):
    return []
  root, info_file = ZincUtils.write_plugin_info(self._resources_dir, target)
  return [(root, [info_file])]
def __init__(self, *args, **kwargs):
  """Create the task and its ZincUtils helper, wired to the task's options."""
  super(ScalaCompile, self).__init__(*args, **kwargs)
  # Color is on unless the user passed --no-colors; log level is forwarded verbatim.
  opts = self.get_options()
  self._zinc_utils = ZincUtils(context=self.context,
                               nailgun_task=self,
                               jvm_options=self._jvm_options,
                               color=not opts.no_colors,
                               log_level=opts.level)
def extra_products(self, target):
  """Override extra_products to produce a plugin information file."""
  # NB: We don't yet support explicit in-line compilation of scala compiler plugins from
  # the workspace to be used in subsequent compile rounds like we do for annotation
  # processors with javac. That would require another GroupTask similar to AptCompile,
  # but for scala.
  if not (target.is_scalac_plugin and target.classname):
    return []
  root, plugin_info_file = ZincUtils.write_plugin_info(self._plugin_info_dir, target)
  return [(root, [plugin_info_file])]
def __init__(self, *args, **kwargs):
  """Create the task, its ZincUtils helper, and the default scalac args."""
  super(ScalaCompile, self).__init__(*args, **kwargs)
  # Collect the zinc wiring in one place before constructing the helper.
  zinc_kwargs = dict(context=self.context,
                     nailgun_task=self,
                     jvm_options=self._jvm_options,
                     color=not self.context.options.no_color)
  self._zinc_utils = ZincUtils(**zinc_kwargs)
  # NOTE(review): warning and no-warning defaults both reference
  # _SCALA_COMPILE_WARNING_ARGS_DEFAULT — matches every sibling version, preserved as-is.
  self.configure_args(args_defaults=_SCALA_COMPILE_ARGS_DEFAULT,
                      warning_defaults=_SCALA_COMPILE_WARNING_ARGS_DEFAULT,
                      no_warning_defaults=_SCALA_COMPILE_WARNING_ARGS_DEFAULT)
def __init__(self, *args, **kwargs):
  """Create the task, its ZincUtils helper, and the plugin-info scratch dir."""
  super(ScalaCompile, self).__init__(*args, **kwargs)
  options = self.get_options()
  self._zinc_utils = ZincUtils(context=self.context,
                               nailgun_task=self,
                               jvm_options=self._jvm_options,
                               color=options.colors,
                               log_level=options.level)
  # A directory independent of any other classpath which can contain per-target
  # plugin resource files.
  self._plugin_info_dir = os.path.join(self.workdir, 'scalac-plugin-info')
def test_get_compile_args(self):
  """The -classpath passed to zinc lists entries relative to the build root.

  Fixes two defects in the original: the outside-the-build-root jar was a
  hardcoded POSIX path ('/outside-build-root/bar.jar'), which is not portable,
  and the assertion expected the absolute path verbatim, whereas classpath
  elements are relativized to the build root (as the sibling versions of this
  test assert).
  """
  jar_outside_build_root = os.path.join(os.path.sep, 'outside-build-root', 'bar.jar')
  classpath = [os.path.join(self.build_root, 'foo.jar'), jar_outside_build_root]
  sources = ['X.scala']
  args = ZincUtils._get_compile_args([], classpath, sources, 'bogus output dir',
                                     'bogus analysis file', [])
  classpath_found = False
  classpath_correct = False
  for arg in args:
    if classpath_found:
      # Classpath elements are always relative to the build root.
      jar_relpath = os.path.relpath(jar_outside_build_root, self.build_root)
      self.assertEquals('foo.jar:{0}'.format(jar_relpath), arg)
      classpath_correct = True
      break
    if arg == '-classpath':
      classpath_found = True
  self.assertTrue(classpath_correct)
def test_get_compile_args(self):
  """The -classpath passed to zinc lists entries relative to the build root."""
  external_jar = os.path.join(os.path.sep, 'outside-build-root', 'bar.jar')
  classpath = [os.path.join(self.build_root, 'foo.jar'), external_jar]
  args = ZincUtils._get_compile_args([], classpath, ['X.scala'], 'bogus output dir',
                                     'bogus analysis file', [])
  # Classpath elements are always relative to the build root.
  expected = 'foo.jar:{0}'.format(os.path.relpath(external_jar, self.build_root))
  verified = False
  saw_flag = False
  for arg in args:
    if saw_flag:
      self.assertEquals(expected, arg)
      verified = True
      break
    saw_flag = (arg == '-classpath')
  self.assertTrue(verified)
def test_get_compile_args(self):
  """The -classpath passed to zinc lists entries relative to the build root."""
  bar_jar = os.path.join(os.path.sep, 'outside-build-root', 'bar.jar')
  classpath = [os.path.join(self.build_root, 'foo.jar'), bar_jar]
  sources = ['X.scala']
  args = ZincUtils._get_compile_args([], classpath, sources, 'bogus output dir',
                                     'bogus analysis file', [])
  found = False
  for idx, arg in enumerate(args):
    if arg == '-classpath' and idx + 1 < len(args):
      # Classpath elements are always relative to the build root.
      rel_bar = os.path.relpath(bar_jar, self.build_root)
      self.assertEquals('foo.jar:{0}'.format(rel_bar), args[idx + 1])
      found = True
      break
  self.assertTrue(found)
class ScalaCompile(JvmCompile):
  """Compiles Scala sources, delegating the zinc invocation to ZincUtils.

  NOTE(review): reformatted from a whitespace-collapsed chunk; 2-space indent
  per the surrounding project's apparent convention.
  """

  _language = 'scala'
  _file_suffix = '.scala'
  _config_section = 'scala-compile'

  @classmethod
  def register_options(cls, register):
    """Register scala-compile options (with a legacy flag alias)."""
    super(ScalaCompile, cls).register_options(register)
    # Note: Used in ZincUtils.
    # TODO: Revisit this. It's unintuitive for ZincUtils to reach back into the task for options.
    register('--plugins', default=None, action='append', help='Use these scalac plugins.',
             legacy='plugins')

  def __init__(self, *args, **kwargs):
    super(ScalaCompile, self).__init__(*args, **kwargs)
    # Set up the zinc utils.
    color = not self.context.options.no_color
    self._zinc_utils = ZincUtils(context=self.context, nailgun_task=self,
                                 jvm_options=self._jvm_options, color=color)
    # NOTE(review): warning and no-warning defaults both reference
    # _SCALA_COMPILE_WARNING_ARGS_DEFAULT — matches sibling versions; preserved as-is.
    self.configure_args(args_defaults=_SCALA_COMPILE_ARGS_DEFAULT,
                        warning_defaults=_SCALA_COMPILE_WARNING_ARGS_DEFAULT,
                        no_warning_defaults=_SCALA_COMPILE_WARNING_ARGS_DEFAULT)

  @property
  def config_section(self):
    return self._config_section

  def create_analysis_tools(self):
    """Build the zinc analysis parsing/merging helpers for this task."""
    return AnalysisTools(self.context, ZincAnalysisParser(self._classes_dir), ZincAnalysis)

  def extra_compile_time_classpath_elements(self):
    # Classpath entries necessary for our compiler plugins.
    return self._zinc_utils.plugin_jars()

  # Invalidate caches if the toolchain changes.
  def platform_version_info(self):
    """Return the zinc toolchain key extended with any -S-target: scalac arg."""
    zinc_invalidation_key = self._zinc_utils.platform_version_info()
    jvm_target_version = ''
    # Check scalac args for jvm target version. Last matching arg wins.
    for arg in self._args:
      if arg.strip().startswith("-S-target:"):
        jvm_target_version = arg.strip()
    zinc_invalidation_key.append(jvm_target_version)
    return zinc_invalidation_key

  def extra_products(self, target):
    """For scalac-plugin targets, emit the generated plugin descriptor file."""
    ret = []
    if target.is_scalac_plugin and target.classname:
      root, plugin_info_file = ZincUtils.write_plugin_info(self._resources_dir, target)
      ret.append((root, [plugin_info_file]))
    return ret

  def compile(self, args, classpath, sources, classes_output_dir, analysis_file):
    # We have to treat our output dir as an upstream element, so zinc can find valid
    # analysis for previous partitions. We use the global valid analysis for the upstream.
    upstream = ({classes_output_dir: self._analysis_file}
                if os.path.exists(self._analysis_file) else {})
    return self._zinc_utils.compile(args, classpath + [self._classes_dir], sources,
                                    classes_output_dir, analysis_file, upstream)
class ScalaCompile(JvmCompile):
  """Compiles Scala sources, delegating the zinc invocation to ZincUtils.

  NOTE(review): reformatted from a whitespace-collapsed chunk; the comment that
  was split across the chunk boundary ("...previous partitions. / We use the
  global valid analysis...") has been rejoined.
  """

  _language = 'scala'
  _file_suffix = '.scala'
  _config_section = 'scala-compile'

  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    """Register the legacy-style --plugins flag."""
    super(ScalaCompile, cls).setup_parser(option_group, args, mkflag)
    option_group.add_option(
        mkflag('plugins'), dest='plugins', default=None, action='append',
        help='Use these scalac plugins. Default is set in pants.ini.')

  def __init__(self, *args, **kwargs):
    super(ScalaCompile, self).__init__(*args, **kwargs)
    # Set up the zinc utils.
    color = not self.context.options.no_color
    self._zinc_utils = ZincUtils(context=self.context, nailgun_task=self,
                                 jvm_options=self._jvm_options, color=color)
    # NOTE(review): warning and no-warning defaults both reference
    # _SCALA_COMPILE_WARNING_ARGS_DEFAULT — matches sibling versions; preserved as-is.
    self.configure_args(
        args_defaults=_SCALA_COMPILE_ARGS_DEFAULT,
        warning_defaults=_SCALA_COMPILE_WARNING_ARGS_DEFAULT,
        no_warning_defaults=_SCALA_COMPILE_WARNING_ARGS_DEFAULT)

  @property
  def config_section(self):
    return self._config_section

  def create_analysis_tools(self):
    """Build the zinc analysis parsing/merging helpers for this task."""
    return AnalysisTools(self.context, ZincAnalysisParser(self._classes_dir), ZincAnalysis)

  def extra_compile_time_classpath_elements(self):
    # Classpath entries necessary for our compiler plugins.
    return self._zinc_utils.plugin_jars()

  # Invalidate caches if the toolchain changes.
  def platform_version_info(self):
    """Return the zinc toolchain key extended with any -S-target: scalac arg."""
    zinc_invalidation_key = self._zinc_utils.platform_version_info()
    jvm_target_version = ''
    # Check scalac args for jvm target version. Last matching arg wins.
    for arg in self._args:
      if arg.strip().startswith("-S-target:"):
        jvm_target_version = arg.strip()
    zinc_invalidation_key.append(jvm_target_version)
    return zinc_invalidation_key

  def extra_products(self, target):
    """For scalac-plugin targets, emit the generated plugin descriptor file."""
    ret = []
    if target.is_scalac_plugin and target.classname:
      root, plugin_info_file = ZincUtils.write_plugin_info(
          self._resources_dir, target)
      ret.append((root, [plugin_info_file]))
    return ret

  def compile(self, args, classpath, sources, classes_output_dir, analysis_file):
    # We have to treat our output dir as an upstream element, so zinc can find valid
    # analysis for previous partitions. We use the global valid analysis for the upstream.
    upstream = ({classes_output_dir: self._analysis_file}
                if os.path.exists(self._analysis_file) else {})
    return self._zinc_utils.compile(args, classpath + [self._classes_dir], sources,
                                    classes_output_dir, analysis_file, upstream)
class ScalaCompile(JvmCompile):
  """Compiles Scala sources, delegating the zinc invocation to ZincUtils.

  NOTE(review): reformatted from a whitespace-collapsed chunk.
  """

  _language = 'scala'
  _file_suffix = '.scala'

  @classmethod
  def get_args_default(cls, bootstrap_option_values):
    # -S-prefixed args: presumably zinc's pass-through prefix to scalac — confirm.
    return ('-S-encoding', '-SUTF-8', '-S-g:vars')

  @classmethod
  def get_warning_args_default(cls):
    return ('-S-deprecation', '-S-unchecked')

  @classmethod
  def get_no_warning_args_default(cls):
    return ('-S-nowarn',)

  @classmethod
  def register_options(cls, register):
    super(ScalaCompile, cls).register_options(register)
    # Note: Used in ZincUtils.
    # TODO: Revisit this. It's unintuitive for ZincUtils to reach back into the task for options.
    register('--plugins', action='append', help='Use these scalac plugins.')
    register('--name-hashing', action='store_true', default=False, help='Use zinc name hashing.')
    ZincUtils.register_options(register, cls.register_jvm_tool)

  def __init__(self, *args, **kwargs):
    super(ScalaCompile, self).__init__(*args, **kwargs)
    # Set up the zinc utils.
    color = self.get_options().colors
    self._zinc_utils = ZincUtils(context=self.context, nailgun_task=self,
                                 jvm_options=self._jvm_options, color=color,
                                 log_level=self.get_options().level)

  def create_analysis_tools(self):
    """Build the zinc analysis parsing/merging helpers for this task."""
    return AnalysisTools(self.context.java_home, self.ivy_cache_dir,
                         ZincAnalysisParser(self._classes_dir), ZincAnalysis)

  def extra_compile_time_classpath_elements(self):
    # Classpath entries necessary for our compiler plugins.
    return self._zinc_utils.plugin_jars()

  # Invalidate caches if the toolchain changes.
  def platform_version_info(self):
    """Return the zinc toolchain key extended with any -S-target: scalac arg."""
    zinc_invalidation_key = self._zinc_utils.platform_version_info()
    jvm_target_version = ''
    # Check scalac args for jvm target version. Last matching arg wins.
    for arg in self._args:
      if arg.strip().startswith("-S-target:"):
        jvm_target_version = arg.strip()
    zinc_invalidation_key.append(jvm_target_version)
    return zinc_invalidation_key

  def extra_products(self, target):
    """For scalac-plugin targets, emit the generated plugin descriptor file."""
    ret = []
    if target.is_scalac_plugin and target.classname:
      root, plugin_info_file = ZincUtils.write_plugin_info(
          self._resources_dir, target)
      ret.append((root, [plugin_info_file]))
    return ret

  def compile(self, args, classpath, sources, classes_output_dir, analysis_file):
    # We have to treat our output dir as an upstream element, so zinc can find valid
    # analysis for previous partitions. We use the global valid analysis for the upstream.
    upstream = ({classes_output_dir: self._analysis_file}
                if os.path.exists(self._analysis_file) else {})
    return self._zinc_utils.compile(args, classpath + [self._classes_dir], sources,
                                    classes_output_dir, analysis_file, upstream)
class ScalaCompile(JvmCompile):
  """Compiles Scala sources, delegating the zinc invocation to ZincUtils.

  NOTE(review): reformatted from a whitespace-collapsed chunk; the invalidation
  comment split across the chunk boundary has been rejoined and a typo
  ("invalidated on") fixed.
  """

  _language = 'scala'
  _file_suffix = '.scala'

  @classmethod
  def get_args_default(cls, bootstrap_option_values):
    # -S-prefixed args: presumably zinc's pass-through prefix to scalac — confirm.
    return ('-S-encoding', '-SUTF-8', '-S-g:vars')

  @classmethod
  def get_warning_args_default(cls):
    return ('-S-deprecation', '-S-unchecked')

  @classmethod
  def get_no_warning_args_default(cls):
    return ('-S-nowarn',)

  @classmethod
  def register_options(cls, register):
    super(ScalaCompile, cls).register_options(register)
    # Note: Used in ZincUtils.
    # TODO: Revisit this. It's unintuitive for ZincUtils to reach back into the task for options.
    register('--plugins', action='append', help='Use these scalac plugins.')
    register('--plugin-args', advanced=True, type=Options.dict, default={},
             help='Map from plugin name to list of arguments for that plugin.')
    register('--name-hashing', action='store_true', default=False, help='Use zinc name hashing.')
    ZincUtils.register_options(register, cls.register_jvm_tool)

  def __init__(self, *args, **kwargs):
    super(ScalaCompile, self).__init__(*args, **kwargs)
    # Set up the zinc utils.
    color = self.get_options().colors
    self._zinc_utils = ZincUtils(context=self.context, nailgun_task=self,
                                 jvm_options=self._jvm_options, color=color,
                                 log_level=self.get_options().level)
    # A directory independent of any other classpath which can contain per-target
    # plugin resource files.
    self._plugin_info_dir = os.path.join(self.workdir, 'scalac-plugin-info')

  def create_analysis_tools(self):
    """Build the zinc analysis parsing/merging helpers for this task."""
    return AnalysisTools(self.context.java_home, ZincAnalysisParser(), ZincAnalysis)

  def extra_compile_time_classpath_elements(self):
    # Classpath entries necessary for our compiler plugins.
    return self._zinc_utils.plugin_jars()

  # Invalidate caches if the toolchain changes.
  def _language_platform_version_info(self):
    zinc_invalidation_key = self._zinc_utils.platform_version_info()
    # Invalidate if any compiler args change.
    # Note that while some args are obviously important for invalidation (e.g., the jvm target
    # version), some might not be. However we must invalidate on all the args, because Zinc
    # ignores analysis files if the compiler args they were created with are different from the
    # current ones, and does a full recompile. So if we allow cached artifacts with those analysis
    # files to be used, Zinc will do unnecessary full recompiles on subsequent edits.
    zinc_invalidation_key.extend(self._args)
    # Invalidate if use of name hashing changes.
    zinc_invalidation_key.append(
        'name-hashing-{0}'.format('on' if self.get_options().name_hashing else 'off'))
    return zinc_invalidation_key

  def extra_products(self, target):
    """Override extra_products to produce a plugin information file."""
    ret = []
    if target.is_scalac_plugin and target.classname:
      # NB: We don't yet support explicit in-line compilation of scala compiler plugins from
      # the workspace to be used in subsequent compile rounds like we do for annotation processors
      # with javac. This would require another GroupTask similar to AptCompile, but for scala.
      root, plugin_info_file = ZincUtils.write_plugin_info(self._plugin_info_dir, target)
      ret.append((root, [plugin_info_file]))
    return ret

  def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis,
              analysis_file):
    return self._zinc_utils.compile(args, classpath, sources, classes_output_dir,
                                    analysis_file, upstream_analysis)
def extra_products(self, target):
  """Return (root, [files]) pairs for the scalac plugin descriptor, if any."""
  produced = []
  # Only scalac-plugin targets with a declared classname yield a descriptor.
  is_plugin_target = target.is_scalac_plugin and target.classname
  if is_plugin_target:
    root, descriptor = ZincUtils.write_plugin_info(self._resources_dir, target)
    produced.append((root, [descriptor]))
  return produced
class ZincCompile(JvmCompile):
  """Compiles JVM sources with the zinc incremental compiler.

  Fix: ``_create_plugin_args`` called ``self.find_plugins()``, but the only
  definition in this class is ``_find_plugins`` — enabling any plugin would
  have raised AttributeError. The call now targets ``_find_plugins``.
  """

  _supports_concurrent_execution = True

  @staticmethod
  def write_plugin_info(resources_dir, target):
    """Write the scalac plugin descriptor XML for `target` under `resources_dir`.

    :returns: (root, path) — the directory to expose as a resource root, and
      the descriptor file written beneath it.
    """
    root = os.path.join(resources_dir, target.id)
    plugin_info_file = os.path.join(root, _PLUGIN_INFO_FILE)
    with safe_open(plugin_info_file, 'w') as f:
      f.write(textwrap.dedent("""
        <plugin>
          <name>{}</name>
          <classname>{}</classname>
        </plugin>
      """.format(target.plugin, target.classname)).strip())
    return root, plugin_info_file

  @classmethod
  def global_subsystems(cls):
    return super(ZincCompile, cls).global_subsystems() + (ScalaPlatform,)

  @classmethod
  def get_args_default(cls, bootstrap_option_values):
    # -S-prefixed args: presumably zinc's pass-through prefix to scalac — confirm.
    return ('-S-encoding', '-SUTF-8', '-S-g:vars')

  @classmethod
  def get_warning_args_default(cls):
    return ('-S-deprecation', '-S-unchecked')

  @classmethod
  def get_no_warning_args_default(cls):
    return ('-S-nowarn',)

  @classmethod
  def register_options(cls, register):
    super(ZincCompile, cls).register_options(register)
    register('--plugins', action='append', help='Use these scalac plugins.')
    register('--plugin-args', advanced=True, type=Options.dict, default={},
             help='Map from plugin name to list of arguments for that plugin.')
    register('--name-hashing', action='store_true', default=False, help='Use zinc name hashing.')
    cls.register_jvm_tool(register, 'zinc')
    cls.register_jvm_tool(register, 'plugin-jars')

  def __init__(self, *args, **kwargs):
    super(ZincCompile, self).__init__(*args, **kwargs)
    # Set up the zinc utils.
    color = self.get_options().colors
    self._zinc_utils = ZincUtils(context=self.context, nailgun_task=self,
                                 jvm_options=self._jvm_options, color=color,
                                 log_level=self.get_options().level)
    # A directory independent of any other classpath which can contain per-target
    # plugin resource files.
    self._plugin_info_dir = os.path.join(self.workdir, 'scalac-plugin-info')
    self._lazy_plugin_args = None

  def create_analysis_tools(self):
    """Build the zinc analysis parsing/merging helpers for this task."""
    return AnalysisTools(self.context.java_home, ZincAnalysisParser(), ZincAnalysis)

  def zinc_classpath(self):
    return self.tool_classpath('zinc')

  def compiler_classpath(self):
    return ScalaPlatform.global_instance().compiler_classpath(self.context.products)

  def extra_compile_time_classpath_elements(self):
    # Classpath entries necessary for our compiler plugins.
    return self.plugin_jars()

  def plugin_jars(self):
    """The classpath entries for jars containing code for enabled plugins."""
    if self.get_options().plugins:
      return self.tool_classpath('plugin-jars')
    else:
      return []

  def plugin_args(self):
    """Memoized scalac plugin flags derived from the enabled plugins."""
    if self._lazy_plugin_args is None:
      self._lazy_plugin_args = self._create_plugin_args()
    return self._lazy_plugin_args

  def name_hashing(self):
    return self.get_options().name_hashing

  def _create_plugin_args(self):
    """Build the -S-Xplugin:/-S-P: flags for every enabled plugin."""
    if not self.get_options().plugins:
      return []
    plugin_args = self.get_options().plugin_args
    # Was `self.find_plugins()`, which does not exist on this class.
    active_plugins = self._find_plugins()
    ret = []
    for name, jar in active_plugins.items():
      ret.append('-S-Xplugin:{}'.format(jar))
      for arg in plugin_args.get(name, []):
        ret.append('-S-P:{}:{}'.format(name, arg))
    return ret

  def _find_plugins(self):
    """Returns a map from plugin name to plugin jar."""
    # Allow multiple flags and also comma-separated values in a single flag.
    plugin_names = set([p for val in self.get_options().plugins for p in val.split(',')])
    plugins = {}
    buildroot = get_buildroot()
    for jar in self.plugin_jars():
      with open_zip(jar, 'r') as jarfile:
        try:
          with closing(jarfile.open(_PLUGIN_INFO_FILE, 'r')) as plugin_info_file:
            plugin_info = ElementTree.parse(plugin_info_file).getroot()
            if plugin_info.tag != 'plugin':
              raise TaskError(
                  'File {} in {} is not a valid scalac plugin descriptor'.format(
                      _PLUGIN_INFO_FILE, jar))
            name = plugin_info.find('name').text
            if name in plugin_names:
              if name in plugins:
                raise TaskError('Plugin {} defined in {} and in {}'.format(name, plugins[name],
                                                                           jar))
              # It's important to use relative paths, as the compiler flags get embedded in the
              # zinc analysis file, and we port those between systems via the artifact cache.
              plugins[name] = os.path.relpath(jar, buildroot)
        except KeyError:
          # Deliberate best-effort: a jar without a plugin descriptor entry is skipped.
          pass
    unresolved_plugins = plugin_names - set(plugins.keys())
    if unresolved_plugins:
      raise TaskError('Could not find requested plugins: {}'.format(list(unresolved_plugins)))
    return plugins

  # Invalidate caches if the toolchain changes.
  def _language_platform_version_info(self):
    ret = []
    # Go through all the bootstrap tools required to compile.
    targets = (ScalaPlatform.global_instance().tool_targets(self.context, 'scalac') +
               self.tool_targets(self.context, 'zinc'))
    for lib in (t for t in targets if isinstance(t, JarLibrary)):
      for jar in lib.jar_dependencies:
        ret.append(jar.cache_key())
    # We must invalidate on the set of plugins and their settings.
    ret.extend(self.plugin_args())
    # Invalidate if any compiler args change.
    # Note that while some args are obviously important for invalidation (e.g., the jvm target
    # version), some might not be. However we must invalidate on all the args, because Zinc
    # ignores analysis files if the compiler args they were created with are different from the
    # current ones, and does a full recompile. So if we allow cached artifacts with those analysis
    # files to be used, Zinc will do unnecessary full recompiles on subsequent edits.
    ret.extend(self._args)
    # Invalidate if use of name hashing changes.
    ret.append('name-hashing-{0}'.format('on' if self.get_options().name_hashing else 'off'))
    return ret

  def extra_products(self, target):
    """Override extra_products to produce a plugin information file."""
    ret = []
    if target.is_scalac_plugin and target.classname:
      # NB: We don't yet support explicit in-line compilation of scala compiler plugins from
      # the workspace to be used in subsequent compile rounds like we do for annotation processors
      # with javac. This would require another GroupTask similar to AptCompile, but for scala.
      root, plugin_info_file = self.write_plugin_info(self._plugin_info_dir, target)
      ret.append((root, [plugin_info_file]))
    return ret

  def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis,
              analysis_file):
    return self._zinc_utils.compile(args, classpath, sources, classes_output_dir,
                                    analysis_file, upstream_analysis)
class ScalaCompile(JvmCompile):
  """Compiles Scala sources, delegating the zinc invocation to ZincUtils.

  NOTE(review): reformatted from a whitespace-collapsed chunk.
  """

  _language = 'scala'
  _file_suffix = '.scala'

  @classmethod
  def get_args_default(cls, bootstrap_option_values):
    # -S-prefixed args: presumably zinc's pass-through prefix to scalac — confirm.
    return ('-S-encoding', '-SUTF-8', '-S-g:vars')

  @classmethod
  def get_warning_args_default(cls):
    return ('-S-deprecation', '-S-unchecked')

  @classmethod
  def get_no_warning_args_default(cls):
    return ('-S-nowarn',)

  @classmethod
  def register_options(cls, register):
    super(ScalaCompile, cls).register_options(register)
    # Note: Used in ZincUtils.
    # TODO: Revisit this. It's unintuitive for ZincUtils to reach back into the task for options.
    register('--plugins', action='append', help='Use these scalac plugins.')
    ZincUtils.register_options(register, cls.register_jvm_tool)

  def __init__(self, *args, **kwargs):
    super(ScalaCompile, self).__init__(*args, **kwargs)
    # Set up the zinc utils.
    color = not self.get_options().no_colors
    self._zinc_utils = ZincUtils(context=self.context, nailgun_task=self,
                                 jvm_options=self._jvm_options, color=color,
                                 log_level=self.get_options().level)

  def create_analysis_tools(self):
    """Build the zinc analysis parsing/merging helpers for this task."""
    return AnalysisTools(self.context.java_home, self.ivy_cache_dir,
                         ZincAnalysisParser(self._classes_dir), ZincAnalysis)

  def extra_compile_time_classpath_elements(self):
    # Classpath entries necessary for our compiler plugins.
    return self._zinc_utils.plugin_jars()

  # Invalidate caches if the toolchain changes.
  def platform_version_info(self):
    """Return the zinc toolchain key extended with any -S-target: scalac arg."""
    zinc_invalidation_key = self._zinc_utils.platform_version_info()
    jvm_target_version = ''
    # Check scalac args for jvm target version. Last matching arg wins.
    for arg in self._args:
      if arg.strip().startswith("-S-target:"):
        jvm_target_version = arg.strip()
    zinc_invalidation_key.append(jvm_target_version)
    return zinc_invalidation_key

  def extra_products(self, target):
    """For scalac-plugin targets, emit the generated plugin descriptor file."""
    ret = []
    if target.is_scalac_plugin and target.classname:
      root, plugin_info_file = ZincUtils.write_plugin_info(self._resources_dir, target)
      ret.append((root, [plugin_info_file]))
    return ret

  def compile(self, args, classpath, sources, classes_output_dir, analysis_file):
    # We have to treat our output dir as an upstream element, so zinc can find valid
    # analysis for previous partitions. We use the global valid analysis for the upstream.
    upstream = ({classes_output_dir: self._analysis_file}
                if os.path.exists(self._analysis_file) else {})
    return self._zinc_utils.compile(args, classpath + [self._classes_dir], sources,
                                    classes_output_dir, analysis_file, upstream)