def process_rst(self, workunit, page, output_path, source, fragmented):
    """Render a reStructuredText source to HTML and write it to output_path.

    Raises TaskError on render errors unless the ignore_failure option is set,
    in which case the error is only logged. Returns the written file's path.
    """
    source_path = os.path.join(get_buildroot(), source)
    with codecs.open(source_path, 'r', 'utf-8') as source_stream:
        rst_html, returncode = util().rst_to_html(source_stream.read(),
                                                  stderr=workunit.output('stderr'))

    if returncode != 0:
        message = '{} rendered with errors.'.format(source_path)
        if not self.get_options().ignore_failure:
            raise TaskError(message, exit_code=returncode, failed_targets=[page])
        self.context.log.warn(message)

    # Fragments use a partial template; standalone pages get the full one.
    template_name = 'fragment.mustache' if fragmented else 'page.mustache'
    template = resource_string(__name__, os.path.join(self._templates_dir, template_name))
    generator = Generator(template, md_html=rst_html)

    safe_mkdir(os.path.dirname(output_path))
    with codecs.open(output_path, 'w', 'utf-8') as output:
        generator.write(output)
    return output.name
def generate_ivysettings(self, ivy, publishedjars, publish_local=None):
    """Render an ivysettings.xml for publishing the given jars; return its path."""
    template_text = pkgutil.get_data(
        __name__, os.path.join(_TEMPLATES_RELPATH, 'ivysettings.mustache'))
    published_coords = [TemplateData(org=jar.org, name=jar.name) for jar in publishedjars]
    generator = Generator(template_text,
                          ivysettings=self.fetch_ivysettings(ivy),
                          dir=self.workdir,
                          cachedir=self.cachedir,
                          published=published_coords,
                          publish_local=publish_local)
    with safe_open(os.path.join(self.workdir, 'ivysettings.xml'), 'w') as wrapper:
        generator.write(wrapper)
    return wrapper.name
def generate_ivy(self, jar, version, publications):
    """Render an ivy.xml describing jar at version with the given publications; return its path."""
    template_text = pkgutil.get_data(
        __name__, os.path.join(_TEMPLATES_RELPATH, "ivy.xml.mustache")).decode()
    # Suppress fields matching the ivy defaults (same name as the jar, 'jar' extension).
    pubs = []
    for publication in publications:
        pubs.append(TemplateData(
            name=publication.name if publication.name != jar.name else None,
            classifier=publication.classifier,
            ext=publication.ext if publication.ext != "jar" else None,
        ))
    generator = Generator(template_text,
                          org=jar.org,
                          name=jar.name,
                          rev=version,
                          publications=pubs)
    with safe_open(os.path.join(self.workdir, "ivy.xml"), "w") as ivyxml:
        generator.write(ivyxml)
    return ivyxml.name
def _generate_ivy(self, targets, jars, excludes, ivyxml, confs):
    """Write an ivy.xml for the given jars/excludes on behalf of targets to ivyxml."""
    org, name = self.identify(targets)

    # Force is not transitive in ivy - it only works for dependencies pants knows about
    # directly (declared in BUILD files - present in generated ivy.xml). The user-level ivy
    # docs don't make this clear [1], but the source code docs do (see isForce docs) [2].
    # The override feature [3] does work transitively, so forced deps are also emitted as
    # overrides below.
    #
    # [1] http://ant.apache.org/ivy/history/2.3.0/settings/conflict-managers.html
    # [2] https://svn.apache.org/repos/asf/ant/ivy/core/branches/2.3.0/
    #     src/java/org/apache/ivy/core/module/descriptor/DependencyDescriptor.java
    # [3] http://ant.apache.org/ivy/history/2.3.0/ivyfile/override.html
    dependency_templates = [self._generate_jar_template(jar, confs) for jar in jars]
    override_templates = [self._generate_override_template(dep)
                          for dep in dependency_templates if dep.force]
    exclude_templates = [self._generate_exclude_template(exclude) for exclude in excludes]

    template_data = TemplateData(org=org,
                                 module=name,
                                 version='latest.integration',
                                 publications=None,
                                 configurations=confs,
                                 dependencies=dependency_templates,
                                 excludes=exclude_templates,
                                 overrides=override_templates)

    safe_mkdir(os.path.dirname(ivyxml))
    with open(ivyxml, 'w') as output:
        Generator(pkgutil.get_data(__name__, self._template_path),
                  root_dir=get_buildroot(),
                  lib=template_data).write(output)
def generate_ivysettings(self, ivy, publishedjars, publish_local=None):
    """Render an ivysettings.xml for publishing the given jars; return its path.

    :param ivy: an Ivy instance whose ivy_settings must point at a custom
      ivysettings.xml with writeable resolvers.
    :param publishedjars: the jars being published (org/name are templated in).
    :param publish_local: optional local publish destination.
    :raises TaskError: if no custom ivysettings.xml is configured.
    """
    if ivy.ivy_settings is None:
        # BUG FIX: the two implicitly-concatenated literals previously joined as
        # "...required forpublishing..." - a space was missing.
        raise TaskError(
            'A custom ivysettings.xml with writeable resolvers is required for '
            'publishing, but none was configured.')
    template_relpath = os.path.join('templates', 'jar_publish', 'ivysettings.mustache')
    template = pkgutil.get_data(__name__, template_relpath)
    with safe_open(os.path.join(self.workdir, 'ivysettings.xml'), 'w') as wrapper:
        generator = Generator(template,
                              ivysettings=ivy.ivy_settings,
                              dir=self.workdir,
                              cachedir=self.cachedir,
                              published=[TemplateData(org=jar.org, name=jar.name)
                                         for jar in publishedjars],
                              publish_local=publish_local)
        generator.write(wrapper)
        return wrapper.name
def write(self, target, path):
    """Render a pom.xml for target (internal and jar dependencies included) to path."""
    # Keyed dict de-dupes repeated coordinates while preserving first-seen order.
    deps = OrderedDict()
    for internal_dep in target_internal_dependencies(target):
        versioned = self._as_versioned_jar(internal_dep)
        deps[(versioned.org, versioned.name)] = self._internaldep(versioned, internal_dep)
    for jar in target.jar_dependencies:
        jar_template = self._jardep(jar)
        if jar_template:
            deps[(jar.org, jar.name, jar.classifier)] = jar_template

    target_jar = self._internaldep(self._as_versioned_jar(target), target)
    if target_jar:
        target_jar = target_jar.extend(dependencies=list(deps.values()))

    template_text = pkgutil.get_data(
        __name__, os.path.join(_TEMPLATES_RELPATH, 'pom.xml.mustache')).decode('utf-8')
    with safe_open(path, 'w') as output:
        Generator(template_text, project=target_jar).write(output)
def build_rpm(self, platform, vt, build_dir):
    """Build the RPMs for vt.target inside a Docker container for the given platform.

    Stages the spec file, local and remote sources, and dependency RPMs into build_dir,
    generates the build script and Dockerfile, builds and runs a throwaway Docker image,
    then extracts the resulting RPMs/SRPMs into vt.results_dir and the dist dir.

    :param platform: platform dict; 'id' and 'base' keys are read here.
    :param vt: the VersionedTarget being built.
    :param build_dir: scratch directory used as the Docker build context.
    """
    # Copy the spec file to the build directory.
    target = vt.target
    rpm_spec_path = os.path.join(get_buildroot(), target.rpm_spec)
    shutil.copy(rpm_spec_path, build_dir)
    spec_basename = os.path.basename(target.rpm_spec)

    # Resolve the build requirements.
    build_reqs = self.extract_build_reqs(rpm_spec_path)

    # TODO(mateo): There is a bit of an API conflation now that we have remote_source urls and targets.
    # Especially when you consider that there is also sources/dependencies.
    # The distinction between these things is going to be confusing, they should be unified or at least streamlined.

    # Copy sources to the buildroot. (TODO - unify these stanzas, they differ only in being relative vs absolute paths)
    local_sources = []
    for source in self._remote_source_targets(target):
        remote_source = RemoteSourceFetcher.Factory.scoped_instance(self).create(source)
        source_path = remote_source.path
        shutil.copy(source_path, build_dir)
        local_sources.append({
            'basename': os.path.basename(os.path.relpath(source_path, get_buildroot())),
        })
    for source_rel_path in target.sources_relative_to_buildroot():
        shutil.copy(os.path.join(get_buildroot(), source_rel_path), build_dir)
        local_sources.append({
            'basename': os.path.basename(source_rel_path),
        })

    # Setup information on remote sources.
    def convert_remote_source(remote_source):
        # Entries are either a bare url string or a (url, saved-basename) tuple.
        if isinstance(remote_source, string_types):
            return {'url': remote_source, 'basename': os.path.basename(remote_source)}
        elif isinstance(remote_source, tuple):
            return {'url': remote_source[0], 'basename': remote_source[1]}
        else:
            raise ValueError('invalid remote_source entry: {}'.format(remote_source))

    remote_sources = [convert_remote_source(rs) for rs in target.remote_sources]

    # Put together rpmbuild options for defines.
    rpmbuild_options = ''
    for key in sorted(target.defines.keys()):
        # Escape backslashes and double quotes so values survive the shell quoting.
        quoted_value = str(target.defines[key]).replace("\\", "\\\\").replace("\"", "\\\"")
        rpmbuild_options += ' --define="%{} {}"'.format(key, quoted_value)

    # Write the entry point script.
    entrypoint_generator = Generator(
        resource_string(__name__, 'build_rpm.sh.mustache'),
        spec_basename=spec_basename,
        pre_commands=[{'command': '/bin/bash -i'}] if self.get_options().shell_before else [],
        post_commands=[{'command': '/bin/bash -i'}] if self.get_options().shell_after else [],
        rpmbuild_options=rpmbuild_options,
    )
    entrypoint_path = os.path.join(build_dir, 'build_rpm.sh')
    with open(entrypoint_path, 'wb') as f:
        f.write(entrypoint_generator.render())
    # r-xr-xr-x so the container can execute the script. BUG FIX: the previous bare-octal
    # literal 0555 is a syntax error on Python 3; 0o555 is the same value on both 2 and 3.
    os.chmod(entrypoint_path, 0o555)

    # Copy globally-configured files into build directory.
    for context_file_path_template in self.get_options().docker_build_context_files:
        context_file_path = context_file_path_template.format(platform_id=platform['id'])
        shutil.copy(context_file_path, build_dir)

    # Determine setup commands.
    setup_commands = [{'command': command.format(platform_id=platform['id'])}
                      for command in self.get_options().docker_build_setup_commands]

    # Get the RPMs created by the target's RpmSpecTarget dependencies.
    rpm_products = []
    for dep in target.dependencies:
        if isinstance(dep, RpmSpecTarget):
            specs = self.context.products.get('rpms')[dep]
            if specs:
                for dirname, relpath in specs.items():
                    for rpmpath in relpath:
                        local_rpm = os.path.join(dirname, rpmpath)
                        shutil.copy(local_rpm, build_dir)
                        rpm_products.append({
                            'local_rpm': os.path.basename(rpmpath),
                        })

    # Write the Dockerfile for this build.
    dockerfile_generator = Generator(
        resource_string(__name__, 'dockerfile_template.mustache'),
        image=platform['base'],
        setup_commands=setup_commands,
        spec_basename=spec_basename,
        rpm_dependencies=rpm_products,
        build_reqs={'reqs': ' '.join(build_reqs)} if build_reqs else None,
        local_sources=local_sources,
        remote_sources=remote_sources,
    )
    dockerfile_path = os.path.join(build_dir, 'Dockerfile')
    with open(dockerfile_path, 'wb') as f:
        f.write(dockerfile_generator.render())

    # Generate a UUID to identify the image.
    uuid_identifier = uuid.uuid4()
    image_base_name = 'rpm-image-{}'.format(uuid_identifier)
    image_name = '{}:latest'.format(image_base_name)
    container_name = None
    try:
        # Build the Docker image that will build the RPMS.
        build_image_cmd = [
            self.get_options().docker,
            'build',
        ]
        if self.get_options().docker_build_no_cache:
            build_image_cmd.append('--no-cache')
        build_image_cmd.extend([
            '-t', image_name,
            build_dir,
        ])
        with self.docker_workunit(name='build-image', cmd=build_image_cmd) as workunit:
            self.context.log.debug('Executing: {}'.format(' '.join(build_image_cmd)))
            proc = subprocess.Popen(build_image_cmd,
                                    stdout=workunit.output('stdout'),
                                    stderr=subprocess.STDOUT)
            returncode = proc.wait()
            if returncode != 0:
                raise TaskError('Failed to build image, returncode={0}'.format(returncode))

        # Run the image in a container to actually build the RPMs.
        container_name = 'rpm-container-{}'.format(uuid_identifier)
        run_container_cmd = [
            self.get_options().docker,
            'run',
            '--attach=stderr',
            '--attach=stdout',
            '--name={}'.format(container_name),
        ]
        if self.get_options().shell_before or self.get_options().shell_after:
            run_container_cmd.extend(['-i', '-t'])
        run_container_cmd.extend([
            image_name,
        ])
        with self.docker_workunit(name='run-container', cmd=run_container_cmd) as workunit:
            proc = subprocess.Popen(run_container_cmd,
                                    stdout=workunit.output('stdout'),
                                    stderr=subprocess.STDOUT)
            returncode = proc.wait()
            if returncode != 0:
                raise TaskError('Failed to build RPM, returncode={0}'.format(returncode))

        # TODO(mateo): Convert this to output to a per-platform namespace to make it easy to upload all RPMs to the
        # correct platform (something like: `dist/rpmbuilder/centos7/x86_64/foo.rpm`).

        # Extract the built RPMs from the container.
        rpmbuild_prefix = 'home/rpmuser/rpmbuild/'
        extract_rpms_cmd = [
            self.get_options().docker,
            'export',
            container_name,
        ]
        with self.docker_workunit(name='extract-rpms', cmd=extract_rpms_cmd) as workunit:
            proc = subprocess.Popen(extract_rpms_cmd, stdout=subprocess.PIPE, stderr=None)
            with tarfile.open(fileobj=proc.stdout, mode='r|*') as tar:
                for entry in tar:
                    name = entry.name
                    if (name.startswith('home/rpmuser/rpmbuild/RPMS/')
                            or name.startswith('home/rpmuser/rpmbuild/SRPMS/')) and name.endswith('.rpm'):
                        # BUG FIX: this previously used name.lstrip(prefix), but lstrip strips
                        # a *character set*, not a prefix. Slicing removes exactly the prefix
                        # (safe - the startswith checks above guarantee it is present).
                        rel_rpm_path = name[len(rpmbuild_prefix):]
                        if rel_rpm_path:
                            rpmdir = os.path.dirname(rel_rpm_path)
                            safe_mkdir(os.path.join(vt.results_dir, rpmdir))
                            rpmfile = os.path.join(vt.results_dir, rel_rpm_path)
                            self.context.log.info('Extracting {}'.format(rel_rpm_path))
                            fileobj = tar.extractfile(entry)
                            # NOTE(mateo): I believe it has free streaming w/ context manager/stream mode. But this doesn't hurt!
                            with open(rpmfile, 'wb') as f:
                                self.write_stream(fileobj, f)
                            output_dir = os.path.join(
                                self.get_options().pants_distdir, 'rpmbuild', rpmdir)
                            safe_mkdir(output_dir)
                            shutil.copy(rpmfile, output_dir)
                            if name.startswith('home/rpmuser/rpmbuild/RPMS/'):
                                self.context.products.get('rpms').add(
                                    vt.target, vt.results_dir).append(rel_rpm_path)
                            else:
                                self.context.products.get('srpms').add(
                                    vt.target, vt.results_dir).append(rel_rpm_path)
            retcode = proc.wait()
            if retcode != 0:
                raise TaskError('Failed to extract RPMS')
            else:
                # Save the resulting image if asked. Eventually this image should be pushed to the registry every build,
                # and subsequent invocations on the published RPM should simply pull and extract.
                if self.get_options().commit_container_image:
                    commited_name = 'rpm-commited-image-{}'.format(uuid_identifier)
                    self.context.log.info('Saving container state as image...')
                    # BUG FIX: pass the image name to `docker commit` - previously the commit
                    # ran without a name, so the image the log message announced never existed.
                    docker_commit_cmd = [
                        self.get_options().docker, 'commit', container_name, commited_name
                    ]
                    with self.docker_workunit(name='commit-to-image',
                                              cmd=docker_commit_cmd) as workunit:
                        subprocess.call(docker_commit_cmd,
                                        stdout=workunit.output('stdout'),
                                        stderr=subprocess.STDOUT)
                    self.context.log.info(
                        'Saved container as image: {}\n'.format(commited_name))
    finally:
        # Remove the build container.
        if container_name and not self.get_options().keep_build_products:
            remove_container_cmd = [self.get_options().docker, 'rm', container_name]
            with self.docker_workunit(name='remove-build-container',
                                      cmd=remove_container_cmd) as workunit:
                subprocess.call(remove_container_cmd,
                                stdout=workunit.output('stdout'),
                                stderr=subprocess.STDOUT)
        # Remove the build image.
        if not self.get_options().keep_build_products:
            remove_image_cmd = [self.get_options().docker, 'rmi', image_name]
            with self.docker_workunit(name='remove-build-image',
                                      cmd=remove_image_cmd) as workunit:
                subprocess.call(remove_image_cmd,
                                stdout=workunit.output('stdout'),
                                stderr=subprocess.STDOUT)
def apply_template(output_path, template_relpath, **template_data):
    """Render the mustache template at template_relpath with template_data into output_path."""
    generator = Generator(pkgutil.get_data(__name__, template_relpath), **template_data)
    with safe_open(output_path, 'w') as output:
        generator.write(output)
def _compile_target(self, target):
    """'Compile' a python target by importing its modules in an isolated chroot.

    Returns the subprocess return code: 0 on success (or when there is nothing
    to import), non-zero when an import failed - i.e. a missing BUILD dependency.
    """
    # "Compiles" a target by forming an isolated chroot of its sources and transitive deps and then
    # attempting to import each of the target's sources in the case of a python library or else the
    # entry point in the case of a python binary.
    #
    # For a library with sources lib/core.py and lib/util.py a "compiler" main file would look like:
    #
    #   if __name__ == '__main__':
    #     import lib.core
    #     import lib.util
    #
    # For a binary with entry point lib.bin:main the "compiler" main file would look like:
    #
    #   if __name__ == '__main__':
    #     from lib.bin import main
    #
    # In either case the main file is executed within the target chroot to reveal missing BUILD
    # dependencies.
    with self.context.new_workunit(name=target.address.spec):
        modules = []
        if isinstance(target, PythonBinary):
            # Binaries import (or import-from) their declared entry point only.
            source = 'entry_point {}'.format(target.entry_point)
            components = target.entry_point.rsplit(':', 1)
            module = components[0]
            if len(components) == 2:
                # 'pkg.mod:func' form: exercise the from-import of the function.
                function = components[1]
                data = TemplateData(
                    source=source,
                    import_statement='from {} import {}'.format(module, function))
            else:
                data = TemplateData(
                    source=source,
                    import_statement='import {}'.format(module))
            modules.append(data)
        else:
            # Libraries import every python source, mapping file paths to module names.
            for path in target.sources_relative_to_source_root():
                if path.endswith('.py'):
                    if os.path.basename(path) == '__init__.py':
                        # Packages are imported by their directory name.
                        module_path = os.path.dirname(path)
                    else:
                        module_path, _ = os.path.splitext(path)
                    source = 'file {}'.format(os.path.join(target.target_base, path))
                    module = module_path.replace(os.path.sep, '.')
                    data = TemplateData(
                        source=source,
                        import_statement='import {}'.format(module))
                    modules.append(data)
        if not modules:
            # Nothing to eval, so a trivial compile success.
            return 0
        interpreter = self.select_interpreter_for_targets([target])
        if isinstance(target, PythonBinary):
            pexinfo, platforms = target.pexinfo, target.platforms
        else:
            pexinfo, platforms = None, None
        # Render the "compiler" main file described above.
        generator = Generator(pkgutil.get_data(__name__, self._EVAL_TEMPLATE_PATH),
                              chroot_parent=self.chroot_cache_dir,
                              modules=modules)
        executable_file_content = generator.render()
        with self.cached_chroot(
                interpreter=interpreter,
                pex_info=pexinfo,
                targets=[target],
                platforms=platforms,
                executable_file_content=executable_file_content) as chroot:
            pex = chroot.pex()
            with self.context.new_workunit(
                    name='eval',
                    labels=[
                        WorkUnitLabel.COMPILER, WorkUnitLabel.RUN, WorkUnitLabel.TOOL
                    ],
                    cmd=' '.join(pex.cmdline())) as workunit:
                # Run the generated main inside the chroot; a failing import surfaces here.
                returncode = pex.run(stdout=workunit.output('stdout'),
                                     stderr=workunit.output('stderr'))
                workunit.set_outcome(WorkUnit.SUCCESS if returncode == 0 else WorkUnit.FAILURE)
                if returncode != 0:
                    self.context.log.error('Failed to eval {}'.format(target.address.spec))
                return returncode
def _get_executable_file_content(self, exec_pex_parent, modules):
    """Render the eval entry-point source that imports each of modules under exec_pex_parent."""
    template_text = pkgutil.get_data(__name__, self._EVAL_TEMPLATE_PATH)
    return Generator(template_text,
                     chroot_parent=exec_pex_parent,
                     modules=modules).render()
def generate_pom(self, tgt, version, path):
    """Render a pom.xml for tgt at the given version and write it to path."""
    # Transitive closure of tgt, excluding tgt itself.
    closure = OrderedSet([t for t in tgt.closure() if t is not tgt])

    # Remove all transitive deps of Pom dependencies and then add back the pom_dep itself.
    pom_deps = [t for t in closure if isinstance(t, PomTarget)]
    for pom in pom_deps:
        closure -= pom.closure()
        closure.add(pom)

    dependencies = OrderedDict()
    for dep in closure:
        if isinstance(dep, PomTarget):
            provides = dep.payload.provides
            dependencies[(provides.org, provides.name)] = TemplateData(
                artifact_id=provides.name,
                group_id=provides.org,
                version=version,
                scope='compile',
            )
        elif isinstance(dep, Resources):
            # Resources contribute nothing to the pom.
            pass
        elif isinstance(dep, JarLibrary):
            for jar in dep.jar_dependencies:
                dependencies[(jar.org, jar.name, jar.classifier)] = TemplateData(
                    artifact_id=jar.name,
                    group_id=jar.org,
                    version=jar.rev,
                    scope='compile',
                )
        else:
            pass

    # TODO(mateo): This needs to be configurable - preferably as a dependency or at least an option.
    # We are now using it for internal libs - so this confusing and should be fixed soon-ish.
    target_jar = TemplateData(
        artifact_id=tgt.payload.provides.name,
        group_id=tgt.payload.provides.org,
        version=version,
        scope='compile',
        dependencies=dependencies.values(),
        # TODO(dan): These should really come from an OSSRHPublicationMetadata
        # instance, but it might have to be made a Target first so we don't
        # duplicate it for every PomTarget.
        name='fsq.io',
        description='Foursquare Opensource',
        url='http://github.com/foursquare/fsqio',
        licenses=[
            TemplateData(
                name='Apache',
                url='http://www.opensource.org/licenses/Apache-2.0',
            )
        ],
        scm=TemplateData(
            url='[email protected]:foursquare/spindle.git',
            # TODO(dan): Are these the right values?
            connection='scm:git:[email protected]:foursquare/fsqio.git',
            developer_connection='scm:git:[email protected]:foursquare/fsqio.git',
        ),
        developers=[
            TemplateData(
                id='paperstreet',
                name='Daniel Harrison',
                url='https://github.com/paperstreet',
            ),
            TemplateData(
                id='mateor',
                name='Mateo Rodriguez',
                url='https://github.com/mateor',
            ),
        ],
    )

    template_text = pkgutil.get_data(
        __name__, os.path.join(_TEMPLATES_RELPATH, 'pom.mustache'))
    with safe_open(path, 'wb') as output:
        Generator(template_text, project=target_jar).write(output)
def generate_project(self, project):
    """Render the IntelliJ .ipr/.iml for project, merging pre-existing custom
    components unless nomerge is set.

    Returns the .ipr path when the open option is set, else None.
    """
    def is_test(source_set):
        # Non test targets that otherwise live in test target roots (say a java_library), must
        # be marked as test for IDEA to correctly link the targets with the test code that uses
        # them. Therefore we check the base instead of the is_test flag.
        return source_set.source_base in SourceSet.TEST_BASES

    def create_content_root(source_set):
        # One content root per source set, with its excludes resolved to full paths.
        root_relative_path = os.path.join(source_set.source_base, source_set.path) \
            if source_set.path else source_set.source_base
        sources = TemplateData(path=root_relative_path,
                               package_prefix=source_set.path.replace(
                                   '/', '.') if source_set.path else None,
                               is_test=is_test(source_set))
        return TemplateData(
            path=root_relative_path,
            sources=[sources],
            exclude_paths=[
                os.path.join(source_set.source_base, x)
                for x in source_set.excludes
            ],
        )

    content_roots = [
        create_content_root(source_set) for source_set in project.sources
    ]
    if project.has_python:
        content_roots.extend(
            create_content_root(source_set)
            for source_set in project.py_sources)

    scala = None
    if project.has_scala:
        scala = TemplateData(
            language_level=self.scala_language_level,
            maximum_heap_size=self.scala_maximum_heap_size,
            fsc=self.fsc,
            compiler_classpath=project.scala_compiler_classpath)

    configured_module = TemplateData(
        root_dir=get_buildroot(),
        path=self.module_filename,
        content_roots=content_roots,
        bash=self.bash,
        python=project.has_python,
        scala=scala,
        internal_jars=[cp_entry.jar for cp_entry in project.internal_jars],
        internal_source_jars=[
            cp_entry.source_jar for cp_entry in project.internal_jars
            if cp_entry.source_jar
        ],
        external_jars=[cp_entry.jar for cp_entry in project.external_jars],
        external_javadoc_jars=[
            cp_entry.javadoc_jar for cp_entry in project.external_jars
            if cp_entry.javadoc_jar
        ],
        external_source_jars=[
            cp_entry.source_jar for cp_entry in project.external_jars
            if cp_entry.source_jar
        ],
        extra_components=[],
    )

    outdir = os.path.abspath(self.intellij_output_dir)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    configured_project = TemplateData(
        root_dir=get_buildroot(),
        outdir=outdir,
        modules=[configured_module],
        java=TemplateData(encoding=self.java_encoding,
                          maximum_heap_size=self.java_maximum_heap_size,
                          jdk=self.java_jdk,
                          language_level='JDK_1_%d' % self.java_language_level),
        resource_extensions=list(project.resource_extensions),
        scala=scala,
        checkstyle_suppression_files=','.join(
            project.checkstyle_suppression_files),
        checkstyle_classpath=';'.join(project.checkstyle_classpath),
        debug_port=project.debug_port,
        extra_components=[],
    )

    existing_project_components = None
    existing_module_components = None
    if not self.nomerge:
        # Grab the existing components, which may include customized ones.
        existing_project_components = self._parse_xml_component_elements(
            self.project_filename)
        existing_module_components = self._parse_xml_component_elements(
            self.module_filename)

    # Generate (without merging in any extra components).
    safe_mkdir(os.path.abspath(self.intellij_output_dir))
    ipr = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.project_template),
                  project=configured_project))
    iml = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.module_template),
                  module=configured_module))

    if not self.nomerge:
        # Get the names of the components we generated, and then delete the
        # generated files. Clunky, but performance is not an issue, and this
        # is an easy way to get those component names from the templates.
        extra_project_components = self._get_components_to_merge(
            existing_project_components, ipr)
        extra_module_components = self._get_components_to_merge(
            existing_module_components, iml)
        os.remove(ipr)
        os.remove(iml)

        # Generate again, with the extra components.
        ipr = self._generate_to_tempfile(
            Generator(pkgutil.get_data(__name__, self.project_template),
                      project=configured_project.extend(
                          extra_components=extra_project_components)))
        iml = self._generate_to_tempfile(
            Generator(pkgutil.get_data(__name__, self.module_template),
                      module=configured_module.extend(
                          extra_components=extra_module_components)))

    # Atomically (per-file) install the generated files over any previous versions.
    shutil.move(ipr, self.project_filename)
    shutil.move(iml, self.module_filename)

    print('\nGenerated project at %s%s' % (self.gen_project_workdir, os.sep))

    return self.project_filename if self.open else None
def generate_project(self, project):
    """Render the IntelliJ .ipr project and .iws workspace files for project;
    return the .ipr path."""
    def create_content_root(source_set):
        base = source_set.source_base
        relpath = os.path.join(base, source_set.path) if source_set.path else base
        # Resource-only roots carry an explicit IntelliJ content type.
        if source_set.resources_only:
            content_type = 'java-test-resource' if source_set.is_test else 'java-resource'
        else:
            content_type = ''
        prefix = source_set.path.replace('/', '.') if source_set.path else None
        source_data = TemplateData(path=relpath,
                                   package_prefix=prefix,
                                   is_test=source_set.is_test,
                                   content_type=content_type)
        return TemplateData(
            path=relpath,
            sources=[source_data],
            exclude_paths=[os.path.join(base, exclude) for exclude in source_set.excludes],
        )

    # NOTE(review): content_roots is built but not referenced below - presumably consumed
    # by templates in an earlier revision; kept to preserve behavior. TODO confirm.
    content_roots = [create_content_root(source_set) for source_set in project.sources]
    if project.has_python:
        content_roots.extend(create_content_root(source_set)
                             for source_set in project.py_sources)

    # Use the highest JVM source level found among the project's JVM targets.
    java_language_level = None
    for candidate in project.targets:
        if isinstance(candidate, JvmTarget):
            level = candidate.platform.source_level
            if java_language_level is None or java_language_level < level:
                java_language_level = level
    if java_language_level is not None:
        java_language_level = 'JDK_{0}_{1}'.format(*java_language_level.components[:2])

    outdir = os.path.abspath(self.intellij_output_dir)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    scm = get_scm()
    configured_project = TemplateData(
        root_dir=get_buildroot(),
        outdir=outdir,
        git_root=scm.worktree,
        java=TemplateData(encoding=self.java_encoding,
                          jdk=self.java_jdk,
                          language_level='JDK_1_{}'.format(self.java_language_level)),
        resource_extensions=list(project.resource_extensions),
        debug_port=project.debug_port,
        extra_components=[],
        java_language_level=java_language_level,
    )

    if not self.context.options.target_specs:
        raise TaskError("No targets specified.")
    abs_target_specs = [os.path.join(get_buildroot(), spec)
                        for spec in self.context.options.target_specs]
    configured_workspace = TemplateData(
        targets=json.dumps(abs_target_specs),
        project_path=os.path.join(get_buildroot(), abs_target_specs[0].split(':')[0]),
        idea_plugin_version=IDEA_PLUGIN_VERSION)

    # Generate (without merging in any extra components).
    safe_mkdir(os.path.abspath(self.intellij_output_dir))
    ipr = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.project_template),
                  project=configured_project))
    iws = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.workspace_template),
                  workspace=configured_workspace))

    self._outstream.write(self.gen_project_workdir)

    shutil.move(ipr, self.project_filename)
    shutil.move(iws, self.workspace_filename)
    return self.project_filename
def generate_ivy(cls, targets, jars, excludes, ivyxml, confs,
                 resolve_hash_name=None, pinned_artifacts=None):
    """Write an ivy.xml to ivyxml resolving jars for targets, honoring pinned artifacts."""
    if resolve_hash_name:
        org = IvyUtils.INTERNAL_ORG_NAME
        name = resolve_hash_name
    else:
        org, name = cls.identify(targets)

    extra_configurations = [conf for conf in confs if conf and conf != 'default']

    # Group the requested jars by (org, name); each group becomes one templated dependency.
    # (Renamed from the original's rebinding of `jars` inside its own loop.)
    jars_by_key = OrderedDict()
    for jar in jars:
        jars_by_key.setdefault((jar.org, jar.name), []).append(jar)

    manager = JarDependencyManagement.global_instance()
    artifact_set = PinnedJarArtifactSet(pinned_artifacts)  # Copy, because we're modifying it.
    for group in jars_by_key.values():
        for i, dep in enumerate(group):
            direct_coord = M2Coordinate.create(dep)
            managed_coord = artifact_set[direct_coord]
            if direct_coord.rev != managed_coord.rev:
                # It may be necessary to actually change the version number of the jar we want to
                # resolve here, because overrides do not apply directly (they are exclusively
                # transitive). This is actually a good thing, because it gives us more control
                # over what happens.
                coord = manager.resolve_version_conflict(managed_coord,
                                                         direct_coord,
                                                         force=dep.force)
                dep = copy.copy(dep)
                dep.rev = coord.rev
                group[i] = dep
            elif dep.force:
                # If this dependency is marked as 'force' and there is no version conflict, use
                # the normal pants behavior for 'force'.
                artifact_set.put(direct_coord)

    dependencies = [cls._generate_jar_template(group) for group in jars_by_key.values()]

    # As it turns out force is not transitive - it only works for dependencies pants knows about
    # directly (declared in BUILD files - present in generated ivy.xml). The user-level ivy docs
    # don't make this clear [1], but the source code docs do (see isForce docs) [2]. I was able to
    # edit the generated ivy.xml and use the override feature [3] though and that does work
    # transitively as you'd hope.
    #
    # [1] http://ant.apache.org/ivy/history/2.3.0/settings/conflict-managers.html
    # [2] https://svn.apache.org/repos/asf/ant/ivy/core/branches/2.3.0/
    #     src/java/org/apache/ivy/core/module/descriptor/DependencyDescriptor.java
    # [3] http://ant.apache.org/ivy/history/2.3.0/ivyfile/override.html
    overrides = [cls._generate_override_template(coord) for coord in artifact_set]
    exclude_templates = [cls._generate_exclude_template(exclude) for exclude in excludes]

    template_data = TemplateData(org=org,
                                 module=name,
                                 extra_configurations=extra_configurations,
                                 dependencies=dependencies,
                                 excludes=exclude_templates,
                                 overrides=overrides)

    template_text = pkgutil.get_data(
        __name__, os.path.join('templates', 'ivy_utils', 'ivy.mustache'))
    with safe_open(ivyxml, 'w') as output:
        Generator(template_text, lib=template_data).write(output)
def generate_project(self, project):
    """Render the IntelliJ .ipr/.iml for project, merging pre-existing custom
    components unless nomerge is set.

    Returns the .ipr path when the open option is set, else None.
    """
    def create_content_root(source_set):
        # One content root per source set, with excludes resolved to full paths.
        root_relative_path = os.path.join(source_set.source_base, source_set.path) \
            if source_set.path else source_set.source_base
        if self.get_options().infer_test_from_siblings:
            is_test = IdeaGen._sibling_is_test(source_set)
        else:
            is_test = source_set.is_test
        sources = TemplateData(
            path=root_relative_path,
            package_prefix=source_set.path.replace('/', '.') if source_set.path else None,
            is_test=is_test
        )
        return TemplateData(
            path=root_relative_path,
            sources=[sources],
            exclude_paths=[os.path.join(source_set.source_base, x)
                           for x in source_set.excludes],
        )

    content_roots = [create_content_root(source_set) for source_set in project.sources]
    if project.has_python:
        content_roots.extend(create_content_root(source_set)
                             for source_set in project.py_sources)

    scala = None
    if project.has_scala:
        scala = TemplateData(
            language_level=self.scala_language_level,
            maximum_heap_size=self.scala_maximum_heap_size,
            fsc=self.fsc,
            compiler_classpath=project.scala_compiler_classpath
        )

    # Folders IDEA should ignore: maven target dirs (optionally) plus configured extras.
    exclude_folders = []
    if self.get_options().exclude_maven_target:
        exclude_folders += IdeaGen._maven_targets_excludes(get_buildroot())
    exclude_folders += self.get_options().exclude_folders

    configured_module = TemplateData(
        root_dir=get_buildroot(),
        path=self.module_filename,
        content_roots=content_roots,
        bash=self.bash,
        python=project.has_python,
        scala=scala,
        internal_jars=[cp_entry.jar for cp_entry in project.internal_jars],
        internal_source_jars=[cp_entry.source_jar for cp_entry in project.internal_jars
                              if cp_entry.source_jar],
        external_jars=[cp_entry.jar for cp_entry in project.external_jars],
        external_javadoc_jars=[cp_entry.javadoc_jar for cp_entry in project.external_jars
                               if cp_entry.javadoc_jar],
        external_source_jars=[cp_entry.source_jar for cp_entry in project.external_jars
                              if cp_entry.source_jar],
        extra_components=[],
        exclude_folders=exclude_folders,
    )

    outdir = os.path.abspath(self.intellij_output_dir)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    configured_project = TemplateData(
        root_dir=get_buildroot(),
        outdir=outdir,
        git_root=Git.detect_worktree(),
        modules=[configured_module],
        java=TemplateData(
            encoding=self.java_encoding,
            maximum_heap_size=self.java_maximum_heap_size,
            jdk=self.java_jdk,
            language_level = 'JDK_1_%d' % self.java_language_level
        ),
        resource_extensions=list(project.resource_extensions),
        scala=scala,
        checkstyle_classpath=';'.join(project.checkstyle_classpath),
        debug_port=project.debug_port,
        extra_components=[],
    )

    existing_project_components = None
    existing_module_components = None
    if not self.nomerge:
        # Grab the existing components, which may include customized ones.
        existing_project_components = self._parse_xml_component_elements(self.project_filename)
        existing_module_components = self._parse_xml_component_elements(self.module_filename)

    # Generate (without merging in any extra components).
    safe_mkdir(os.path.abspath(self.intellij_output_dir))
    ipr = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.project_template),
                  project = configured_project))
    iml = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.module_template),
                  module = configured_module))

    if not self.nomerge:
        # Get the names of the components we generated, and then delete the
        # generated files. Clunky, but performance is not an issue, and this
        # is an easy way to get those component names from the templates.
        extra_project_components = self._get_components_to_merge(existing_project_components, ipr)
        extra_module_components = self._get_components_to_merge(existing_module_components, iml)
        os.remove(ipr)
        os.remove(iml)

        # Generate again, with the extra components.
        ipr = self._generate_to_tempfile(
            Generator(pkgutil.get_data(__name__, self.project_template),
                      project = configured_project.extend(extra_components = extra_project_components)))
        iml = self._generate_to_tempfile(
            Generator(pkgutil.get_data(__name__, self.module_template),
                      module = configured_module.extend(extra_components = extra_module_components)))

    self.context.log.info('Generated IntelliJ project in {directory}'
                          .format(directory=self.gen_project_workdir))

    # Install the generated files over any previous versions.
    shutil.move(ipr, self.project_filename)
    shutil.move(iml, self.module_filename)
    return self.project_filename if self.open else None
def gen_file(template_file_name, **mustache_kwargs):
    """Render the named mustache template with mustache_kwargs to a tempfile; return its path."""
    template_text = pkgutil.get_data(__name__, template_file_name).decode()
    return self._generate_to_tempfile(Generator(template_text, **mustache_kwargs))
def _write_ivy_xml_file(cls, ivyxml, template_data, template_relpath):
    """Render the ivy template at template_relpath with template_data into ivyxml."""
    generator = Generator(pkgutil.get_data(__name__, template_relpath),
                          lib=template_data)
    with safe_open(ivyxml, 'w') as output:
        generator.write(output)
def process_md(self, output_path, source, fragmented, url_builder, css=None):
    """Render a markdown source file to HTML at output_path; return the written path.

    :param output_path: file to write the rendered HTML to.
    :param source: markdown source path relative to the build root.
    :param fragmented: if true use the fragment template (inlined style defs);
      otherwise render a full page linking the given css.
    :param url_builder: callable mapping a page target to an (alias, url) pair.
    :param css: stylesheet path linked by full-page output; unused when fragmented.
    """
    def parse_url(spec):
        # Links matching PANTS_LINK name a pants page target; resolve them via
        # url_builder. Anything else passes through unchanged.
        match = self.PANTS_LINK.match(spec)
        if match:
            address = Address.parse(match.group(1), relative_to=get_buildroot())
            page = self.context.build_graph.get_target(address)
            anchor = match.group(2) or ''
            if not page:
                raise TaskError(
                    'Invalid markdown link to pants target: "{}". '.format(
                        match.group(1)) +
                    'Is your page missing a dependency on this target?')
            alias, url = url_builder(page)
            return alias, url + anchor
        else:
            return spec, spec

    def build_url(label):
        # Wikilink labels may take the 'alias|link' form; otherwise the link is its own alias.
        components = label.split('|', 1)
        if len(components) == 1:
            return parse_url(label.strip())
        else:
            alias, link = components
            _, url = parse_url(link.strip())
            return alias, url

    wikilinks = WikilinksExtension(build_url)

    safe_mkdir(os.path.dirname(output_path))
    with codecs.open(output_path, 'w', 'utf-8') as output:
        source_path = os.path.join(get_buildroot(), source)
        with codecs.open(source_path, 'r', 'utf-8') as source_stream:
            md_html = markdown.markdown(
                source_stream.read(),
                extensions=[
                    'codehilite(guess_lang=False)', 'extra', 'tables', 'toc',
                    wikilinks,
                    IncludeExcerptExtension(source_path)
                ],
            )
            if fragmented:
                # Inline the pygments style defs so the fragment is self-contained.
                style_css = (HtmlFormatter(
                    style=self.code_style)).get_style_defs('.codehilite')
                template = resource_string(
                    __name__,
                    os.path.join(self._templates_dir, 'fragment.mustache'))
                generator = Generator(template,
                                      style_css=style_css,
                                      md_html=md_html)
                generator.write(output)
            else:
                # Full pages link the stylesheet relative to the output file.
                style_link = os.path.relpath(css, os.path.dirname(output_path))
                template = resource_string(
                    __name__,
                    os.path.join(self._templates_dir, 'page.mustache'))
                generator = Generator(template,
                                      style_link=style_link,
                                      md_html=md_html)
                generator.write(output)
    return output.name
def _generate_module_files(self, configured_modules):
    """Render one module file per configured module; return (name, tempfile path) pairs."""
    rendered = []
    for module_name, module_data in configured_modules.items():
        generator = Generator(pkgutil.get_data(__name__, self.module_template),
                              module=module_data)
        rendered.append((module_name, self._generate_to_tempfile(generator)))
    return rendered