def main(args):
    """pomgen entry point: crawls BUILD.pom packages and writes pom files.

    Args:
        args: raw command line arguments (parsed by _parse_arguments).

    Raises:
        Exception: if no artifact-producing BUILD.pom packages are found
            at the requested package path.
    """
    args = _parse_arguments(args)
    repo_root = common.get_repo_root(args.repo_root)
    cfg = config.load(repo_root, args.verbose)
    pom_content = pomcontentm.PomContent()
    if args.pom_description is not None:
        pom_content.description = args.pom_description
    if args.verbose:
        logger.debug("Global pom content: %s" % pom_content)
    mvn_install_info = maveninstallinfo.MavenInstallInfo(
        cfg.maven_install_paths)
    ws = workspace.Workspace(repo_root,
                             cfg.excluded_dependency_paths,
                             cfg.all_src_exclusions,
                             mvn_install_info,
                             pom_content)
    packages = argsupport.get_all_packages(repo_root, args.package)
    packages = ws.filter_artifact_producing_packages(packages)
    if len(packages) == 0:
        raise Exception(
            "Did not find any artifact producing BUILD.pom packages at [%s]" % args.package)

    spider = crawler.Crawler(ws, cfg.pom_template, args.verbose)
    result = spider.crawl(packages,
                          follow_monorepo_references=args.recursive,
                          force_release=args.force)

    if len(result.pomgens) == 0:
        logger.info("No releases are required. pomgen will not generate any pom files. To force pom generation, use pomgen's --force option.")
    else:
        output_dir = _get_output_dir(args)

        for pomgen in result.pomgens:
            pom_dest_dir = os.path.join(output_dir, pomgen.bazel_package)
            # exist_ok avoids a check-then-create race and a redundant stat
            os.makedirs(pom_dest_dir, exist_ok=True)

            # the goldfile pom is actually a pomgen metadata file, so we
            # write it using the mdfiles module, which ensures it goes
            # into the proper location within the specified bazel package
            if args.pom_goldfile:
                # NOTE: named generated_pom_xml (not pom_content) to avoid
                # shadowing the global PomContent instance above
                generated_pom_xml = pomgen.gen(pom.PomContentType.GOLDFILE)
                pom_goldfile_path = mdfiles.write_file(
                    generated_pom_xml, output_dir, pomgen.bazel_package,
                    mdfiles.POM_XML_RELEASED_FILE_NAME)
                logger.info("Wrote pom goldfile to [%s]" % pom_goldfile_path)
            else:
                generated_pom_xml = pomgen.gen(pom.PomContentType.RELEASE)
                pom_path = os.path.join(pom_dest_dir, "pom.xml")
                _write_file(pom_path, generated_pom_xml)
                logger.info("Wrote pom file to [%s]" % pom_path)
                for i, companion_pomgen in enumerate(
                        pomgen.get_companion_generators()):
                    generated_pom_xml = companion_pomgen.gen(
                        pom.PomContentType.RELEASE)
                    pom_path = os.path.join(pom_dest_dir,
                                            "pom_companion%s.xml" % i)
                    _write_file(pom_path, generated_pom_xml)
                    logger.info("Wrote companion pom file to [%s]" % pom_path)
def test_compute_transitive_closure__multiple_leaf_nodes(self):
    """Transitive dep closure with two leaf nodes under one root.

    a1 references both a2 and a3.

    a3 has ext deps: d1, d2
    a2 has ext deps: d1, d3
    a1 has ext deps: d4

    The expected transitive closures of deps are:
        a3: d1, d2
        a2: d1, d3
        a1: d4, d1, d3, d2 (a2 and a3 also, but not part of this test)
    """
    root = self._build_node("a1", "a/b/c")
    leaf_a2 = self._build_node("a2", "d/e/f", parent_node=root)
    leaf_a3 = self._build_node("a3", "g/h/i", parent_node=root)
    root.children = (leaf_a2, leaf_a3,)
    crawler = crawlerm.Crawler(workspace=self._get_workspace(),
                               pom_template=None)
    # register the 3rd party deps each node references directly
    d1 = self._get_3rdparty_dep("com:d1:1.0.0", "d1")
    d2 = self._get_3rdparty_dep("com:d2:1.0.0", "d2")
    self._associate_dep(crawler, leaf_a3, (d1, d2))
    d3 = self._get_3rdparty_dep("com:d3:1.0.0", "d3")
    self._associate_dep(crawler, leaf_a2, (d1, d3))
    d4 = self._get_3rdparty_dep("com:d4:1.0.0", "d4")
    self._associate_dep(crawler, root, (d4,))
    # simulate the state the crawler builds up while crawling
    crawler.leafnodes = (leaf_a2, leaf_a3,)

    target_to_all_deps = crawler._compute_transitive_closures_of_deps()

    # each node's closure must contain exactly these deps, in this order
    expectations = (
        (leaf_a3, [d1, d2]),
        (leaf_a2, [d1, d3]),
        (root, [d4, d1, d3, d2]),
    )
    for node, expected_deps in expectations:
        actual_deps = self._get_deps_for_node(node, target_to_all_deps)
        self.assertEqual(len(expected_deps), len(actual_deps))
        for i, expected_dep in enumerate(expected_deps):
            self.assertEqual(expected_dep, actual_deps[i])
def test_compute_transitive_closure__multiple_parent_nodes(self):
    """Transitive dep closure when one node has two parents.

    a1 references a10
    a2 also references a10

    a10 has ext deps: d10
    a2 has ext deps: d2
    a1 has ext deps: d1

    The expected transitive closures of deps are:
        a10: d10
        a2: d2, d10
        a1: d1, d10
    """
    a1_node = self._build_node("a1", "d/e/f", parent_node=None)
    a2_node = self._build_node("a2", "g/h/i", parent_node=None)
    a10_node = self._build_node("a10", "a/b/c")
    a10_node.parents = (a1_node, a2_node,)
    a1_node.children = (a10_node, )
    a2_node.children = (a10_node, )
    crawler = crawlerm.Crawler(workspace=self._get_workspace(),
                               pom_template=None)
    # setup 3rd party deps
    d1 = self._get_3rdparty_dep("com:d1:1.0.0", "d1")
    d2 = self._get_3rdparty_dep("com:d2:1.0.0", "d2")
    d10 = self._get_3rdparty_dep("com:d10:1.0.0", "d10")
    self._associate_dep(crawler, a10_node, (d10, ))
    # fix: the original passed (d2), which is just d2 in parentheses, NOT
    # a 1-element tuple; use (d2, ) for consistency with the other calls
    # (behavior is the same - _associate_dep also accepts a bare dep)
    self._associate_dep(crawler, a2_node, (d2, ))
    self._associate_dep(crawler, a1_node, (d1, ))
    # setup necessary crawler state to simulate previous crawling
    crawler.leafnodes = (a10_node, )

    target_to_all_deps = crawler._compute_transitive_closures_of_deps()

    a10_deps = self._get_deps_for_node(a10_node, target_to_all_deps)
    self.assertEqual(1, len(a10_deps))
    self.assertEqual(d10, a10_deps[0])
    a2_deps = self._get_deps_for_node(a2_node, target_to_all_deps)
    self.assertEqual(2, len(a2_deps))
    self.assertEqual(d2, a2_deps[0])
    self.assertEqual(d10, a2_deps[1])
    a1_deps = self._get_deps_for_node(a1_node, target_to_all_deps)
    self.assertEqual(2, len(a1_deps))
    self.assertEqual(d1, a1_deps[0])
    self.assertEqual(d10, a1_deps[1])
def test_non_default_package_ref__allowed_for_skip_pom_gen_mode(self):
    """A non-default package ref is legal when the target skips pom gen.

    lib/a2 is allowed to ref lib/a1:foo because lib/a1 has
    pom_gen_mode = "skip".

    https://github.com/salesforce/pomgen/tree/master/examples/skip-artifact-generation
    """
    repo_root = tempfile.mkdtemp("monorepo")
    self._write_library_root(repo_root, "lib")
    self._add_artifact(repo_root, "lib/a1", "skip", deps=[])
    self._add_artifact(repo_root, "lib/a2", "template",
                       deps=["//lib/a1:foo"])
    ws = workspace.Workspace(repo_root, "", [],
                             exclusions.src_exclusions())

    # must not raise - the "skip" mode target may be referenced by name
    crawlerm.Crawler(ws, pom_template="").crawl(["lib/a2"])
def test_non_default_package_ref__not_allowed(self):
    """Crawling fails when a non-default package target is referenced.

    lib/a2 cannot reference lib/a1:foo - only default package refs are
    allowed.
    """
    repo_root = tempfile.mkdtemp("monorepo")
    self._write_library_root(repo_root, "lib")
    self._add_artifact(repo_root, "lib/a1", "template", deps=[])
    self._add_artifact(repo_root, "lib/a2", "template",
                       deps=["//lib/a1:foo"])
    ws = workspace.Workspace(repo_root, "", [],
                             exclusions.src_exclusions())
    crawler = crawlerm.Crawler(ws, pom_template="")

    with self.assertRaises(Exception) as ctx:
        crawler.crawl(["lib/a2"])

    self.assertIn("[lib/a2] can only reference [lib/a1]",
                  str(ctx.exception))
def test_default_package_ref_explicit(self):
    """An explicit default-package target ref is accepted.

    lib/a2 can reference lib/a1:a1.
    """
    repo_root = tempfile.mkdtemp("monorepo")
    self._write_library_root(repo_root, "lib")
    self._add_artifact(repo_root, "lib/a1", "template", deps=[])
    self._add_artifact(repo_root, "lib/a2", "template",
                       deps=["//lib/a1:a1"])
    ws = workspace.Workspace(repo_root, "", [],
                             exclusions.src_exclusions())

    result = crawlerm.Crawler(ws, pom_template="").crawl(["lib/a2"])

    # a single root node for lib/a2, with lib/a1 hanging off it
    self.assertEqual(1, len(result.nodes))
    root_node = result.nodes[0]
    self.assertEqual("lib/a2", root_node.artifact_def.bazel_package)
    self.assertEqual(1, len(root_node.children))
    self.assertEqual("lib/a1",
                     root_node.children[0].artifact_def.bazel_package)
def setUp(self):
    """Creates the shared test repo layout and the crawler under test.

    All tests start out with 3 libraries::

        A -> B -> C
         \-> C

    Each library has 2 artifacts, a1 and a2. All references to other
    libraries are through the a1 artifact. The a2 artifact does not
    reference anything.

    The directory structure is:
        libs/<lib-root-dir>/a1/MVN-INF/BUILD.pom
        libs/<lib-root-dir>/a2/MVN-INF/BUILD.pom

    Versions:
        A: 1.0.0
        B: 2.0.0
        C: 3.0.0

    Released Versions:
        A: 0.0.1
        B: 0.0.2
        C: 0.0.3
    """
    self.repo_root_path = tempfile.mkdtemp("monorepo")
    self._add_libraries(self.repo_root_path)
    self._setup_repo(self.repo_root_path)
    self._write_all_build_pom_released(self.repo_root_path)
    # remember the original cwd so tearDown can restore it
    self.cwd = os.getcwd()
    os.chdir(self.repo_root_path)
    test_workspace = workspace.Workspace(
        self.repo_root_path, [],
        exclusions.src_exclusions(),
        maven_install_info=maveninstallinfo.NOOP,
        pom_content=pomcontent.NOOP)
    self.crawler = crawler.Crawler(test_workspace, pom_template="")
def test_dependencies_with_skip_mode(self):
    """Deps of a "skip" mode artifact are pushed up to its parent.

    The artifact a1 references the artifact x1 as a dep: a1 -> x1.
    x1 has pom generation mode set to "skip", so x1's deps must be
    added to a1's deps.
    """
    owner_node = self._build_node("a1", "a/b/c",
                                  pom_generation_mode=pomgenmode.DYNAMIC)
    skipped_node = self._build_node("x1", "x/y/z",
                                    pom_generation_mode=pomgenmode.SKIP,
                                    parent_node=owner_node)
    owner_node.children = (skipped_node, )
    crawler = crawlerm.Crawler(workspace=self._get_workspace(),
                               pom_template=None)
    # associate one dep with each node
    guava = self._get_3rdparty_dep("com.google:guava:20.0", "guava")
    self._associate_dep(crawler, owner_node, guava)
    force = self._get_3rdparty_dep("com.force:common:1.0.0", "force")
    self._associate_dep(crawler, skipped_node, force)
    # setup necessary crawler state to simulate previous crawling
    crawler.leafnodes = (skipped_node, )
    # sanity - the force dep is not referenced by the a1 artifact yet
    self.assertNotIn(force,
                     self._get_associated_deps(crawler, owner_node))

    # run the logic that pushes deps owned by "skip" artifacts up
    crawler._push_transitives_to_parent()

    owner_deps = self._get_associated_deps(crawler, owner_node)
    self.assertEqual(2, len(owner_deps))
    self.assertIn(guava, owner_deps)
    self.assertIn(force, owner_deps)
ws.dependency_metadata.get_ancestors(external_dependency) ]) ext_deps.append(attrs) if args.filter is not None: # filter AFTER building result dict so that filtering on ancestors # is possible query = instancequery.InstanceQuery(args.filter) ext_deps = query(ext_deps) print(_to_json(ext_deps)) crawl_artifact_dependencies = (args.library_release_plan_tree or args.library_release_plan_json or args.artifact_release_plan) if crawl_artifact_dependencies: crawler = crawler.Crawler(ws, cfg.pom_template, args.verbose) artifact_result = crawler.crawl(packages, force_release=args.force) root_library_nodes = libaggregator.get_libraries_to_release( artifact_result.nodes) if args.library_release_plan_tree: pretty_tree_output = "" for library_node in root_library_nodes: pretty_tree_output = "%s\n%s\n" % (pretty_tree_output, library_node.pretty_print()) print(pretty_tree_output) else: if args.library_release_plan_json: all_libs_json = [] incremental_rel_enabled = cfg.transitives_versioning_mode == "counter"
def test_compute_transitive_closure__ext_deps_some_listed_transitives(
        self):
    """Closure computation also pulls in unlisted (registered) transitives.

    a1 references both a2 and a3.

    a3 has ext deps: d1, d2, t1, d3
    a2 has ext deps: t1, t3, d1
    a1 has ext deps: t3, t2, d4

    ADDITIONALLY, the ext deps have the following transitives (which
    are not listed in the BUILD file):
        d1: t1, t2
        d2: t3

    The expected transitive closures of deps are:
        a3: d1, t2, d2, t3, t1, d3
        a2: t1, t3, d1, t2
        a1: t3, t2, d4, t1, d1, d2, d3 (a2 and a3 also, but not tested)
    """
    root = self._build_node("a1", "a/b/c")
    child_a2 = self._build_node("a2", "d/e/f", parent_node=root)
    child_a3 = self._build_node("a3", "g/h/i", parent_node=root)
    root.children = (child_a2, child_a3,)
    ws = self._get_workspace()
    crawler = crawlerm.Crawler(workspace=ws, pom_template=None)
    # declared 3rd party deps, plus transitives registered out-of-band
    d1 = self._get_3rdparty_dep("com:d1:1.0.0", "d1")
    t1 = self._get_3rdparty_dep("com:t1:1.0.0", "t1")
    t2 = self._get_3rdparty_dep("com:t2:1.0.0", "t2")
    ws.dependency_metadata.register_transitives(d1, [t1, t2])
    d2 = self._get_3rdparty_dep("com:d2:1.0.0", "d2")
    t3 = self._get_3rdparty_dep("com:t3:1.0.0", "t3")
    ws.dependency_metadata.register_transitives(d2, [t3])
    d3 = self._get_3rdparty_dep("com:d3:1.0.0", "d3")
    self._associate_dep(crawler, child_a3, (d1, d2, t1, d3))
    self._associate_dep(crawler, child_a2, (t1, t3, d1))
    d4 = self._get_3rdparty_dep("com:d4:1.0.0", "d4")
    self._associate_dep(crawler, root, (t3, t2, d4,))
    # simulate the state the crawler builds up while crawling
    crawler.leafnodes = (child_a2, child_a3,)

    target_to_all_deps = crawler._compute_transitive_closures_of_deps()

    # each node's closure must contain exactly these deps, in this order
    expectations = (
        (child_a3, [d1, t2, d2, t3, t1, d3]),
        (child_a2, [t1, t3, d1, t2]),
        (root, [t3, t2, d4, t1, d1, d2, d3]),
    )
    for node, expected_deps in expectations:
        actual_deps = self._get_deps_for_node(node, target_to_all_deps)
        self.assertEqual(len(expected_deps), len(actual_deps))
        for i, expected_dep in enumerate(expected_deps):
            self.assertEqual(expected_dep, actual_deps[i])