def get_top_key_updates(graph, repo, version_table=None): """ Returns the update tuples needed to build the top key.""" graph.rep_invariant() edges = graph.get_top_key_edges() coalesced_edges = [] ordinals = {} for edge in edges: assert edge[2] >= 0 and edge[2] < 2 assert edge[2] == 0 or (edge[0], edge[1], 0) in edges ordinal = ordinals.get(edge[:2]) if ordinal is None: ordinal = 0 coalesced_edges.append(edge[:2]) ordinals[edge[:2]] = max(ordinal, edge[2]) if version_table is None: version_table = build_version_table(graph, repo) ret = [] for edge in coalesced_edges: parents, latest = get_rollup_bounds(graph, repo, edge[0] + 1, edge[1], version_table) length = graph.get_length(edge) assert len(graph.edge_table[edge][1:]) > 0 #(length, parent_rev, latest_rev, (CHK, ...)) update = (length, parents, latest, graph.edge_table[edge][1:], True, True) ret.append(update) # Stuff additional remote heads into first update. result = get_rollup_bounds(graph, repo, 0, graph.latest_index, version_table) for head in ret[0][2]: if not head in result[1]: print "Expected head not in all_heads!", head[:12] assert False #top_update = list(ret[0]) #top_update[2] = tuple(all_heads) #ret[0] = tuple(top_update) ret[0] = list(ret[0]) ret[0][2] = tuple(result[1]) ret[0] = tuple(ret[0]) return ret
def test_minimal_graph(repo_dir, version_list, file_name=None): """ Smoke test minimal_graph(). """ ui_ = ui.ui() if file_name is None: graph, repo, cache = test_update_real(repo_dir, version_list, True) open('/tmp/latest_graph.txt', 'wb').write(graph_to_string(graph)) else: repo = hg.repository(ui_, repo_dir) cache = BundleCache(repo, ui_, CACHE_DIR) cache.remove_files() graph = parse_graph(open(file_name, 'rb').read()) print "--- from file: %s ---" % file_name print graph_to_string(graph) version_map = build_version_table(graph, repo) # Incomplete, but better than nothing. # Verify that the chk bounds are the same after shrinking. chk_bounds = {} initial_edges = graph.get_top_key_edges() for edge in initial_edges: chk_bounds[graph.get_chk(edge)] = ( get_rollup_bounds(graph, repo, edge[0] + 1, edge[1], version_map)) print "CHK BOUNDS:" for value in chk_bounds: print value print " ", chk_bounds[value] print sizes = (512, 1024, 2048, 4096, 16 * 1024) for max_size in sizes: try: print "MAX:", max(version_map.values()) small = minimal_graph(graph, repo, version_map, max_size) print "--- size == %i" % max_size print graph_to_string(small) small.rep_invariant(repo, True) # Full check chks = chk_bounds.keys() path = small.get_top_key_edges() print "TOP KEY EDGES:" print path for edge in path: # MUST rebuild the version map because the indices changed. new_map = build_version_table(small, repo) bounds = get_rollup_bounds(small, repo, edge[0] + 1, edge[1], new_map) print "CHK:", small.get_chk(edge) print "BOUNDS: ", bounds assert chk_bounds[small.get_chk(edge)] == bounds print "DELETING: ", edge, small.get_chk(edge) chks.remove(small.get_chk(edge)) assert len(chks) == 0 except UpdateGraphException, err: print "IGNORED: ", err
def test_minimal_graph(repo_dir, version_list, file_name=None):
    """ Smoke test minimal_graph().

    Builds (or loads from file_name) an update graph, shrinks it to
    several candidate sizes, and checks that the CHK rollup bounds
    are unchanged by the shrinking.
    """
    ui_ = ui.ui()
    if file_name is None:
        # Build a real graph from the repo and snapshot it for reruns.
        graph, repo, cache = test_update_real(repo_dir, version_list, True)
        open('/tmp/latest_graph.txt', 'wb').write(graph_to_string(graph))
    else:
        # Load a previously snapshotted graph from a file.
        repo = hg.repository(ui_, repo_dir)
        cache = BundleCache(repo, ui_, CACHE_DIR)
        cache.remove_files()
        graph = parse_graph(open(file_name, 'rb').read())
        print "--- from file: %s ---" % file_name
        print graph_to_string(graph)
    version_map = build_version_table(graph, repo)

    # Incomplete, but better than nothing.
    # Verify that the chk bounds are the same after shrinking.
    chk_bounds = {}
    initial_edges = graph.get_top_key_edges()
    for edge in initial_edges:
        chk_bounds[graph.get_chk(edge)] = (get_rollup_bounds(
            graph, repo, edge[0] + 1, edge[1], version_map))

    print "CHK BOUNDS:"
    for value in chk_bounds:
        print value
        print " ", chk_bounds[value]
    print
    # Includes sizes too small to hold the graph (expected failures).
    sizes = (512, 1024, 2048, 4096, 16 * 1024)
    for max_size in sizes:
        try:
            print "MAX:", max(version_map.values())
            small = minimal_graph(graph, repo, version_map, max_size)
            print "--- size == %i" % max_size
            print graph_to_string(small)

            small.rep_invariant(repo, True) # Full check
            chks = chk_bounds.keys()
            path = small.get_top_key_edges()
            print "TOP KEY EDGES:"
            print path
            for edge in path:
                # MUST rebuild the version map because the indices changed.
                new_map = build_version_table(small, repo)
                bounds = get_rollup_bounds(small, repo, edge[0] + 1,
                                           edge[1], new_map)
                print "CHK:", small.get_chk(edge)
                print "BOUNDS: ", bounds
                assert chk_bounds[small.get_chk(edge)] == bounds
                print "DELETING: ", edge, small.get_chk(edge)
                chks.remove(small.get_chk(edge))
            # Every original CHK must have been matched exactly once.
            assert len(chks) == 0
        except UpdateGraphException, err:
            # Graphs that can't fit under max_size are expected failures.
            print "IGNORED: ", err
def get_top_key_updates(graph, repo, version_table=None):
    """ Returns the update tuples needed to build the top key.

    Each tuple is:
    (length, parent_revs, latest_revs, (CHK, ...), True, True)
    The first tuple's latest_revs are widened to the heads of the
    whole graph.
    """
    graph.rep_invariant()

    edges = graph.get_top_key_edges()

    # Coalesce (start, end, ordinal) edges into unique (start, end)
    # pairs, preserving first-seen order.
    coalesced_edges = []
    ordinals = {}
    for edge in edges:
        # Only ordinals 0 and 1 are legal, and an ordinal 1 edge must
        # be accompanied by the corresponding ordinal 0 edge.
        assert edge[2] >= 0 and edge[2] < 2
        assert edge[2] == 0 or (edge[0], edge[1], 0) in edges
        ordinal = ordinals.get(edge[:2])
        if ordinal is None:
            # First occurrence of this (start, end) pair.
            ordinal = 0
            coalesced_edges.append(edge[:2])
        ordinals[edge[:2]] = max(ordinal, edge[2])

    if version_table is None:
        version_table = build_version_table(graph, repo)

    ret = []
    for edge in coalesced_edges:
        # Rollup bounds over the (INCLUSIVE) index range the edge covers.
        parents, latest = get_rollup_bounds(graph, repo, edge[0] + 1,
                                            edge[1], version_table)
        length = graph.get_length(edge)
        # Every edge must carry at least one CHK.
        assert len(graph.edge_table[edge][1:]) > 0

        #(length, parent_rev, latest_rev, (CHK, ...))
        update = (length, parents, latest,
                  graph.edge_table[edge][1:], True, True)
        ret.append(update)

    # Stuff additional remote heads into first update.
    result = get_rollup_bounds(graph, repo, 0, graph.latest_index,
                               version_table)

    for head in ret[0][2]:
        if not head in result[1]:
            print "Expected head not in all_heads!", head[:12]
            assert False

    #top_update = list(ret[0])
    #top_update[2] = tuple(all_heads)
    #ret[0] = tuple(top_update)

    # Tuples are immutable; rebuild the first update in place.
    ret[0] = list(ret[0])
    ret[0][2] = tuple(result[1])
    ret[0] = tuple(ret[0])

    return ret
def make_bundle(self, graph, version_table, index_pair, out_file=None):
    """ Create an hg bundle file corresponding to the edge in graph.

    Returns a (size, file_name_or_None, index_pair) tuple.  A temp
    file is used (and removed on the way out) when the caller does
    not supply out_file.
    """
    assert index_pair is not None
    self.graph = graph

    # Serve straight from the cache when we already have this bundle.
    hit = self.get_cached_bundle(index_pair, out_file)
    if hit is not None:
        return hit

    own_temp_file = out_file is None
    if own_temp_file:
        out_file = make_temp_file(self.base_dir)
    try:
        parents, heads = get_rollup_bounds(self.graph,
                                           self.repo,
                                           index_pair[0] + 1, # INCLUSIVE
                                           index_pair[1],
                                           version_table)

        # Hmmm... ok to suppress mercurial noise here.
        self.ui_.pushbuffer()
        try:
            commands.bundle(self.ui_, self.repo, out_file, None,
                            base=list(parents), rev=list(heads))
        finally:
            self.ui_.popbuffer()

        if self.enabled:
            self.update_cache(index_pair, out_file)

        # Only report the file name when the caller owns the file.
        file_field = None if own_temp_file else out_file
        return (os.path.getsize(out_file), file_field, index_pair)
    finally:
        # Clean up our own temp file, even on failure.
        if own_temp_file and os.path.exists(out_file):
            os.remove(out_file)
def make_bundle(self, graph, version_table, index_pair, out_file=None): """ Create an hg bundle file corresponding to the edge in graph. """ # print "INDEX_PAIR:", index_pair assert not index_pair is None self.graph = graph cached = self.get_cached_bundle(index_pair, out_file) if not cached is None: # print "make_bundle -- cache hit: ", index_pair return cached delete_out_file = out_file is None if out_file is None: out_file = make_temp_file(self.base_dir) try: parents, heads = get_rollup_bounds( self.graph, self.repo, index_pair[0] + 1, index_pair[1], version_table # INCLUSIVE ) # Hmmm... ok to suppress mercurial noise here. self.ui_.pushbuffer() try: # print 'PARENTS:', list(parents) # print 'HEADS:', list(heads) commands.bundle(self.ui_, self.repo, out_file, None, base=list(parents), rev=list(heads)) finally: self.ui_.popbuffer() if self.enabled: self.update_cache(index_pair, out_file) file_field = None if not delete_out_file: file_field = out_file return (os.path.getsize(out_file), file_field, index_pair) finally: if delete_out_file and os.path.exists(out_file): os.remove(out_file)
def test_rollup(): """ Smoke test get_rollup_bounds(). """ repo, ui_ = setup_rollup_test_repo(TST_REPO_DIR) dump_changesets(repo) cache = BundleCache(repo, ui_, CACHE_DIR) cache.remove_files() graph = UpdateGraph() chks = fake_chks() # 0 Single changeset edges = graph.update(repo, ui_, ['716c293192c7', ], cache) set_chks(graph, edges, chks) # 1 Multiple changesets edges = graph.update(repo, ui_, ['076aec9f34c9', ], cache) set_chks(graph, edges, chks) # 2 Multiple heads, single base edges = graph.update(repo, ui_, ['62a72a238ffc', '4409936ef21f'], cache) set_chks(graph, edges, chks) # 3 Multiple bases, single head edges = graph.update(repo, ui_, ['a2c749d99d54', ], cache) set_chks(graph, edges, chks) # 4 edges = graph.update(repo, ui_, ['f6248cd464e3', ], cache) set_chks(graph, edges, chks) # 5 edges = graph.update(repo, ui_, ['fd1e6832820b', ], cache) set_chks(graph, edges, chks) # 6 edges = graph.update(repo, ui_, ['7429bf7b11f5', ], cache) set_chks(graph, edges, chks) # 7 edges = graph.update(repo, ui_, ['fcc2e90dbf0d', ], cache) set_chks(graph, edges, chks) # 8 edges = graph.update(repo, ui_, ['03c047d036ca', ], cache) set_chks(graph, edges, chks) # 9 edges = graph.update(repo, ui_, ['2f6c65f64ce5', ], cache) set_chks(graph, edges, chks) print print graph_to_string(graph) version_map = build_version_table(graph, repo) dump_version_map(version_map) assert version_map == EXPECTED_VERSION_MAP graph.rep_invariant(repo, True) # Verify contiguousness. print "From earliest..." for index in range(0, graph.latest_index + 1): parents, heads = get_rollup_bounds(graph, repo, 0, index, version_map) print "(%i->%i): %s" % (0, index, versions_str(heads)) print " ", versions_str(parents) print "To latest..." 
for index in range(0, graph.latest_index + 1): parents, heads = get_rollup_bounds(graph, repo, index, graph.latest_index, version_map) print "(%i->%i): %s" % (index, graph.latest_index, versions_str(heads)) print " ", versions_str(parents) # Empty try: get_rollup_bounds(graph, repo, FIRST_INDEX, FIRST_INDEX, version_map) except AssertionError: # Asserted as expected for to_index == FIRST_INDEX print "Got expected assertion." # Rollup of one changeset index. result = get_rollup_bounds(graph, repo, 0, 0, version_map) check_result(result, (('000000000000', ), ('716c293192c7',))) # Rollup of multiple changeset index. result = get_rollup_bounds(graph, repo, 1, 1, version_map) check_result(result, (('716c293192c7', ), ('076aec9f34c9',))) # Rollup of with multiple heads. result = get_rollup_bounds(graph, repo, 1, 2, version_map) check_result(result, (('716c293192c7', ), ('4409936ef21f','62a72a238ffc'))) # Rollup of with multiple bases. result = get_rollup_bounds(graph, repo, 3, 4, version_map) check_result(result, (('4409936ef21f', '62a72a238ffc', ), ('f6248cd464e3',))) # Rollup with head pulled in from earlier base. result = get_rollup_bounds(graph, repo, 3, 8, version_map) print result check_result(result, (('4409936ef21f', '62a72a238ffc', ), ('03c047d036ca', '7429bf7b11f5'))) # Rollup after remerge to a single head. result = get_rollup_bounds(graph, repo, 0, 9, version_map) print result check_result(result, (('000000000000', ), ('2f6c65f64ce5', )))
def test_rollup():
    """ Smoke test get_rollup_bounds().

    Builds a ten-index graph over a fixed test repo and checks rollup
    parent/head bounds over various index ranges.
    """
    repo, ui_ = setup_rollup_test_repo(TST_REPO_DIR)
    dump_changesets(repo)
    cache = BundleCache(repo, ui_, CACHE_DIR)
    cache.remove_files()
    graph = UpdateGraph()
    chks = fake_chks()
    # 0 Single changeset
    edges = graph.update(repo, ui_, [ '716c293192c7', ], cache)
    set_chks(graph, edges, chks)

    # 1 Multiple changesets
    edges = graph.update(repo, ui_, [ '076aec9f34c9', ], cache)
    set_chks(graph, edges, chks)

    # 2 Multiple heads, single base
    edges = graph.update(repo, ui_, ['62a72a238ffc', '4409936ef21f'],
                         cache)
    set_chks(graph, edges, chks)

    # 3 Multiple bases, single head
    edges = graph.update(repo, ui_, [ 'a2c749d99d54', ], cache)
    set_chks(graph, edges, chks)

    # 4
    edges = graph.update(repo, ui_, [ 'f6248cd464e3', ], cache)
    set_chks(graph, edges, chks)

    # 5
    edges = graph.update(repo, ui_, [ 'fd1e6832820b', ], cache)
    set_chks(graph, edges, chks)

    # 6
    edges = graph.update(repo, ui_, [ '7429bf7b11f5', ], cache)
    set_chks(graph, edges, chks)

    # 7
    edges = graph.update(repo, ui_, [ 'fcc2e90dbf0d', ], cache)
    set_chks(graph, edges, chks)

    # 8
    edges = graph.update(repo, ui_, [ '03c047d036ca', ], cache)
    set_chks(graph, edges, chks)

    # 9
    edges = graph.update(repo, ui_, [ '2f6c65f64ce5', ], cache)
    set_chks(graph, edges, chks)

    print
    print graph_to_string(graph)
    version_map = build_version_table(graph, repo)
    dump_version_map(version_map)
    assert version_map == EXPECTED_VERSION_MAP
    graph.rep_invariant(repo, True)

    # Verify contiguousness.
    print "From earliest..."
    for index in range(0, graph.latest_index + 1):
        parents, heads = get_rollup_bounds(graph, repo, 0, index,
                                           version_map)
        print "(%i->%i): %s" % (0, index, versions_str(heads))
        print " ", versions_str(parents)

    print "To latest..."
    for index in range(0, graph.latest_index + 1):
        parents, heads = get_rollup_bounds(graph, repo, index,
                                           graph.latest_index,
                                           version_map)
        print "(%i->%i): %s" % (index, graph.latest_index,
                                versions_str(heads))
        print " ", versions_str(parents)

    # Empty
    try:
        get_rollup_bounds(graph, repo, FIRST_INDEX, FIRST_INDEX,
                          version_map)
    except AssertionError:
        # Asserted as expected for to_index == FIRST_INDEX
        print "Got expected assertion."

    # Rollup of one changeset index.
    result = get_rollup_bounds(graph, repo, 0, 0, version_map)
    check_result(result, (('000000000000', ), ('716c293192c7', )))

    # Rollup of multiple changeset index.
    result = get_rollup_bounds(graph, repo, 1, 1, version_map)
    check_result(result, (('716c293192c7', ), ('076aec9f34c9', )))

    # Rollup of with multiple heads.
    result = get_rollup_bounds(graph, repo, 1, 2, version_map)
    check_result(result, (('716c293192c7', ),
                          ('4409936ef21f', '62a72a238ffc')))

    # Rollup of with multiple bases.
    result = get_rollup_bounds(graph, repo, 3, 4, version_map)
    check_result(result, (( '4409936ef21f', '62a72a238ffc', ),
                          ('f6248cd464e3', )))

    # Rollup with head pulled in from earlier base.
    result = get_rollup_bounds(graph, repo, 3, 8, version_map)
    print result
    check_result(result, (( '4409936ef21f', '62a72a238ffc', ),
                          ('03c047d036ca', '7429bf7b11f5')))

    # Rollup after remerge to a single head.
    result = get_rollup_bounds(graph, repo, 0, 9, version_map)
    print result
    check_result(result, (('000000000000', ), ('2f6c65f64ce5', )))