Example #1
def test_presentation():
    """ Smoke test graph_to_string and parse_graph. """
    graph = UpdateGraph()
    print "EMPTY:"
    print graph_to_string(graph)
    print "Adding index: ", graph.add_index([
        VER_1,
    ], [
        VER_2,
    ])
    print "Adding index: ", graph.add_index([
        VER_2,
    ], [VER_3, VER_4])
    print "Adding index: ", graph.add_index([VER_3, VER_2], [
        VER_5,
    ])
    chks = fake_chks()
    graph.add_edge((-1, 0), (100, chks.next()))
    graph.add_edge((1, 2), (200, chks.next()))
    graph.add_edge((-1, 2), (500, chks.next()))
    text = graph_to_string(graph)
    print
    print text
    print
    graph1 = parse_graph(text)
    print
    text1 = graph_to_string(graph1)
    print "Round trip:"
    print text1
    assert text == text1
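The test above builds a small three-index graph with three explicit CHK edges and then checks that serializing with graph_to_string() and re-parsing with parse_graph() is stable. The invariant it exercises can be packaged as a helper; the sketch below assumes only those two functions from this module and is not part of the original test suite.

def assert_round_trip(graph):
    """ Check that graph_to_string()/parse_graph() round-trip a graph
    to identical text, as the smoke test above asserts. """
    text = graph_to_string(graph)
    assert graph_to_string(parse_graph(text)) == text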
Example #2
    def _graph_request_done(self, client, msg, candidate):
        """ INTERNAL: Handle requests for the graph. """
        #print "CANDIDATE:", candidate
        #print "_graph_request_done -- ", candidate[6]
        if not candidate[6]:
            return False

        if not self.parent.ctx.graph is None:
            self.finished_candidates.append(candidate)
            return True

        if msg[0] == 'AllData':
            in_file = open(client.in_params.file_name, 'rb')
            try:
                data = in_file.read()
                # REDFLAG: log graph?
                if self.parent.params.get('DUMP_GRAPH', False):
                    self.parent.ctx.ui_.status("--- Raw Graph Data ---\n")
                    self.parent.ctx.ui_.status(data)
                    self.parent.ctx.ui_.status("\n---\n")
                graph = parse_graph(data)
                self._handle_dump_canonical_paths(graph)
                self._set_graph(graph)
                assert(not self.freenet_heads is None)
                if self.parent.ctx.has_versions(self.freenet_heads):
                    # Handle case where we are up to date but the heads list
                    # didn't fit in the top key.
                    self.parent.ctx.ui_.status('Freenet heads: %s\n' %
                                           ' '.join([ver[:12] for ver in
                                                     self.freenet_heads]))
                    self.parent.ctx.ui_.warn("All remote heads are already "
                                             + "in the local repo.\n")
                    self.parent.transition(self.success_state)
                    return True
                self._reevaluate()
            finally:
                in_file.close()
            self.finished_candidates.append(candidate)
        else:
            if not self.top_key_tuple is None:
                pending, current, next, finished = self._known_chks()
                all_chks = pending.union(current).union(next).union(finished)

                for chk in self.top_key_tuple[0]:
                    if not chk in all_chks and chk != candidate[0]:
                        # REDFLAG: Test this code path.
                        # Queue the other graph chk.
                        candidate = [chk, 0, False, None, None, None, True]
                        # Run next!
                        #print "QUEUEING OTHER GRAPH CHK"
                        # append retries immediately. Hmmm...
                        self.current_candidates.append(candidate)
                        break

        # Careful, can drive state transition.
        self._handle_graph_failure(candidate)
        return True
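The handler above reads candidate lists positionally: index 6 flags a graph request, index 0 is the CHK, and (in the leave() example further down) index 5 holds the request result. The snippet does not document the remaining slots, so the helper below is only a sketch that mirrors the literal seven-element list queued above; the function name is illustrative and not from the original source.

def make_graph_candidate(chk):
    """ Build a candidate entry for requesting a graph CHK, mirroring
    the literal list queued in the handler above. Only indices 0
    (the CHK), 5 (the result) and 6 (the graph-request flag) are read
    by the surrounding code; the other slots are left untouched. """
    return [chk, 0, False, None, None, None, True]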
Example #3
    def _graph_request_done(self, client, msg, candidate):
        """ INTERNAL: Handle requests for the graph. """
        #print "CANDIDATE:", candidate
        #print "_graph_request_done -- ", candidate[6]
        if not candidate[6]:
            return False

        if not self.parent.ctx.graph is None:
            self.finished_candidates.append(candidate)
            return True

        if msg[0] == 'AllData':
            in_file = open(client.in_params.file_name, 'rb')
            try:
                data = in_file.read()
                # REDFLAG: log graph?
                if self.parent.params.get('DUMP_GRAPH', False):
                    self.parent.ctx.ui_.status("--- Raw Graph Data ---\n")
                    self.parent.ctx.ui_.status(data)
                    self.parent.ctx.ui_.status("\n---\n")
                graph = parse_graph(data)
                self._handle_dump_canonical_paths(graph)
                self._set_graph(graph)
                assert (not self.freenet_heads is None)
                if self.parent.ctx.has_versions(self.freenet_heads):
                    # Handle case where we are up to date but the heads list
                    # didn't fit in the top key.
                    self.parent.ctx.ui_.status(
                        'Freenet heads: %s\n' %
                        ' '.join([ver[:12] for ver in self.freenet_heads]))
                    self.parent.ctx.ui_.warn("All remote heads are already " +
                                             "in the local repo.\n")
                    self.parent.transition(self.success_state)
                    return True
                self._reevaluate()
            finally:
                in_file.close()
            self.finished_candidates.append(candidate)
        else:
            if not self.top_key_tuple is None:
                pending, current, next, finished = self._known_chks()
                all_chks = pending.union(current).union(next).union(finished)

                for chk in self.top_key_tuple[0]:
                    if not chk in all_chks and chk != candidate[0]:
                        # REDFLAG: Test this code path.
                        # Queue the other graph chk.
                        candidate = [chk, 0, False, None, None, None, True]
                        # Run next!
                        #print "QUEUEING OTHER GRAPH CHK"
                        # append retries immediately. Hmmm...
                        self.current_candidates.append(candidate)
                        break

        # Careful, can drive state transition.
        self._handle_graph_failure(candidate)
        return True
Example #4
def test_minimal_graph(repo_dir, version_list, file_name=None):
    """ Smoke test minimal_graph(). """
    ui_ = ui.ui()
    if file_name is None:
        graph, repo, cache = test_update_real(repo_dir, version_list, True)
        open('/tmp/latest_graph.txt', 'wb').write(graph_to_string(graph))
    else:
        repo = hg.repository(ui_, repo_dir)
        cache = BundleCache(repo, ui_, CACHE_DIR)
        cache.remove_files()
        graph = parse_graph(open(file_name, 'rb').read())
        print "--- from file: %s ---" % file_name
        print graph_to_string(graph)
    version_map = build_version_table(graph, repo)

    # Incomplete, but better than nothing.
    # Verify that the chk bounds are the same after shrinking.
    chk_bounds = {}
    initial_edges = graph.get_top_key_edges()
    for edge in initial_edges:
        chk_bounds[graph.get_chk(edge)] = (
            get_rollup_bounds(graph, repo, edge[0] + 1, edge[1], version_map))

    print "CHK BOUNDS:"
    for value in chk_bounds:
        print value
        print "  ", chk_bounds[value]
    print
    sizes = (512, 1024, 2048, 4096, 16 * 1024)
    for max_size in sizes:
        try:
            print "MAX:", max(version_map.values())
            small = minimal_graph(graph, repo, version_map, max_size)
            print "--- size == %i" % max_size
            print graph_to_string(small)

            small.rep_invariant(repo, True) # Full check
            chks = chk_bounds.keys()
            path = small.get_top_key_edges()
            print "TOP KEY EDGES:"
            print path
            for edge in path:
                # MUST rebuild the version map because the indices changed.
                new_map = build_version_table(small, repo)
                bounds = get_rollup_bounds(small, repo, edge[0] + 1,
                                           edge[1], new_map)
                print "CHK:", small.get_chk(edge)
                print "BOUNDS: ", bounds
                assert chk_bounds[small.get_chk(edge)] == bounds
                print "DELETING: ", edge, small.get_chk(edge)
                chks.remove(small.get_chk(edge))
            assert len(chks) == 0
        except UpdateGraphException, err:
            print "IGNORED: ", err
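The loop above recomputes the rollup bounds of every top key edge of each shrunken graph and compares them, keyed by CHK, against the bounds taken from the full graph. The same check can be factored into a helper; the sketch below reuses only names visible in this test (build_version_table(), get_rollup_bounds(), get_top_key_edges(), get_chk()) and is not part of the original module.

def check_top_key_bounds(full_graph, small_graph, repo):
    """ Assert that shrinking preserved the rollup bounds of the top
    key edges, keyed by CHK. """
    full_map = build_version_table(full_graph, repo)
    expected = {}
    for edge in full_graph.get_top_key_edges():
        expected[full_graph.get_chk(edge)] = get_rollup_bounds(
            full_graph, repo, edge[0] + 1, edge[1], full_map)
    # The version map MUST be rebuilt because shrinking renumbers indices.
    small_map = build_version_table(small_graph, repo)
    for edge in small_graph.get_top_key_edges():
        bounds = get_rollup_bounds(small_graph, repo, edge[0] + 1,
                                   edge[1], small_map)
        assert expected[small_graph.get_chk(edge)] == bounds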
Example #5
def test_minimal_graph(repo_dir, version_list, file_name=None):
    """ Smoke test minimal_graph(). """
    ui_ = ui.ui()
    if file_name is None:
        graph, repo, cache = test_update_real(repo_dir, version_list, True)
        open('/tmp/latest_graph.txt', 'wb').write(graph_to_string(graph))
    else:
        repo = hg.repository(ui_, repo_dir)
        cache = BundleCache(repo, ui_, CACHE_DIR)
        cache.remove_files()
        graph = parse_graph(open(file_name, 'rb').read())
        print "--- from file: %s ---" % file_name
        print graph_to_string(graph)
    version_map = build_version_table(graph, repo)

    # Incomplete, but better than nothing.
    # Verify that the chk bounds are the same after shrinking.
    chk_bounds = {}
    initial_edges = graph.get_top_key_edges()
    for edge in initial_edges:
        chk_bounds[graph.get_chk(edge)] = (get_rollup_bounds(
            graph, repo, edge[0] + 1, edge[1], version_map))

    print "CHK BOUNDS:"
    for value in chk_bounds:
        print value
        print "  ", chk_bounds[value]
    print
    sizes = (512, 1024, 2048, 4096, 16 * 1024)
    for max_size in sizes:
        try:
            print "MAX:", max(version_map.values())
            small = minimal_graph(graph, repo, version_map, max_size)
            print "--- size == %i" % max_size
            print graph_to_string(small)

            small.rep_invariant(repo, True)  # Full check
            chks = chk_bounds.keys()
            path = small.get_top_key_edges()
            print "TOP KEY EDGES:"
            print path
            for edge in path:
                # MUST rebuild the version map because the indices changed.
                new_map = build_version_table(small, repo)
                bounds = get_rollup_bounds(small, repo, edge[0] + 1, edge[1],
                                           new_map)
                print "CHK:", small.get_chk(edge)
                print "BOUNDS: ", bounds
                assert chk_bounds[small.get_chk(edge)] == bounds
                print "DELETING: ", edge, small.get_chk(edge)
                chks.remove(small.get_chk(edge))
            assert len(chks) == 0
        except UpdateGraphException, err:
            print "IGNORED: ", err
Example #6
    def leave(self, to_state):
        """ Implementation of State virtual. """
        if to_state.name == self.success_state:
            # Set the graph from the result
            graph = None
            for candidate in self.ordered:
                result = candidate[5]
                if not result is None and result[0] == 'AllData':
                    graph = parse_graph(result[2])

            assert not graph is None

            self.parent.ctx.graph = graph

            # Allow pending requests to run to completion.
            for tag in self.pending:
                request = self.pending[tag]
                request.tag = "orphaned_%s_%s" % (str(request.tag), self.name)
                assert not request.tag in self.parent.ctx.orphaned
                self.parent.ctx.orphaned[request.tag] = request
            self.pending.clear()
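The loop over self.ordered keeps the graph parsed from the last candidate whose result is 'AllData'; the second loop then lets in-flight requests finish after the transition by rewriting each pending request's tag and moving it into the context's orphaned table. That orphaning pattern is isolated below as a sketch; it assumes only that requests carry a mutable tag attribute, as they do above.

def orphan_pending(pending, orphaned, state_name):
    """ Move still-pending requests into the orphaned table so they can
    run to completion after the state machine moves on. """
    for tag in list(pending):
        request = pending.pop(tag)
        request.tag = "orphaned_%s_%s" % (str(request.tag), state_name)
        assert request.tag not in orphaned
        orphaned[request.tag] = request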
Example #7
def test_presentation():
    """ Smoke test graph_to_string and parse_graph. """
    graph = UpdateGraph()
    print "EMPTY:"
    print graph_to_string(graph)
    print "Adding index: ", graph.add_index([VER_1, ], [VER_2, ])
    print "Adding index: ", graph.add_index([VER_2, ], [VER_3, VER_4])
    print "Adding index: ", graph.add_index([VER_3, VER_2], [VER_5, ])
    chks = fake_chks()
    graph.add_edge((-1, 0), (100, chks.next()))
    graph.add_edge((1, 2), (200, chks.next()))
    graph.add_edge((-1, 2), (500, chks.next()))
    text = graph_to_string(graph)
    print
    print text
    print
    graph1 = parse_graph(text)
    print
    text1 = graph_to_string(graph1)
    print "Round trip:"
    print text1
    assert text == text1