Example No. 1
from functools import partial

from toolz import identity, mapcat


def test_mapcat():
    assert list(mapcat(identity, [[1, 2, 3], [4, 5, 6]])) == [1, 2, 3, 4, 5, 6]

    assert list(mapcat(reversed, [[3, 2, 1, 0], [6, 5, 4], [9, 8, 7]])) == list(range(10))

    inc = lambda i: i + 1
    assert [4, 5, 6, 7, 8, 9] == list(mapcat(partial(map, inc), [[3, 4, 5], [6, 7, 8]]))
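For reference, mapcat maps a function over a collection of sequences and lazily concatenates the results. A minimal pure-Python sketch of the same behaviour (mapcat_sketch is a hypothetical name, not part of toolz):

from itertools import chain


def mapcat_sketch(f, seqs):
    # map f over every sequence, then lazily concatenate the mapped results
    return chain.from_iterable(map(f, seqs))


assert list(mapcat_sketch(reversed, [[2, 1, 0], [5, 4, 3]])) == list(range(6))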
Example No. 2
    def __init__(
        self,
        graph: nx.Graph,
        window_size: int,
        walk_length: int,
        dataset_size: int,
        forward_lookup_persona: Dict[Hashable, int],
        forward_lookup: Dict[Hashable, int],
    ) -> None:
        """
        Create a PyTorch dataset suitable for training the Splitter model. It takes a persona graph, a window
        size, and a walk length as its core parameters and creates random walks from which training samples are
        generated. Samples are generated on demand, so their order is not deterministic; once generated, they
        are cached in memory.

        :param graph: persona graph
        :param window_size: number of nodes to the left and to the right for the skip-gram model
        :param walk_length: length of the random walks generated
        :param dataset_size: overall size of the dataset
        :param forward_lookup_persona: lookup from persona node to index
        :param forward_lookup: lookup from original graph node to index
        """
        super().__init__()
        self.graph = graph
        self.window_size = window_size
        self.walk_length = walk_length
        self.forward_lookup_persona = forward_lookup_persona
        self.forward_lookup = forward_lookup
        self.dataset_size = dataset_size
        # the walker is an infinite, lazy iterable that yields new training samples; it is always safe to call next() on it
        self.walker = mapcat(
            partial(iter_skip_window_walk, window_size=window_size),
            iter_random_walks(graph, walk_length),
        )
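The composition above is safe even though iter_random_walks yields an endless stream of walks: mapcat returns a lazy iterator, so only as many walks are consumed as next() demands. A self-contained sketch of the same pattern, using simplified hypothetical stand-ins for the project-specific iter_random_walks and iter_skip_window_walk:

from functools import partial
from itertools import islice

from toolz import mapcat


def iter_walks():
    # hypothetical stand-in for iter_random_walks: an infinite stream of walks
    while True:
        yield [0, 1, 2, 3]


def iter_pairs(walk, window_size):
    # hypothetical stand-in for iter_skip_window_walk: (source, context) pairs
    for i, source in enumerate(walk):
        for context in walk[max(0, i - window_size):i + window_size + 1]:
            if context != source:
                yield (source, context)


walker = mapcat(partial(iter_pairs, window_size=1), iter_walks())
print(list(islice(walker, 4)))  # [(0, 1), (1, 0), (1, 2), (2, 1)]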
Example No. 3
from typing import Callable

from toolz import mapcat  # assumed source of mapcat on this page

# `Seq`, `seq`, and the type variables `A_in` / `B_out` are defined elsewhere
# in the surrounding library.


def flatmap(f: Callable[[A_in], Seq[B_out]], fa: Seq[A_in]) -> Seq[B_out]:
    """
    Feed a value in a context (`Seq[A]`) into a function that takes a plain
    value and returns a value in a context (`A -> Seq[B]`).

    >>> def f(x: int) -> Seq[str]:
    ...     return str(x), str(x)

    >>> flatmap(f, tuple())
    ()
    >>> flatmap(f, (1, 2, 3))
    ('1', '1', '2', '2', '3', '3')
    """
    return seq(mapcat(f, fa))
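Setting aside the library-specific seq wrapper, this flatmap is the monadic bind for sequences and is equivalent to a nested comprehension; a minimal sketch (flatmap_sketch is a hypothetical name):

def flatmap_sketch(f, fa):
    # bind for sequences: apply f to each element and flatten one level
    return tuple(y for x in fa for y in f(x))


assert flatmap_sketch(lambda x: (str(x), str(x)), (1, 2, 3)) == ('1', '1', '2', '2', '3', '3')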