# Example 1
def test_merge_no_preserve():
    """
    Test merging two graphs with ``preserve=False``: conflicting scalar
    properties are overwritten rather than accumulated, while multivalued
    properties (``category``) still collect values from both graphs.
    """
    pt1 = PandasTransformer()
    pt1.parse(os.path.join(resource_dir, 'merge', 'nodes1.tsv'),
              input_format='tsv')
    pt1.parse(os.path.join(resource_dir, 'merge', 'edges1.tsv'),
              input_format='tsv')
    pt2 = PandasTransformer()
    pt2.parse(os.path.join(resource_dir, 'merge', 'nodes2.tsv'),
              input_format='tsv')
    pt2.parse(os.path.join(resource_dir, 'merge', 'edges2.tsv'),
              input_format='tsv')
    merged_graph = merge_all_graphs([pt1.graph, pt2.graph], preserve=False)
    assert len(merged_graph.nodes()) == 6
    assert len(merged_graph.edges()) == 8

    x1 = merged_graph.nodes['x1']
    assert x1['name'] == 'node x1'

    # category stays a list containing the value from each source graph
    assert isinstance(x1['category'], list)
    assert list(pt1.graph.nodes['x1']['category'])[0] in x1['category']
    assert list(pt2.graph.nodes['x1']['category'])[0] in x1['category']
    # with preserve=False the conflicting scalar is overwritten, not merged
    assert x1['p1'] == 'a'
# Example 2
def test_merge():
    """
    Test merging two graphs with ``preserve=True``: conflicting node
    properties accumulate values from both source graphs.
    """
    pt1 = PandasTransformer()
    pt2 = PandasTransformer()
    # Load each source graph's node and edge TSVs (pt1 first, then pt2).
    for transformer, node_file, edge_file in (
        (pt1, 'nodes1.tsv', 'edges1.tsv'),
        (pt2, 'nodes2.tsv', 'edges2.tsv'),
    ):
        transformer.parse(os.path.join(resource_dir, 'merge', node_file),
                          input_format='tsv')
        transformer.parse(os.path.join(resource_dir, 'merge', edge_file),
                          input_format='tsv')

    merged_graph = merge_all_graphs([pt1.graph, pt2.graph], preserve=True)
    assert len(merged_graph.nodes()) == 6
    assert len(merged_graph.edges()) == 8

    x1 = merged_graph.nodes['x1']
    assert x1['name'] == 'node x1'
    assert isinstance(x1['category'], list)
    # preserve=True keeps the conflicting p1 values from both graphs
    assert 'a' in x1['p1']
    assert '1' in x1['p1']

    x10 = merged_graph.nodes['x10']
    assert x10['id'] == 'x10'
    assert x10['name'] == 'node x10'
# Example 3
def test_merge_all_graphs():
    """
    Merge the fixture graphs twice — once preserving conflicting
    nodes/edges and once overwriting them — and check the result.
    """
    # TODO: Make a random graph generator
    for preserve in (True, False):
        merged_graph = merge_all_graphs(get_graphs(), preserve=preserve)

        # Shared expectations: merged size and the surviving graph name.
        assert merged_graph.number_of_nodes() == 6
        assert merged_graph.number_of_edges() == 6
        assert merged_graph.name == 'Graph 2'

        node_a = merged_graph.nodes['A']
        assert node_a['name'] == 'Node A'
        assert node_a['description'] == 'Node A in Graph 2'

        edges = merged_graph.get_edge_data('B', 'A')
        assert len(edges) == 2

        first_edge = list(edges.values())[0]
        if preserve:
            # provided_by accumulates the origin of both conflicting edges
            assert len(first_edge['provided_by']) == 2
            assert first_edge['provided_by'] == ['Graph 2', 'Graph 1']
        else:
            # without preservation only a single origin string remains
            assert isinstance(first_edge['provided_by'], str)
            assert first_edge['provided_by'] == 'Graph 1'
# Example 4
def load_and_merge(config: dict, load_config):
    """
    Load nodes and edges from files and KGs, as defined in a config YAML, and merge them into a single graph.
    The merge happens in-memory. This merged graph can then be written to a local/remote Neo4j instance
    OR be serialized into a file.
    \f

    .. note::
        Everything here is driven by the ``load_config`` YAML.

    Parameters
    ----------
    config: dict
        Unused by this function; kept for backward compatibility with callers.
    load_config
        Path to the load-and-merge config YAML.
    """
    def _apply_filters(target: dict, transformer) -> None:
        # Shared by the file and Neo4j branches; previously duplicated verbatim.
        # Only called when 'filters' is present in the target.
        filters = target['filters']
        node_filters = filters.get('node_filters', {})
        edge_filters = filters.get('edge_filters', {})
        for k, v in node_filters.items():
            transformer.set_node_filter(k, set(v))
        for k, v in edge_filters.items():
            transformer.set_edge_filter(k, set(v))
        logging.info(f"with node filters: {node_filters}")
        logging.info(f"with edge filters: {edge_filters}")

    with open(load_config, 'r') as YML:
        cfg = yaml.load(YML, Loader=yaml.FullLoader)

    transformers = []
    for key in cfg['target']:
        target = cfg['target'][key]
        logging.info("Loading {}".format(key))
        if target['type'] in get_file_types():
            # loading from a file
            transformer = get_transformer(target['type'])()
            if target['type'] in {'tsv', 'neo4j'}:
                # currently supporting filters only for TSV and Neo4j
                if 'filters' in target:
                    _apply_filters(target, transformer)
            for f in target['filename']:
                transformer.parse(f, input_format=target['type'])
            transformers.append(transformer)
        elif target['type'] == 'neo4j':
            # loading from a (remote or local) Neo4j instance
            transformer = kgx.NeoTransformer(None, target['uri'],
                                             target['username'],
                                             target['password'])
            if 'filters' in target:
                _apply_filters(target, transformer)
            transformer.load()
            transformers.append(transformer)
        else:
            logging.error(
                "type {} not yet supported for KGX load-and-merge operation.".
                format(target['type']))

    # merge every loaded subgraph into one in-memory graph
    merged_graph = merge_all_graphs([x.graph for x in transformers])

    destination = cfg['destination']
    if destination['type'] in ['csv', 'tsv', 'ttl', 'json', 'tar']:
        # serialize the merged graph to a file
        destination_transformer = get_transformer(
            destination['type'])(merged_graph)
        destination_transformer.save(destination['filename'])
    elif destination['type'] == 'neo4j':
        # write the merged graph to a Neo4j instance
        destination_transformer = kgx.NeoTransformer(
            merged_graph,
            uri=destination['uri'],
            username=destination['username'],
            password=destination['password'])
        destination_transformer.save()
    else:
        logging.error(
            "type {} not yet supported for KGX load-and-merge operation.".
            format(destination['type']))
# Example 5
def load_and_merge(yaml_file: str) -> nx.MultiDiGraph:
    """Load and merge sources defined in the config YAML.

    Args:
        yaml_file: A string pointing to a KGX compatible config YAML.

    Returns:
        networkx.MultiDiGraph: The merged graph.

    """
    config = parse_load_config(yaml_file)
    transformers: List = []

    # make sure all files exist before we start load
    for key in config['target']:
        target = config['target'][key]
        logging.info("Checking that file exist for {}".format(key))
        if target['type'] in get_file_types():
            for f in target['filename']:
                # os.path.isfile already implies existence
                if not os.path.isfile(f):
                    # BUG FIX: extra positional args to an exception are never
                    # interpolated into the message; format explicitly.
                    raise FileNotFoundError(
                        ("File {} for transform {}  in yaml file {} "
                         "doesn't exist! Dying.").format(f, key, yaml_file))

    # read all the sources defined in the YAML
    for key in config['target']:
        target = config['target'][key]
        logging.info("Loading {}".format(key))
        # Only generate stats for targets that actually produced a transformer
        # (previously `transformer` leaked from a prior iteration or raised
        # NameError when the type was unsupported).
        transformer = None
        if target['type'] in get_file_types():
            # loading from a file
            try:
                transformer = get_transformer(target['type'])()
                if target['type'] in {'tsv', 'neo4j'}:
                    if 'filters' in target:
                        apply_filters(target, transformer)
                for f in target['filename']:
                    # BUG FIX: parse with the declared type
                    # (was hard-coded to 'tsv' for every file format)
                    transformer.parse(f, input_format=target['type'])
                    transformer.graph.name = key
                if 'operations' in target:
                    apply_operations(target, transformer)
                transformers.append(transformer)
            except Exception:
                # BUG FIX: narrowed from a bare `except:`; also log the target
                # key (the old message used `f`, which is unbound when failure
                # happens before the filename loop) and keep the traceback.
                logging.exception("Failed loading {}".format(key))
                transformer = None
        elif target['type'] == 'neo4j':
            # loading from a (remote or local) Neo4j instance
            transformer = NeoTransformer(None, target['uri'],
                                         target['username'],
                                         target['password'])
            if 'filters' in target:
                apply_filters(target, transformer)
            transformer.load()
            if 'operations' in target:
                apply_operations(target, transformer)
            transformers.append(transformer)
            transformer.graph.name = key
        else:
            logging.error("type {} not yet supported".format(target['type']))
        if transformer is not None:
            stats_filename = f"{key}_stats.yaml"
            generate_graph_stats(transformer.graph, key, stats_filename)

    # merge all subgraphs into a single graph
    merged_graph = merge_all_graphs([x.graph for x in transformers])
    merged_graph.name = 'merged_graph'
    generate_graph_stats(merged_graph, merged_graph.name,
                         "merged_graph_stats.yaml", ['provided_by'],
                         ['provided_by'])

    # write the merged graph to every configured destination
    if 'destination' in config:
        for _, destination in config['destination'].items():
            if destination['type'] == 'neo4j':
                destination_transformer = NeoTransformer(
                    merged_graph,
                    uri=destination['uri'],
                    username=destination['username'],
                    password=destination['password'])
                destination_transformer.save()
            elif destination['type'] in get_file_types():
                destination_transformer = get_transformer(
                    destination['type'])(merged_graph)
                mode = 'w:gz' if destination['type'] in {'tsv'} else None
                if destination['type'] in {'nt', 'nt.gz', 'ttl'}:
                    # RDF serializations need explicit property typing
                    destination_transformer.set_property_types(PROPERTY_TYPES)
                destination_transformer.save(destination['filename'],
                                             output_format=destination['type'],
                                             mode=mode)
            else:
                logging.error(
                    "type {} not yet supported for KGX load-and-merge operation."
                    .format(destination['type']))

    return merged_graph