Example 1
def process_args(args):
    """Filter samples in ``args.input`` through a JSON schema and save them.

    Loads a conduit node from ``args.input``, keeps only the fields declared
    in the schema file ``args.schema`` for each child sample, packs the
    per-sample values into a dictionary of lists, and writes the result to
    ``args.output`` (numpy ``.npz`` or a conduit-compatible format, chosen by
    the output extension).
    """
    data = cb.load_node(args.input)

    # Build a conduit schema node from the JSON schema file.
    with open(args.schema, 'r') as f:
        schema_json = f.read()

    g = conduit.Generator(schema_json, 'json')
    schema = conduit.Node()
    g.walk_external(schema)

    # Pack every sample into a unified dictionary of lists, keeping only the
    # schema-selected fields of each sample.
    all_dict = {}
    for sample_name in data.child_names():
        raw_sample = data[sample_name]
        kept = conduit.Node()
        g.walk_external(kept)
        kept.update_compatible(raw_sample)
        make_data_array_dict(all_dict, kept)

    # Save according to output extension, either numpy or conduit-compatible.
    protocol = cb.determine_protocol(args.output)
    if protocol == 'npz':
        np.savez(args.output, **all_dict)
    else:
        out = conduit.Node()
        g.walk_external(out)
        out.update_compatible(data)
        for field_name, values in all_dict.items():
            out[field_name] = np.array(values)
        cb.dump_node(out, args.output)
Example 2
 def test_json_generator_pure_yaml(self):
     """Docs example: build a conduit.Node from a pure YAML string."""
     BEGIN_EXAMPLE("t_py_json_generator_pure_yaml")
     gen = conduit.Generator("test: 100.0", "yaml")
     node = conduit.Node()
     gen.walk(node)
     print(node["test"])
     print(node)
     END_EXAMPLE("t_py_json_generator_pure_yaml")
Example 3
 def test_json_generator_std(self):
     """Docs example: build a conduit.Node from a conduit_json schema string."""
     BEGIN_EXAMPLE("t_py_json_generator_std")
     gen = conduit.Generator("{test: {dtype: float64, value: 100.0}}",
                             "conduit_json")
     node = conduit.Node()
     gen.walk(node)
     print(node["test"])
     print(node)
     END_EXAMPLE("t_py_json_generator_std")
Example 4
 def test_json_generator_pure_json(self):
     """Docs example: build a conduit.Node from a pure JSON string."""
     # NOTE(review): sibling examples tag with a "t_" prefix
     # ("t_py_json_generator_std", ...); this tag has none — confirm the docs
     # extractor really expects "py_json_generator_pure_json" here.
     BEGIN_EXAMPLE("py_json_generator_pure_json")
     gen = conduit.Generator("{test: 100.0}",
                             "json")
     node = conduit.Node()
     gen.walk(node)
     print(node["test"])
     print(node)
     END_EXAMPLE("py_json_generator_pure_json")
 def test_002_json_generator_pure_json(self):
     """Smoke test: walk a pure JSON string into a Node and print it."""
     echo_src("begin", inspect.stack()[0][3], inspect.currentframe().f_lineno)

     gen = conduit.Generator("{test: 100.0}",
                             "json")
     node = conduit.Node()
     gen.walk(node)
     print(node["test"])
     print(node)

     echo_src("end", inspect.stack()[0][3], inspect.currentframe().f_lineno)
 def test_001_json_generator_std(self):
     """Smoke test: walk a conduit_json schema string into a Node and print it."""
     echo_src("begin", inspect.stack()[0][3], inspect.currentframe().f_lineno)

     gen = conduit.Generator("{test: {dtype: float64, value: 100.0}}",
                             "conduit_json")
     node = conduit.Node()
     gen.walk(node)
     print(node["test"])
     print(node)

     echo_src("end", inspect.stack()[0][3], inspect.currentframe().f_lineno)
Example 7
def run(_input, output, schema):
    """Extract schema-selected scalar fields from every sample in ``_input``
    and save them to ``output``.

    Args:
        _input: path to a conduit-compatible file of samples, opened through
            a metadata-only node handle for speed.
        output: destination path; an ``.npz`` extension yields a numpy
            archive, anything else goes through ``cb.dump_node``.
        schema: ``"auto"`` to use the first sample's full layout, a
            comma-separated list of paths to keep, or a path to a JSON
            schema file.
    """
    print(WARN)
    protocol = cb.determine_protocol(output)
    # Faster loader, just read metadata
    data_loader = cb.load_node_handle(_input)
    first_data = conduit.Node()
    data_loader.read(first_data, data_loader.list_child_names()[0])
    if schema == "auto":
        # Adopt the first sample's layout wholesale.
        schema_json = first_data.to_json()
    elif "," in schema:
        # Comma-separated list of paths: keep only those fields.
        sub_list = schema.split(",")
        schema_node = conduit.Node()
        for item in sub_list:
            schema_node[item] = first_data[item]
        schema_json = schema_node.to_json()
    else:
        # Otherwise ``schema`` is a path to a JSON schema file.
        with open(schema, "r") as f:
            schema_json = f.read()

    g = conduit.Generator(schema_json, "json")
    schema = conduit.Node()
    g.walk_external(schema)

    # Collect the scalar leaf paths declared by the schema.
    data_paths = []
    for path, _ in generate_scalar_path_pairs(schema):
        data_paths.append(path)
    samples = data_loader.list_child_names()

    # Walk through all the samples and create a unified list (ie pack into a
    # dictionary of lists)
    all_dict = {}
    for s in samples:
        filtered_node = conduit.Node()
        for path in data_paths:
            sample_path = "/".join((s, path))
            if data_loader.has_path(sample_path):
                data_loader.read(filtered_node[path], sample_path)
            else:
                # Fix: key the placeholder by ``path``, matching the read
                # branch above. The old code keyed by ``sample_path``, which
                # nested the sample name inside the per-sample node and gave
                # missing fields a different layout than present ones.
                filtered_node[path] = np.nan  # if a value is missing, that could be a problem
        make_data_array_dict(all_dict, filtered_node)

    # Stack each field's per-sample rows into a single 2-D array.
    for dat in all_dict.keys():
        all_dict[dat] = np.vstack(all_dict[dat])
    # Save according to output extension, either numpy or conduit-compatible
    if protocol == "npz":
        np.savez(output, **all_dict)
    else:
        n = cb.pack_conduit_node_from_dict(all_dict)
        cb.dump_node(n, output)
Example 8
def process_args(args):
    """Filter samples in ``args.input`` through a schema and save them.

    The schema is chosen by ``args.schema``: ``"auto"`` copies the first
    sample's layout, a comma-separated list keeps only those paths, and
    anything else is treated as a path to a JSON schema file. Per-sample
    values are packed into a dictionary of lists and written to
    ``args.output`` (numpy ``.npz`` or a conduit-compatible format, chosen
    by the output extension).
    """
    data = cb.load_node(args.input)
    if args.schema == "auto":
        # Adopt the first sample's layout wholesale.
        schema_json = data[data.child_names()[0]].to_json()
    elif "," in args.schema:
        # Comma-separated list of paths: keep only those fields, sourced
        # from the first sample.
        sub_list = args.schema.split(",")
        schema_node = conduit.Node()
        for item in sub_list:
            schema_node[item] = data[data.child_names()[0] + "/" + item]
        schema_json = schema_node.to_json()
        # NOTE(review): debug print kept to preserve observable behavior.
        print(schema_json)
    else:
        # Otherwise ``args.schema`` is a path to a JSON schema file.
        with open(args.schema, "r") as f:
            schema_json = f.read()

    g = conduit.Generator(schema_json, "json")
    schema = conduit.Node()
    g.walk_external(schema)

    samples = data.child_names()

    # Walk through all the samples and create a unified list (ie pack into a
    # dictionary of lists)
    all_dict = {}
    for s in samples:
        unfiltered_node = data[s]
        filtered_node = conduit.Node()
        g.walk_external(filtered_node)
        filtered_node.update_compatible(unfiltered_node)
        make_data_array_dict(all_dict, filtered_node)

    # Save according to output extension, either numpy or conduit-compatible
    protocol = cb.determine_protocol(args.output)
    if protocol == "npz":
        np.savez(args.output, **all_dict)
    else:
        n = cb.pack_conduit_node_from_dict(all_dict)
        cb.dump_node(n, args.output)