Example #1
    def test_005_mpi_sum_all_reduce(self):
        # skip tests on windows until we work out proper
        # mpi4py install for our windows ci
        if sys.platform == "win32":
            return
        BEGIN_EXAMPLE("py_mpi_sum_all_reduce")
        import conduit
        import conduit.relay as relay
        import conduit.relay.mpi
        from mpi4py import MPI

        # get a comm id from mpi4py world comm
        comm_id = MPI.COMM_WORLD.py2f()
        # get our rank and the comm's size
        comm_rank = relay.mpi.rank(comm_id)
        comm_size = relay.mpi.size(comm_id)

        # sum data across all ranks
        # (ranks have nodes with compatible schemas)
        n = conduit.Node(conduit.DataType.int64(4))
        n_res = conduit.Node(conduit.DataType.int64(4))
        # data to reduce
        vals = n.value()
        for i in range(4):
            vals[i] = 1

        relay.mpi.sum_all_reduce(n, n_res, comm=comm_id)
        # answer should be an array with each value == comm_size
        # show result on rank 0
        if comm_rank == 0:
            print("[rank: {}] sum reduce result: {}".format(
                comm_rank, n_res.to_yaml()))

        END_EXAMPLE("py_mpi_sum_all_reduce")
Example #2
    def test_006_mpi_all_gather(self):
        # skip tests on windows until we work out proper
        # mpi4py install for our windows ci
        if sys.platform == "win32":
            return
        BEGIN_EXAMPLE("py_mpi_all_gather_using_schema")
        import conduit
        import conduit.relay as relay
        import conduit.relay.mpi
        from mpi4py import MPI

        # get a comm id from mpi4py world comm
        comm_id = MPI.COMM_WORLD.py2f()
        # get our rank and the comm's size
        comm_rank = relay.mpi.rank(comm_id)
        comm_size = relay.mpi.size(comm_id)

        n = conduit.Node(conduit.DataType.int64(4))
        n_res = conduit.Node()
        # data to gather
        vals = n.value()
        for i in range(4):
            vals[i] = comm_rank

        relay.mpi.all_gather_using_schema(n, n_res, comm=comm_id)
        # show result on rank 0
        if comm_rank == 0:
            print("[rank: {}] all gather using schema result: {}".format(
                comm_rank, n_res.to_yaml()))

        END_EXAMPLE("py_mpi_all_gather_using_schema")
Example #3
def process_args(args):
    data = cb.load_node(args.input)
    with open(args.schema, 'r') as f:
        schema_json = f.read()

    g = conduit.Generator(schema_json, 'json')
    schema = conduit.Node()
    g.walk_external(schema)

    samples = data.child_names()

    # Walk through all the samples and create a unified list (i.e. pack into a
    # dictionary of lists)
    all_dict = {}
    for s in samples:
        unfiltered_node = data[s]
        filtered_node = conduit.Node()
        g.walk_external(filtered_node)
        filtered_node.update_compatible(unfiltered_node)
        make_data_array_dict(all_dict, filtered_node)

    # Save according to output extension, either numpy or conduit-compatible
    protocol = cb.determine_protocol(args.output)
    if protocol == 'npz':
        np.savez(args.output, **all_dict)
    else:
        n = conduit.Node()
        g.walk_external(n)
        n.update_compatible(data)
        for data_name in all_dict.keys():
            n[data_name] = np.array(all_dict[data_name])
        cb.dump_node(n, args.output)
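make_data_array_dict is an external helper that is not shown here. A minimal sketch of what it could look like, assuming it walks the filtered node's leaves and appends each value to a per-path list (the name and behavior are assumptions, not the library's API):

def make_data_array_dict(all_dict, node, prefix=""):
    # hypothetical sketch: recursively walk a conduit Node and append
    # each leaf value to a list keyed by the leaf's path
    for child in node.children():
        path = prefix + child.name()
        sub = child.node()
        if sub.number_of_children() > 0:
            make_data_array_dict(all_dict, sub, path + "/")
        else:
            all_dict.setdefault(path, []).append(sub.value())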
Example #4
    def test_render_3d(self):
        # if we don't have ascent, simply return
        info = ascent.about()
        if info["runtimes/ascent/vtkm/status"] != "enabled":
            return

        obase = "tout_python_ascent_render_3d"
        ofile = obase + "100.png"
        # clean up old results if they exist
        if os.path.isfile(ofile):
            os.remove(ofile)

        # create example mesh
        n_mesh = conduit.Node()
        conduit.blueprint.mesh.examples.braid("hexs", 10, 10, 10, n_mesh)

        # open ascent
        a = ascent.Ascent()
        a.open()

        a.publish(n_mesh)

        actions = conduit.Node()
        scenes = conduit.Node()
        scenes["s1/plots/p1/type"] = "pseudocolor"
        scenes["s1/plots/p1/field"] = "braid"
        scenes["s1/image_prefix"] = obase

        add_act = actions.append()
        add_act["action"] = "add_scenes"
        add_act["scenes"] = scenes

        a.execute(actions)
        a.close()
        self.assertTrue(os.path.isfile(ofile))
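Why "100.png"? The braid example initializes the mesh's state/cycle to 100, and ascent appends the cycle to image_prefix when naming the rendered file. A small check, assuming the mesh built above:

        # sketch: braid sets state/cycle to 100, which ascent appends
        # to the image prefix when writing the output file
        print(n_mesh["state/cycle"])  # expect 100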
Example #5
    def test_bcast_using_schema(self):
        # skip tests on windows until we work out proper
        # mpi4py install for our windows ci
        if sys.platform == "win32":
            return
        from mpi4py import MPI
        comm_id   = MPI.COMM_WORLD.py2f()
        comm_rank = relay.mpi.rank(comm_id)
        comm_size = relay.mpi.size(comm_id)
        
        for root in range(comm_size):
            n = conduit.Node()
            if comm_rank == root:
                n.set(conduit.DataType.int64(3))
                vals = n.value()
                vals[0] = 11
                vals[1] = 22
                vals[2] = 33
            relay.mpi.broadcast_using_schema(n, root, comm_id)
            print(n)
            vals = n.value()
            self.assertEqual(vals[0], 11)
            self.assertEqual(vals[1], 22)
            self.assertEqual(vals[2], 33)

        for root in range(comm_size):
            n = conduit.Node()
            if comm_rank == root:
                n["a/b/c/d/e/f"].set(np.int64(10))
            relay.mpi.broadcast_using_schema(n, root, comm_id)
            val = n["a/b/c/d/e/f"]
            self.assertEqual(val, 10)
Example #6
    def test_render_flow_inspect(self):
        # if we don't have ascent, simply return
        info = ascent.about()
        if info["runtimes/ascent/status"] != "enabled":
            return

        flow.Workspace.register_builtin_filter_types()
        flow.Workspace.register_filter_type(flow.wrap_function(inspect))

        # create example mesh
        n_mesh = conduit.Node()
        conduit.blueprint.mesh.examples.braid("quads",
                                              10,
                                              10,
                                              0,
                                              n_mesh)

        # open ascent
        a = ascent.Ascent()
        open_opts = conduit.Node()
        open_opts["runtime/type"].set("flow")
        a.open(open_opts)

        a.publish(n_mesh)

        actions = conduit.Node()
        add_py = actions.append()

        add_py["action"] = "add_filter";
        add_py["type_name"]  = "ensure_python";
        add_py["name"] = "py";

        add_ins = actions.append()
        add_ins["action"] = "add_filter";
        add_ins["type_name"]  = "inspect";
        add_ins["name"] = "my_inspect";


        conn_py = actions.append()
        conn_py["action"] = "connect";
        conn_py["src"]  = "source";
        conn_py["dest"] = "py";

        conn_ins = actions.append()
        conn_ins["action"] = "connect";
        conn_ins["src"]  = "py";
        conn_ins["dest"] = "my_inspect";

        print(actions)
        actions.append()["action"] = "execute"
        # this will fail for static libs case
        # b/c the py libs are still dynamic, and they each
        # link their own static copy of flow
        try:
            a.execute(actions)
        except RuntimeError as e:
            # Python 3 exceptions have no .message; check str(e) instead
            if str(e).count("ensure_python") == 0:
                raise
        a.close()
Example #7
def PPL3toCDT3(args):

    nHeader = conduit.Node()
    timestamp = datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")
    nHeader['date'] = "Created by PPL3toCDT3hdf5.py at " + timestamp
    print("Created by PPL3toCDT3hdf5.py at " + timestamp)

    if not os.path.exists(args.outfile):
        os.makedirs(args.outfile)

    hdf5pathDir = os.path.abspath(args.outfile)
    hdf5path = os.path.join(hdf5pathDir, "dock_proc1.hdf5")
    print(hdf5path)

    conduit.relay.io.save(nHeader, hdf5path)

    comDirPath = os.path.abspath(args.scrDir + "/com")
    print(comDirPath)
    os.chdir(comDirPath)
    dirs = os.listdir(".")
    for recid in dirs:
        recPath = os.path.join(comDirPath, recid+"/dock")
        if os.path.isdir(recPath):
            os.chdir(recPath)
            print(os.getcwd())

            n = conduit.Node()

            ligs = os.listdir(".")

            for ligid in ligs:
                ligPath = os.path.join(recPath, ligid)
                if os.path.isdir(ligPath):
                    os.chdir(ligPath)
                    scorefile = os.path.join(ligPath, 'scores.log')

                    if os.path.isfile(scorefile):
                        entryKey = "/dock/" + recid + "/" + ligid
                        scores = getScore(scorefile)
                        numPose = len(scores)
                        n[entryKey + "/meta/numPose"] = np.int32(numPose)
                        if numPose > 0:
                            n[entryKey + "/status"] = np.int32(1)
                            n[entryKey + "/meta/Mesg"] = "Finished!"
                        else:
                            n[entryKey + "/status"] = np.int32(0)
                            n[entryKey + "/meta/Mesg"] = "No Scores!"

                        # iteritems() is Python 2 only; items() works in both
                        for key, val in scores.items():
                            n[entryKey + "/meta/scores/" + key] = val

                        fileList = ['poses.pdbqt', 'scores.log']

                        filesToHDF(n, entryKey, fileList)
            try:
                conduit.relay.io.save_merged(n, hdf5path)
            except Exception:
                print(recid + " cannot be saved into HDF5")
Example #8
    def test_render_mpi_2d(self):
        # if we don't have ascent or mpi, simply return

        info = ascent.mpi.about()
        if info["runtimes/ascent/status"] != "enabled":
            print("ascent runtime not enabled, skipping mpi render test")
            return


        obase = "tout_python_ascent_mpi_render_2d"
        ofile = obase + ".png"
        # clean up old results if they exist
        if MPI.COMM_WORLD.rank == 0 and os.path.isfile(ofile):
            os.remove(ofile)
        
        # create example mesh using conduit blueprint
        n_mesh = conduit.Node()
        conduit.blueprint.mesh.examples.braid("uniform",
                                              10,
                                              10,
                                              0,
                                              n_mesh)
        # shift the mesh in x so each rank renders a distinct domain
        x_origin = MPI.COMM_WORLD.rank * 20 - 10

        n_mesh["state/domain_id"] = MPI.COMM_WORLD.rank
        n_mesh["coordsets/coords/origin/x"] = x_origin
        n_mesh["fields/braid/values"][:] = MPI.COMM_WORLD.rank
        

        # open ascent
        a = ascent.mpi.Ascent()
        ascent_opts = conduit.Node()
        ascent_opts["mpi_comm"].set(MPI.COMM_WORLD.py2f())
        a.open(ascent_opts)

        a.publish(n_mesh)

        actions = conduit.Node()
        scenes  = conduit.Node()
        scenes["s1/plots/p1/type"] = "pseudocolor"
        scenes["s1/plots/p1/params/field"] = "braid"
        scenes["s1/image_prefix"] = obase

        add_act = actions.append()
        add_act["action"] = "add_scenes"
        add_act["scenes"] = scenes

        actions.append()["action"] = "execute"

        a.execute(actions)
        a.close()
        
        # use barrier to avoid problems
        # some ranks may finish before file is written
        MPI.COMM_WORLD.Barrier()
        self.assertTrue(os.path.isfile(ofile))
Example #9
    def test_io_handle(self):
        tbase = "tout_python_relay_io_handle."
        protos = ["conduit_bin",
                  "json",
                  "conduit_json",
                  "conduit_base64_json",
                  "yaml"]
        
        # only test hdf5 if relay was built with hdf5 support
        if relay.io.about()["protocols/hdf5"] == "enabled":
            protos.append("hdf5")
        for proto in protos:
            test_file = tbase + proto
            if os.path.isfile(test_file):
                os.remove(test_file)
            
            n = conduit.Node()
            n["a"] = int64(20)
            n["b"] = int64(8)
            n["c"] = int64(12)
            n["d/here"] = int64(10)

            h = conduit.relay.io.IOHandle()
            h.open(test_file)
            h.write(n)
            self.assertTrue(h.has_path("d/here"))
            cnames = h.list_child_names()
            self.assertTrue(cnames[0] == "a")
            self.assertTrue(cnames[1] == "b")
            self.assertTrue(cnames[2] == "c")
            self.assertTrue(cnames[3] == "d")
            cnames = h.list_child_names("d")
            self.assertTrue(cnames[0] == "here")
            h.remove("d")
            self.assertFalse(h.has_path("d"))
            self.assertFalse(h.has_path("d/here"))
            h.close()
            n2 = conduit.Node()
            h2 = conduit.relay.io.IOHandle()
            h2.open(test_file)
            cnames = h2.list_child_names()
            self.assertTrue(cnames[0] == "a")
            self.assertTrue(cnames[1] == "b")
            self.assertTrue(cnames[2] == "c")
            n_val = conduit.Node()
            n_val.set(int64(10))
            h2.write(n_val, "d/here")
            h2.read(n2)
            info = conduit.Node()
            self.assertFalse(n.diff(n2, info, 0.0))
            n_val.reset()
            h2.read(n_val, "c")
            self.assertTrue(n_val.value() == 12)
            h2.close()
Example #10
    def test_reduce_helpers(self):
        # skip tests on windows until we work out proper
        # mpi4py install for our windows ci
        if sys.platform == "win32":
            return
        from mpi4py import MPI
        comm_id   = MPI.COMM_WORLD.py2f()
        comm_rank = relay.mpi.rank(comm_id)
        comm_size = relay.mpi.size(comm_id)
        snd = conduit.Node(conduit.DataType.int64(5))
        rcv = conduit.Node(conduit.DataType.int64(5))

        snd_vals = snd.value()
        rcv_vals = rcv.value()

        # sum
        print("sum")
        for i in range(5):
            snd_vals[i] = 10
        relay.mpi.sum_reduce(snd, rcv, 0, comm_id)
        if comm_rank == 0:
            print(rcv_vals)
            for i in range(5):
                self.assertEqual(rcv_vals[i], 10 * comm_size)

        # prod
        print("prod")
        for i in range(5):
            snd_vals[i] = 2
        relay.mpi.prod_reduce(snd, rcv, 0, comm_id)
        if comm_rank == 0:
            print(rcv_vals)
            for i in range(5):
                # each rank contributes 2, so the product is 2 ** comm_size
                self.assertEqual(rcv_vals[i], math.pow(2, comm_size))

        # max
        print("max")
        for i in range(5):
            snd_vals[i] = comm_rank * 10 + 1
        relay.mpi.max_reduce(snd, rcv, 0, comm_id)
        if comm_rank == 0:
            print(rcv_vals)
            for i in range(5):
                self.assertEqual(rcv_vals[i], 10 * (comm_size-1) + 1 )

        # min
        print("min")
        for i in range(5):
            snd_vals[i] = comm_rank * 10 + 1
        relay.mpi.min_reduce(snd, rcv, 0, comm_id)
        if comm_rank == 0:
            print(rcv_vals)
            for i in range(5):
                self.assertEqual(rcv_vals[i], 1)
Example #11
    def test_001_io_handle(self):
        import conduit.relay
        if conduit.relay.io.about()["protocols/hdf5"] != "enabled":
            return
        BEGIN_EXAMPLE("py_relay_io_handle")
        import conduit
        import conduit.relay.io

        n = conduit.Node()
        n["a/data"] = 1.0
        n["a/more_data"] = 2.0
        n["a/b/my_string"] = "value"
        print("\nNode to write:")
        print(n)

        # save to hdf5 file using the path-based api
        conduit.relay.io.save(n, "my_output.hdf5")

        # inspect and modify with an IOHandle
        h = conduit.relay.io.IOHandle()
        h.open("my_output.hdf5")

        # check for and read a path we are interested in
        if h.has_path("a/data"):
            nread = conduit.Node()
            h.read(nread, "a/data")
            print('\nValue at "a/data" = {0}'.format(nread.value()))

        # check for and remove a path we don't want
        if h.has_path("a/more_data"):
            h.remove("a/more_data")
            print('\nRemoved "a/more_data"')

        # verify the data was removed
        if not h.has_path("a/more_data"):
            print('\nPath "a/more_data" is no more')

        # write some new data
        print('\nWriting to "a/c"')
        n.set(42.0)
        h.write(n, "a/c")

        # find the names of the children of "a"
        cnames = h.list_child_names("a")
        print('\nChildren of "a": {0}'.format(cnames))

        nread = conduit.Node()
        # read the entire contents
        h.read(nread)

        print("\nRead Result:")
        print(nread)
        END_EXAMPLE("py_relay_io_handle")
Example #12
    def extract(self, *args, **kwargs):
        import conduit
        import conduit.relay
        import numpy
        if not isinstance(self.feature, list):
            features = [self.feature]
        else:
            features = self.feature

        ioh = conduit.relay.io.IOHandle()
        ioh.open(self.uri, "sidre_hdf5")
        # look for self.feature in the blueprint index
        bp_idx = conduit.Node()
        ioh.read(bp_idx, "root/blueprint_index")
        bp_path = conduit.Node()
        ndoms = conduit.Node()

        out = []
        for feature in features:
            # split feature name back to mesh + field
            sp = feature.split("/")
            mesh = sp[0]
            field = "/".join(sp[1:])
            # get number of domains for this mesh
            ioh.read(
                ndoms,
                "root/blueprint_index/{}/state/number_of_domains".format(mesh))
            # keep ndoms a Node across iterations; bind its value separately
            num_doms = ndoms.value()
            # get the path to the selected field in the bulk data
            pth = "root/blueprint_index/{}/fields/{}/path".format(mesh, field)
            if self.format == "sidre/path":
                out.append([ioh, pth])
                continue
            ioh.read(bp_path, pth)
            field_path = bp_path.value()
            res = None
            vals = conduit.Node()
            for i in range(num_doms):
                dom_path = "%d/" % i
                dom_path += field_path + "/values"
                ioh.read(vals, dom_path)
                npy_array = vals.value()
                if res is not None:
                    res = numpy.concatenate([res, npy_array])
                else:
                    res = npy_array
            out.append(res)
        if len(features) == 1:
            return out[0]
        else:
            return out
Example #13
    def test_delete_scene(self):
        # exec again, but this time remove a scene;
        # guards against a regression in internal bookkeeping
        # during graph setup

        mesh = conduit.Node()
        conduit.blueprint.mesh.examples.braid("hexs", 5, 5, 5, mesh)
        a = ascent.Ascent()
        opts = conduit.Node()
        opts["exceptions"] = "forward"
        a.open(opts)
        a.publish(mesh)

        actions = conduit.Node()

        add_act = actions.append()
        add_act["action"] = "add_pipelines"
        pipelines = add_act["pipelines"]
        pipelines["pl1/f1/type"] = "contour"
        contour_params = pipelines["pl1/f1/params"]
        contour_params["field"] = "braid"
        iso_vals = np.array([0.2, 0.4], dtype=np.float32)
        contour_params["iso_values"].set(iso_vals)

        add_act2 = actions.append()
        add_act2["action"] = "add_scenes"
        scenes = add_act2["scenes"]
        scenes["s1/plots/p1/type"] = "pseudocolor"
        scenes["s1/plots/p1/pipeline"] = "pl1"
        scenes["s1/plots/p1/field"] = "braid"
        # set the output file name (ascent will add ".png")
        scenes["s1/image_name"] = "out_pipeline_ex1_contour"
        scenes["s2/plots/p1/type"] = "pseudocolor"
        scenes["s2/plots/p1/pipeline"] = "pl1"
        scenes["s2/plots/p1/field"] = "braid"
        # set the output file name (ascent will add ".png")
        scenes["s2/image_name"] = "out_pipeline_ex1_contour_blah"

        # print our full actions tree
        print(actions.to_yaml())
        # execute the actions
        a.execute(actions)

        # now exec w/o s1
        scenes.remove(path="s1")

        # print our full actions tree
        print(actions.to_yaml())
        # execute the actions
        a.execute(actions)
Example #14
def run(_input, output, schema):
    print(WARN)
    protocol = cb.determine_protocol(output)
    # Faster loader, just read metadata
    data_loader = cb.load_node_handle(_input)
    first_data = conduit.Node()
    data_loader.read(first_data, data_loader.list_child_names()[0])
    if schema == "auto":
        schema_json = first_data.to_json()
    elif "," in schema:
        sub_list = schema.split(",")
        schema_node = conduit.Node()
        for item in sub_list:
            schema_node[item] = first_data[item]
        schema_json = schema_node.to_json()
    else:
        with open(schema, "r") as f:
            schema_json = f.read()

    g = conduit.Generator(schema_json, "json")
    schema = conduit.Node()
    g.walk_external(schema)

    data_paths = []
    for path, _ in generate_scalar_path_pairs(schema):
        data_paths.append(path)
    samples = data_loader.list_child_names()

    # Walk through all the samples and create a unified list (i.e. pack into a
    # dictionary of lists)
    all_dict = {}
    for s in samples:
        filtered_node = conduit.Node()
        for path in data_paths:
            sample_path = "/".join((s, path))
            if data_loader.has_path(sample_path):
                data_loader.read(filtered_node[path], sample_path)
            else:
                # a missing value could be a problem; record it as NaN
                filtered_node[path] = np.nan
        make_data_array_dict(all_dict, filtered_node)

    for dat in all_dict.keys():
        all_dict[dat] = np.vstack(all_dict[dat])
    # Save according to output extension, either numpy or conduit-compatible
    if protocol == "npz":
        np.savez(output, **all_dict)
    else:
        n = cb.pack_conduit_node_from_dict(all_dict)
        cb.dump_node(n, output)
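generate_scalar_path_pairs is another external helper (used above to enumerate the schema's leaves). A hypothetical sketch, assuming it yields (path, node) pairs for every leaf of a conduit Node, depth first:

def generate_scalar_path_pairs(node, prefix=""):
    # hypothetical sketch: yield (path, leaf_node) for each leaf
    for child in node.children():
        path = prefix + child.name()
        sub = child.node()
        if sub.number_of_children() > 0:
            for pair in generate_scalar_path_pairs(sub, path + "/"):
                yield pair
        else:
            yield path, sub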
Example #15
def pack_conduit_node_from_dict(d):
    """
    If d is a dict, returns a conduit node, unpacked recursively using the
    dictionary to create the conduit hierarchy.

    If d is not a dict, simply returns d, to avoid extra conduit nodes being
    created. The intent is that the return value is then assigned to a conduit
    node by the caller and conduit will handle packing it appropriately.

    Conduit currently supports the following basic types:
    int32, uint32, int64, uint64, float32, float64, numpy arrays of (uint32,
    uint64, float32, float64), and strings.

    There is a bug in the current version of conduit that prevents usage of
    numpy arrays of signed ints.

    Notably, lists, dicts, tuples, and unicode strings cannot be assigned
    directly to a conduit node; numpy is your friend. This method will auto-
    convert tuples into numpy arrays of float64, will iterate over lists and
    give each element a separate entry in the hierarchy, labeled by its index,
    and assumes any dicts are meant as part of the hierarchy description, so
    the dict hierarchy ends up in the conduit hierarchy.
    """
    if isinstance(d, dict):
        node = conduit.Node()
        for k in d:
            try:
                node[str(k)] = pack_conduit_node_from_dict(d[k])
            except TypeError:
                try:
                    if isinstance(d[k], list):
                        node[str(k)] = create_conduit_node_from_list(d[k])
                    elif isinstance(d[k], tuple):
                        node[str(k)] = np.array(list(d[k]), dtype="float64")
                    else:
                        # use %-style args so logging formats the values
                        LOG.error("Conduit does not support value %r=%r",
                                  k, d[k])
                except TypeError:
                    LOG.error("Conduit does not support value %r=%r",
                              k, d[k])
        return node
    else:
        if isinstance(d, (list, tuple, np.ndarray)):
            d_a = np.asarray(d)
            node = conduit.Node()
            node["data"] = d_a
            if len(d_a.shape) > 1:
                node["metadata/shape"] = d_a.shape
            return node
        if d is None:
            return "None"
        return d
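A short usage sketch for pack_conduit_node_from_dict, assuming conduit is importable (values are illustrative):

import conduit

d = {"meta": {"name": "run_01", "count": 3}, "note": "hello"}
n = pack_conduit_node_from_dict(d)
# the dict hierarchy becomes the conduit hierarchy
print(n["meta/name"])   # run_01
print(n["meta/count"])  # 3
print(n["note"])        # hello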
Example #16
def saveDataByName(args):

    hdf5path = os.path.abspath(args.infile)
    hdf5pathOut = os.path.abspath(args.outfile)

    n = conduit.Node()
    relay.io.load(n, hdf5path)

    cmpdKey = "rec/" + args.savename

    nOut = conduit.Node()

    nOut[cmpdKey] = n[cmpdKey]

    relay.io.save(nOut, hdf5pathOut)
Example #17
def saveSDFtoHDF(args):
    if os.path.isfile(args.sdffile):
        with open(args.sdffile, 'r') as f:
            nSDF = conduit.Node()
            nSDF['SDF'] = f.read()
            hdf5path = os.path.abspath(args.outfile)
            conduit.relay.io.save_merged(nSDF, hdf5path)
Example #18
def PPL4toCDT4(args):

    nHeader = conduit.Node()
    timestamp = datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")
    nHeader['date'] = "Created by PPL4toCDT4hdf5.py at " + timestamp
    print("Created by PPL4toCDT4hdf5.py at " + timestamp)

    if not os.path.exists(args.outfile):
        os.makedirs(args.outfile)

    hdf5pathDir = os.path.abspath(args.outfile)
    hdf5path = os.path.join(hdf5pathDir, "gbsa_proc1.hdf5")
    print(hdf5path)

    conduit.relay.io.save_merged(nHeader, hdf5path)

    comDirPath = os.path.abspath(args.scrDir + "/com")
    print(comDirPath)
    os.chdir(comDirPath)
    dirs = os.listdir(".")
    for recid in dirs:
        recPath = os.path.join(comDirPath, recid + "/gbsa")
        if os.path.isdir(recPath):
            os.chdir(recPath)
            print(os.getcwd())
            if args.iszip:
                ligWorkFlowZip(recPath, recid, hdf5path)
            else:
                ligWorkFlow(recPath, recid, hdf5path)
Example #19
    def test_002_io_handle_sidre(self):
        import conduit.relay
        if conduit.relay.io.about()["protocols/hdf5"] != "enabled":
            return
        BEGIN_EXAMPLE("py_relay_io_handle_sidre")
        import conduit
        import conduit.relay.io

        # this example reads a sample hdf5 sidre style file
        input_fname = relay_test_data_path(
            "texample_sidre_basic_ds_demo.sidre_hdf5")

        # open our sidre file for read with an IOHandle
        h = conduit.relay.io.IOHandle()
        h.open(input_fname, "sidre_hdf5")

        # find the names of the children at the root
        cnames = h.list_child_names()
        print('\nChildren at root {0}'.format(cnames))

        nread = conduit.Node()
        # read the entire contents
        h.read(nread)

        print("Read Result:")
        print(nread)

        END_EXAMPLE("py_relay_io_handle_sidre")
Example #20
def make_dummy_node():
    x = conduit.Node()
    data_types = [1, 1.0, "hi", "c"]
    for d in data_types:
        key = str(type(d))
        x[key] = d
    return x
Example #21
    def test_001_basics_very_basic(self):
        BEGIN_EXAMPLE("py_basics_very_basic")
        import conduit
        n = conduit.Node()
        n["my"] = "data"
        print(n)
        END_EXAMPLE("py_basics_very_basic")
    def test_009_basics_fetch_exist(self):
        BEGIN_EXAMPLE("py_basics_fetch_exist")
        # setup a node with a leaf array
        n = conduit.Node()
        data = numpy.zeros((5, ), dtype=numpy.float64)
        n["my/path/to/data"] = data

        # access via fetch existing
        # first fetch the node
        n_data = n.fetch_existing("my/path/to/data")
        # then the value
        my_data = n_data.value()
        print("== this will be an ndarray == ")
        print("data: ", my_data)
        print("repr: ", repr(my_data))
        print()

        # using fetch_existing,
        # if the path doesn't exist - we will get an Exception
        try:
            n_data = n.fetch_existing("my/path/TYPO/data")
        except Exception as e:
            print("Here is what went wrong:")
            print(e)

        END_EXAMPLE("py_basics_fetch_exist")
Example #23
def getDataByligID(args, id=None):
    if not id:
        id = args.ligid

    hdf5path = os.path.abspath(args.infile)

    dirpath = os.path.dirname(hdf5path)
    ligpath = os.path.join(dirpath, 'lig/' + id)

    if not os.path.exists(ligpath):
        os.makedirs(ligpath)

    os.chdir(ligpath)

    n = conduit.Node()
    relay.io.load(n, hdf5path)

    cmpdKey = "lig/" + id
    print("Ligand ID", id)
    print("Ligand Name", n[cmpdKey + '/meta/name'])
    print("status", n[cmpdKey + '/status'])
    print("Mesg", n[cmpdKey + '/meta/Mesg'])
    print("GBSA", n[cmpdKey + '/meta/GBEN'])
    print("Ligand Old path", n[cmpdKey + '/meta/LigPath'])
    print("Ligand data path", os.getcwd())

    #fileList = ['LIG.inpcrd', 'LIG.lib', 'LIG.prmtop', 'LIG_min.rst', 'LIG_minGB.out', 'ligand.frcmod', 'LIG_min.pdbqt']
    itr = n[cmpdKey + "/file"].children()

    for fileItr in itr:
        with open(fileItr.name(), 'w') as f:
            f.write(fileItr.node().value())
Example #24
def test_load_handle():
    base = "_dummy"
    exts = (".h5", ".hdf5", ".json", ".yaml", ".cbin")
    x = make_dummy_node()
    y = conduit.Node()
    y["x"] = x
    y["z"] = "z"
    save_node_many(y, base=base, exts=exts)
    just_handles = load_node_many(base=base, exts=exts, path=None)
    just_x = load_node_many(base=base, exts=exts, path="x")
    bad_path1 = load_node_many(base=base, exts=exts, path="bogus")
    bad_path2 = load_node_many(base=base, exts=exts, path="")
    all_data = load_node_many(base=base, exts=exts, path="/")
    for h in just_handles:
        assert h.has_path("x")
        assert h.has_path("z")
        h.close()
    for h in just_x:
        assert nodes_equal(h, x)
    for h in bad_path1:
        assert h is None
    for h in bad_path2:
        assert h is None
    for h in all_data:
        assert nodes_equal(h, y)
    delete_data(base=base, exts=exts)
Example #25
def make_schema_compatible(original_node, add_uuid):
    node = conduit.Node()
    if add_uuid:
        node[str(uuid4())] = original_node
    else:
        node = original_node
    return node
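A usage sketch for make_schema_compatible, assuming uuid4 comes from the uuid module as it does inside the helper:

import conduit
from uuid import uuid4  # used inside make_schema_compatible

sample = conduit.Node()
sample["inputs/x"] = 1.0

wrapped = make_schema_compatible(sample, add_uuid=True)
print(wrapped.child_names())  # a single uuid-named child

same = make_schema_compatible(sample, add_uuid=False)
print(same.child_names())     # ['inputs'] -- returned unchanged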
Example #26
def dump_node(
    conduit_node,
    fname,
    dump_options=(
        ("hdf5/chunking/threshold", 2000),
        ("hdf5/chunking/chunk_size", 2000),
    ),
):
    """
    Saves a conduit node to disk. Protocol determined by fname extension.
    Protocol can be conduit_bin, hdf5, json,
    silo, json64, and probably others. Will turn on compression for hdf5.
    """
    protocol = determine_protocol(fname)
    # If hdf5, turn on compression.
    if protocol == "hdf5":
        save_options = conduit.Node()
        for opt in dump_options:
            save_options[opt[0]] = opt[1]
        try:
            conduit.relay.io.save(conduit_node, fname, options=save_options)
        except TypeError:  # Conduit version needs to be updated.
            LOG.error(
                "Unable to customize save: please upgrade conduit to "
                "expose save options!"
            )
            conduit.relay.io.save(conduit_node, fname)
    else:
        conduit.relay.io.save(conduit_node, fname)
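A usage sketch for dump_node (the file name and option values are illustrative; dump_options overrides the hdf5 chunking defaults shown above):

import conduit

n = conduit.Node()
n["field"] = 3.14
# default chunking options
dump_node(n, "out.hdf5")
# or override the hdf5 chunking threshold / chunk size
dump_node(n, "out.hdf5",
          dump_options=(("hdf5/chunking/threshold", 4000),
                        ("hdf5/chunking/chunk_size", 4000)))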
Example #27
    def test_inception(self):
        flow.Workspace.clear_supported_filter_types()
        flow.Workspace.register_builtin_filter_types()
        flow.Workspace.register_filter_type(flow.wrap_function(src))

        w = flow.Workspace()

        w.graph().add_filter("src", "s")
        #
        py_params = conduit.Node()
        py_params["source"] = py_script

        w.graph().add_filter("python_script", "py", py_params)

        w.graph().connect("s", "py", "in")

        print(w)
        w.execute()

        print("Reg from Py")
        print(w.registry())

        v = w.registry().fetch("py")

        self.assertEqual(v, 42)
Example #28
def process_args(args):
    print(WARN)
    files = args.infiles
    nfiles = len(files)
    if not args.chunk_size:
        chunk_size = nfiles
    else:
        chunk_size = args.chunk_size

    fileno = 0
    results = []
    for group in grouper(files, chunk_size):
        result = conduit.Node()
        for path in group:
            if not path:
                continue
            try:
                subnode = cb.load_node(path)
                subnode = make_schema_compatible(subnode, args.add_uuid)
                for top_path in subnode.child_names():

                    if top_path in results:
                        print("Error! Already in results: " + top_path)
                        new_path = "-".join((top_path, str(uuid4())))
                        print("Renaming duplicate to node to: " + new_path)
                    else:
                        new_path = top_path

                    result[new_path] = subnode[top_path]
                    results.append(top_path)
            except Exception:
                print("Unable to load " + path)

        cb.dump_node(result, savename(fileno, args))
        fileno += 1
Example #29
def getMetaData(args):
    hdf5path = os.path.abspath(args.infile)

    n = conduit.Node()
    relay.io.load(n, hdf5path)
    recItr = n['rec'].children()

    outfh = open(args.meta, "w")
    outfh.write(
        "Rec_Name, Status, GBSA, Volume, Cent_x, Cent_y, Cent_z, Dim_x, Dim_y, Dim_z\n"
    )

    for rec in recItr:
        nrec = rec.node()
        recName = rec.name()
        status = nrec['status']
        gbsa = nrec['meta/GBEN']
        vol = nrec['meta/Site/Volume']
        cent_x = nrec['meta/Site/Centroid/X']
        cent_y = nrec['meta/Site/Centroid/Y']
        cent_z = nrec['meta/Site/Centroid/Z']
        dim_x = nrec['meta/Site/Dimension/X']
        dim_y = nrec['meta/Site/Dimension/Y']
        dim_z = nrec['meta/Site/Dimension/Z']
        outfh.write("{}, {}, {}, {}, {}, {}, {}, {}, {}, {}\n".format(
            recName, status, gbsa, vol, cent_x, cent_y, cent_z, dim_x, dim_y,
            dim_z))
Example #30
def extractScoreOnly(args):
    outfh = open(args.scoreonly, "w")
    outfh.write("rec, lig, Scoreonly\n")

    dirpath = os.path.abspath("scratch")
    hdf5pathDir = os.path.abspath(args.indir)

    os.chdir(hdf5pathDir)
    h5files = glob.glob('dock_proc*.hdf5')

    for h5f in h5files:
        n = conduit.Node()
        conduit.relay.io.load(n, h5f)
        itrRec = n["dock"].children()
        for rec in itrRec:
            recid = rec.name()
            nrec = rec.node()
            itrLig = nrec.children()
            for lig in itrLig:
                ligid = lig.name()
                entryKey = "dock/" + recid + "/" + ligid
                ndata = n[entryKey]
                if ndata.has_path("file/scores.log"):
                    scoreslog = ndata["file/scores.log"]
                    strs = scoreslog.split()
                    if strs[0] == "Affinity:":
                        outfh.write(recid + ", " + ligid + ", " + strs[1] +
                                    "\n")