Example 1
def OpenDataFile(filename, **extraArgs):
    """Creates a reader to read the given file, if possible.
       This uses extension matching to determine the best reader possible.
       If a reader cannot be identified, then this returns None."""
    session = servermanager.ActiveConnection.Session
    reader_factor = servermanager.vtkSMProxyManager.GetProxyManager().GetReaderFactory()
    if reader_factor.GetNumberOfRegisteredPrototypes() == 0:
        reader_factor.RegisterPrototypes(session, "sources")
    first_file = filename
    if type(filename) == list:
        first_file = filename[0]
    if not reader_factor.TestFileReadability(first_file, session):
        msg = "File not readable: %s " % first_file
        raise RuntimeError(msg)
    if not reader_factor.CanReadFile(first_file, session):
        msg = "File not readable. No reader found for '%s' " % first_file
        raise RuntimeError(msg)
    prototype = servermanager.ProxyManager().GetPrototypeProxy(
        reader_factor.GetReaderGroup(), reader_factor.GetReaderName())
    xml_name = paraview.make_name_valid(prototype.GetXMLLabel())
    reader_func = _create_func(xml_name, servermanager.sources)
    if prototype.GetProperty("FileNames"):
        reader = reader_func(FileNames=filename, **extraArgs)
    else:
        reader = reader_func(FileName=filename, **extraArgs)
    return reader
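A hedged usage sketch for the function above, assuming the usual paraview.simple entry point; the file paths are hypothetical:

# Minimal usage sketch; the paths below are made up.
from paraview.simple import OpenDataFile

# A single file: the reader is picked by extension matching.
reader = OpenDataFile("/tmp/example.vtu")

# A list of files is forwarded as a series when the reader exposes "FileNames".
series = OpenDataFile(["/tmp/step_000.vtu", "/tmp/step_001.vtu"])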
Example 2
def ExecData(self, inputDS, selection, compositeDataSet = None):
    """inputDS is a non-composite data object. If it is a leaf of a composite
    data set, pass the entire data set as compositeDataSet."""

    selection_node = selection.GetNode(0)
    array_association = 1

    # convert from vtkSelectionNode's field type to vtkDataObject's field association
    if selection_node.GetFieldType() == 0:
        array_association = 1
    elif selection_node.GetFieldType() == 1:
        array_association = 0

    # wrap the data objects. makes them easier to use.
    do = dataset_adapter.WrapDataObject(inputDS)
    dsa = dataset_adapter.DataSetAttributes(
      inputDS.GetAttributes(array_association),
      do, array_association)

    new_locals = {}
    # define global variables for all the arrays.
    for arrayname in dsa.keys():
        name = paraview.make_name_valid(arrayname)
        array = dsa[arrayname]
        if compositeDataSet:
            compIter = CompositeDataArrayIterable(
                compositeDataSet, arrayname, array_association)
            new_locals[name + "_composite"] = compIter
            array.composite_iterator = compIter
        new_locals[name] = array
    new_locals["cell"] = do
    new_locals["dataset"] = do
    new_locals["input"] = do
    new_locals["element"] = do
    new_locals["id"] = arange(inputDS.GetNumberOfElements(
        array_association))

    # evaluate the query expression. The expression should return a mask which
    # is either an array or a boolean value.
    mask = None

    if len(selection_node.GetQueryString()) > 0:
        try:
            mask = eval(selection_node.GetQueryString(), globals(), new_locals)
        except NameError:
            pass

    # extract the elements from the input dataset using the mask.
    extracted_ds = ExtractElements(self, inputDS, selection, mask)

    del mask
    del new_locals
    del do
    del dsa
    return extracted_ds
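The query-evaluation step above reduces to eval over a dict of named arrays, which is why every array name must first be turned into a valid Python identifier. A standalone sketch of that idea using plain NumPy in place of the VTK array wrappers (the array name and query string are made up):

import numpy

# Stand-ins for the wrapped arrays that ExecData exposes to the query.
new_locals = {
    "Temperature": numpy.array([250.0, 310.0, 305.0, 280.0]),
    "id": numpy.arange(4),
}

# The selection query returns a boolean mask over the elements.
query = "Temperature > 300"
mask = eval(query, globals(), new_locals)
print(mask)                     # [False  True  True False]
print(new_locals["id"][mask])   # ids of the elements that satisfy the query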
Example 3
def get_arrays(attribs, controller=None):
    """Returns a 'dict' referring to arrays in dsa.DataSetAttributes or
    dsa.CompositeDataSetAttributes instance.

    When running in parallel, this method will ensure that arraynames are
    reduced across all ranks and for any arrays missing on the local process, a
    NoneArray will be added to the returned dictionary. This ensures that
    expressions evaluate without issues due to missing arrays on certain ranks.
    """
    if not isinstance(attribs, dsa.DataSetAttributes) and \
        not isinstance(attribs, dsa.CompositeDataSetAttributes):
        raise ValueError(
            "Argument must be DataSetAttributes or CompositeDataSetAttributes."
        )
    arrays = dict()
    for key in attribs.keys():
        varname = paraview.make_name_valid(key)
        arrays[varname] = attribs[key]

    # If running in parallel, ensure that the arrays are synced up so that
    # missing arrays get NoneArray assigned to them avoiding any unnecessary
    # errors when evaluating expressions.
    if controller is None and vtkMultiProcessController is not None:
        controller = vtkMultiProcessController.GetGlobalController()
    if controller and controller.IsA(
            "vtkMPIController") and controller.GetNumberOfProcesses() > 1:
        from mpi4py import MPI
        comm = vtkMPI4PyCommunicator.ConvertToPython(
            controller.GetCommunicator())
        rank = comm.Get_rank()

        # reduce the array names across processes to ensure arrays missing on
        # certain ranks are handled correctly.
        arraynames = list(arrays)  # get keys from the arrays as a list.
        # gather to root and then broadcast
        # I couldn't get Allgather/Allreduce to work properly with strings.
        gathered_names = comm.gather(arraynames, root=0)
        # gathered_names is a list of lists.
        if rank == 0:
            result = set()
            for alist in gathered_names:
                for val in alist:
                    result.add(val)
            gathered_names = [x for x in result]
        arraynames = comm.bcast(gathered_names, root=0)
        for name in arraynames:
            if name not in arrays:
                arrays[name] = dsa.NoneArray
    return arrays
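A hedged usage sketch for get_arrays, assuming it is called with the point data of a dataset wrapped by the numpy_interface dataset adapter; the sphere source is only a convenient test input:

from vtkmodules.vtkFiltersSources import vtkSphereSource
from vtkmodules.numpy_interface import dataset_adapter as dsa

src = vtkSphereSource()
src.Update()
data = dsa.WrapDataObject(src.GetOutput())

# Maps sanitized (identifier-safe) names to the point-data arrays.
arrays = get_arrays(data.PointData)
print(sorted(arrays))   # e.g. ['Normals']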
Example 4
def ExecData(self, inputDS, selection):
    """inputDS is a non-composite data object."""

    selection_node = selection.GetNode(0)
    array_association = 1

    # convert from vtkSelectionNode's field type to vtkDataObject's field association
    if (selection_node.GetFieldType() == 0):
        array_association = 1
    elif (selection_node.GetFieldType() == 1):
        array_association = 0

    # wrap the data objects. makes them easier to use.
    do = dataset_adapter.WrapDataObject(inputDS)
    dsa = dataset_adapter.DataSetAttributes(
        inputDS.GetAttributes(array_association), do, array_association)

    new_locals = {}
    # define global variables for all the arrays.
    for arrayname in dsa.keys():
        name = paraview.make_name_valid(arrayname)
        new_locals[name] = dsa[arrayname]

    new_locals["cell"] = do
    new_locals["dataset"] = do
    new_locals["input"] = do
    new_locals["element"] = do
    new_locals["id"] = arange(inputDS.GetNumberOfElements(array_association))

    # evaluate the query expression. The expression should return a mask which
    # is either an array or a boolean value.
    mask = None

    if len(selection_node.GetQueryString()) > 0:
        try:
            mask = eval(selection_node.GetQueryString(), globals(), new_locals)
        except NameError:
            pass

    # extract the elements from the input dataset using the mask.
    extracted_ds = ExtractElements(self, inputDS, selection, mask)

    del mask
    del new_locals
    del do
    del dsa
    return extracted_ds
Example 5
def get_arrays(attribs, controller=None):
    """Returns a 'dict' referring to arrays in dsa.DataSetAttributes or
    dsa.CompositeDataSetAttributes instance.

    When running in parallel, this method will ensure that arraynames are
    reduced across all ranks and for any arrays missing on the local process, a
    NoneArray will be added to the returned dictionary. This ensures that
    expressions evaluate without issues due to missing arrays on certain ranks.
    """
    if not isinstance(attribs, dsa.DataSetAttributes) and \
        not isinstance(attribs, dsa.CompositeDataSetAttributes):
        raise ValueError(
            "Argument must be DataSetAttributes or CompositeDataSetAttributes.")
    arrays = dict()
    for key in attribs.keys():
        varname = paraview.make_name_valid(key)
        arrays[varname] = attribs[key]


    # If running in parallel, ensure that the arrays are synced up so that
    # missing arrays get NoneArray assigned to them avoiding any unnecessary
    # errors when evaluating expressions.
    if controller is None and vtkMultiProcessController is not None:
        controller = vtkMultiProcessController.GetGlobalController()
    if controller and controller.IsA("vtkMPIController") and controller.GetNumberOfProcesses() > 1:
        from mpi4py import MPI
        comm = vtkMPI4PyCommunicator.ConvertToPython(controller.GetCommunicator())
        rank = comm.Get_rank()

        # reduce the array names across processes to ensure arrays missing on
        # certain ranks are handled correctly.
        arraynames = list(arrays)  # get keys from the arrays as a list.
        # gather to root and then broadcast
        # I couldn't get Allgather/Allreduce to work properly with strings.
        gathered_names = comm.gather(arraynames, root=0)
        # gathered_names is a list of lists.
        if rank == 0:
            result = set()
            for alist in gathered_names:
                for val in alist:
                    result.add(val)
            gathered_names = [x for x in result]
        arraynames = comm.bcast(gathered_names, root=0)
        for name in arraynames:
            if name not in arrays:
                arrays[name] = dsa.NoneArray
    return arrays
Example 6
def OpenDataFile(filename, **extraArgs):
    """Creates a reader to read the given file, if possible.
       This uses extension matching to determine the best reader possible.
       If a reader cannot be identified, then this returns None."""
    reader_factor = servermanager.ProxyManager().GetReaderFactory()
    if reader_factor.GetNumberOfRegisteredPrototypes() == 0:
        reader_factor.RegisterPrototypes("sources")
    cid = servermanager.ActiveConnection.ID
    if not reader_factor.TestFileReadability(filename, cid):
        raise RuntimeError("File not readable: %s " % filename)
    if not reader_factor.CanReadFile(filename, cid):
        raise RuntimeError("File not readable. No reader found for '%s' " % filename)
    prototype = servermanager.ProxyManager().GetPrototypeProxy(
        reader_factor.GetReaderGroup(), reader_factor.GetReaderName())
    xml_name = paraview.make_name_valid(prototype.GetXMLLabel())
    reader = globals()[xml_name](FileName=filename, **extraArgs)
    return reader
Example 7
def compute(inputs, association, expression):
    import paraview

    fd0 = inputs[0].GetAttributes(association)

    # Fill up the arrays dict with the attribute arrays, keyed by sanitized names.
    arrays = {}
    for key in fd0.keys():
        name = paraview.make_name_valid(key)
        arrays[name] = fd0[key]

    #  build the locals environment used to eval the expression.
    mylocals = dict(arrays.items())
    mylocals["arrays"] = arrays
    mylocals["inputs"] = inputs
    try:
        mylocals["points"] = inputs[0].Points
    except:
        pass
    retVal = eval(expression, globals(), mylocals)

    return retVal
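Every example here runs raw array names through paraview.make_name_valid before exposing them to eval, because names such as "Pressure (Pa)" are not legal Python identifiers. A rough standalone illustration of that kind of sanitization (not ParaView's actual implementation):

import re

def sanitize_name(name):
    # Keep only characters legal in a Python identifier and make sure
    # the result does not start with a digit.
    cleaned = re.sub(r"[^0-9A-Za-z_]", "", name)
    if cleaned and cleaned[0].isdigit():
        cleaned = "_" + cleaned
    return cleaned

print(sanitize_name("Pressure (Pa)"))   # PressurePa
print(sanitize_name("2theta"))          # _2theta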
Example 8
def compute(inputs, association, expression):
    import paraview

    fd0 = inputs[0].GetAttributes(association)

    # Fill up the arrays dict with the attribute arrays, keyed by sanitized names.
    arrays = {}
    for key in fd0.keys():
        name = paraview.make_name_valid(key)
        arrays[name] = fd0[key]

    #  build the locals environment used to eval the expression.
    mylocals = dict(arrays.items())
    mylocals["arrays"] = arrays
    mylocals["inputs"] = inputs
    try:
        mylocals["points"] = inputs[0].Points
    except:
        pass
    retVal = eval(expression, globals(), mylocals)

    return retVal
Example 9
def OpenDataFile(filename, **extraArgs):
    """Creates a reader to read the give file, if possible.
       This uses extension matching to determine the best reader possible.
       If a reader cannot be identified, then this returns None."""
    reader_factor = servermanager.ProxyManager().GetReaderFactory()
    if reader_factor.GetNumberOfRegisteredPrototypes() == 0:
        reader_factor.RegisterPrototypes("sources")
    session = servermanager.ActiveConnection.Session
    first_file = filename
    if type(filename) == list:
        first_file = filename[0]
    if not reader_factor.TestFileReadability(first_file, session):
        raise RuntimeError, "File not readable: %s " % first_file
    if not reader_factor.CanReadFile(first_file, session):
        raise RuntimeError, "File not readable. No reader found for '%s' " % first_file
    prototype = servermanager.ProxyManager().GetPrototypeProxy(
        reader_factor.GetReaderGroup(), reader_factor.GetReaderName())
    xml_name = paraview.make_name_valid(prototype.GetXMLLabel())
    reader_func = _create_func(xml_name, servermanager.sources)
    if prototype.GetProperty("FileNames"):
        reader = reader_func(FileNames=filename, **extraArgs)
    else:
        reader = reader_func(FileName=filename, **extraArgs)
    return reader
Example 10
def ExecData(self, inputDS, selection, compositeDataSet = None):
    """inputDS is a non-composite data object. If it is a leaf of a composite
    data set, pass the entire data set as compositeDataSet."""

    selection_node = selection.GetNode(0)
    array_association = 1

    # convert from vtkSelectionNode's field type to vtkDataObject's field association
    if selection_node.GetFieldType() == 0:
        array_association = 1
    elif selection_node.GetFieldType() == 1:
        array_association = 0

    # wrap the data objects. makes them easier to use.
    do = dataset_adapter.WrapDataObject(inputDS)
    dsa = dataset_adapter.DataSetAttributes(
      inputDS.GetAttributes(array_association),
      do, array_association)

    # Global operations like global_max, etc require that all processes have
    # all array names available on all processors.
    # Sync all of the array names if using multiple processes.
    # Use empty array by default, then override them with the data from this
    # node.
    new_locals = {}
    if vtkProcessModule.GetProcessModule().GetNumberOfLocalPartitions() > 1:
        from mpi4py import MPI
        allArrayNames = set([paraview.make_name_valid(name) for name in dsa.keys()])
        arrayNames = MPI.COMM_WORLD.allgather(list(allArrayNames))
        for rankNames in arrayNames:
            for arrayName in rankNames:
                allArrayNames.add(arrayName)
        for arrayName in allArrayNames:
            new_locals[arrayName] = dataset_adapter.VTKArray([])

    # define global variables for all the arrays.
    for arrayname in dsa.keys():
        name = paraview.make_name_valid(arrayname)
        array = dsa[arrayname]
        if compositeDataSet:
            compIter = CompositeDataArrayIterable(
                compositeDataSet, arrayname, array_association)
            new_locals[name + "_composite"] = compIter
            array.composite_iterator = compIter
        new_locals[name] = array
    new_locals["cell"] = do
    new_locals["dataset"] = do
    new_locals["input"] = do
    new_locals["element"] = do
    new_locals["id"] = arange(inputDS.GetNumberOfElements(
        array_association))

    # evaluate the query expression. The expression should return a mask which
    # is either an array or a boolean value.
    mask = None

    if len(selection_node.GetQueryString()) > 0:
        try:
            mask = eval(selection_node.GetQueryString(), globals(), new_locals)
        except NameError:
            pass

    # extract the elements from the input dataset using the mask.
    extracted_ds = ExtractElements(self, inputDS, selection, mask)

    del mask
    del new_locals
    del do
    del dsa
    return extracted_ds
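The parallel branch above synchronizes array names across ranks so that a query referencing an array missing on the local piece still evaluates. A standalone sketch of the same pattern with mpi4py (array names are placeholders; run under mpiexec with more than one rank):

from mpi4py import MPI

comm = MPI.COMM_WORLD

# Pretend each rank only sees some of the arrays locally.
local_names = ["Temperature"] if comm.Get_rank() == 0 else ["Pressure"]

# Every rank learns the union of all array names.
all_names = set()
for names in comm.allgather(local_names):
    all_names.update(names)

# Missing arrays get an empty placeholder, mirroring ExecData above.
local_arrays = {name: [] for name in all_names}
print(comm.Get_rank(), sorted(local_arrays))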
Example 11
def _make_name_valid(name):
    return paraview.make_name_valid(name)
Example 12
def _make_name_valid(name):
    return paraview.make_name_valid(name)