Example #1
def _ensure_cachedir(comm=None):
    """Ensure that the TSFC kernel cache directory exists."""
    comm = dup_comm(comm or COMM_WORLD)
    if comm.rank == 0:
        if not path.exists(TSFCKernel._cachedir):
            makedirs(TSFCKernel._cachedir)
    free_comm(comm)
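
The examples in this listing share one pattern: duplicate the incoming communicator with dup_comm so collective operations cannot clash with the caller's messages, do the (often rank-0) work, then release the duplicate with free_comm. Below is a minimal, self-contained sketch of that pattern; it assumes an older pyop2 that still exports dup_comm/free_comm, and collective_mkdir is an illustrative name, not part of the library.

import os

from pyop2.mpi import COMM_WORLD, dup_comm, free_comm


def collective_mkdir(dirname, comm=None):
    # Duplicate the communicator so our collectives cannot interfere
    # with messages already in flight on the caller's communicator.
    comm = dup_comm(comm or COMM_WORLD)
    try:
        if comm.rank == 0 and not os.path.exists(dirname):
            os.makedirs(dirname)
        # Make sure the directory exists before any rank returns.
        comm.barrier()
    finally:
        free_comm(comm)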
Example #2
    def __init__(self, filename, file_mode, comm=None):
        self.comm = dup_comm(comm or COMM_WORLD)

        self._filename = filename
        self._mode = file_mode

        exists = os.path.exists(filename)
        if file_mode == 'r' and not exists:
            raise IOError("File '%s' does not exist, cannot be opened for reading" % filename)

        # Create the directory if necessary
        dirname = os.path.dirname(filename)
        try:
            os.makedirs(dirname)
        except OSError:
            pass

        # Try to use MPI
        try:
            self._h5file = h5py.File(filename, file_mode, driver="mpio", comm=self.comm)
        except NameError:  # the error you get if h5py isn't compiled against parallel HDF5
            raise RuntimeError("h5py *must* be installed with MPI support")

        if file_mode == 'r':
            nprocs = self.attributes('/')['nprocs']
            if nprocs != self.comm.size:
                raise ValueError("Process mismatch: written on %d, have %d" %
                                 (nprocs, self.comm.size))
        else:
            self.attributes('/')['nprocs'] = self.comm.size
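
For context, a stripped-down sketch of the parallel open performed above: h5py must be built against an MPI-enabled HDF5 for the "mpio" driver to be available, and the file name and attribute below are illustrative only.

from mpi4py import MPI
import h5py

comm = MPI.COMM_WORLD
# Every rank opens the same file collectively through the MPI-IO driver.
with h5py.File("checkpoint.h5", "w", driver="mpio", comm=comm) as f:
    # Record the number of writers so a later read can detect a mismatch.
    f.attrs["nprocs"] = comm.size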
Example #3
def _from_cell_list(dim, cells, coords, comm):
    """
    Create a DMPlex from a list of cells and coords.

    :arg dim: The topological dimension of the mesh
    :arg cells: The vertices of each cell
    :arg coords: The coordinates of each vertex
    :arg comm: communicator to build the mesh on.
    """
    comm = dup_comm(comm)
    if comm.rank == 0:
        cells = np.asarray(cells, dtype=PETSc.IntType)
        coords = np.asarray(coords, dtype=float)
        comm.bcast(cells.shape, root=0)
        comm.bcast(coords.shape, root=0)
        # Provide the actual data on rank 0.
        plex = PETSc.DMPlex().createFromCellList(dim, cells, coords, comm=comm)
    else:
        cell_shape = list(comm.bcast(None, root=0))
        coord_shape = list(comm.bcast(None, root=0))
        cell_shape[0] = 0
        coord_shape[0] = 0
        # Provide empty plex on other ranks
        # A subsequent call to plex.distribute() takes care of parallel partitioning
        plex = PETSc.DMPlex().createFromCellList(dim,
                                                 np.zeros(cell_shape, dtype=PETSc.IntType),
                                                 np.zeros(coord_shape, dtype=float),
                                                 comm=comm)
    free_comm(comm)
    return plex
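
A hypothetical driver for _from_cell_list, building a unit square from two triangles; it assumes petsc4py and pyop2 are initialised (for example inside a Firedrake session). Only rank 0's cells and coordinates are used; the other ranks receive an empty plex and rely on the later distribute call.

import numpy as np

from pyop2.mpi import COMM_WORLD

cells = np.array([[0, 1, 2], [1, 3, 2]])                      # two triangles
coords = np.array([[0., 0.], [1., 0.], [0., 1.], [1., 1.]])   # unit square corners
plex = _from_cell_list(2, cells, coords, comm=COMM_WORLD)
plex.distribute()   # partition the rank-0 mesh across all ranks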
Example #4
def clear_cache(comm=None):
    """Clear the Firedrake TSFC kernel cache."""
    comm = dup_comm(comm or COMM_WORLD)
    if comm.rank == 0:
        if path.exists(TSFCKernel._cachedir):
            import shutil
            shutil.rmtree(TSFCKernel._cachedir, ignore_errors=True)
            _ensure_cachedir(comm=comm)
    free_comm(comm)
Example #5
    def __init__(self, cc, ld=None, cppargs=[], ldargs=[],
                 cpp=False, comm=None):
        ccenv = 'CXX' if cpp else 'CC'
        # Ensure that this is an internal communicator.
        comm = dup_comm(comm or COMM_WORLD)
        self.comm = compilation_comm(comm)
        self._cc = os.environ.get(ccenv, cc)
        self._ld = os.environ.get('LDSHARED', ld)
        self._cppargs = cppargs + configuration['cflags'].split() + self.workaround_cflags
        self._ldargs = ldargs + configuration['ldflags'].split()
Example #6
    def __init__(self, cc, ld=None, cppargs=[], ldargs=[],
                 cpp=False, comm=None):
        ccenv = 'CXX' if cpp else 'CC'
        self._cc = os.environ.get(ccenv, cc)
        self._ld = os.environ.get('LDSHARED', ld)
        self._cppargs = cppargs + configuration['cflags'].split() + self.workaround_cflags
        self._ldargs = ldargs + configuration['ldflags'].split()
        # Ensure that this is an internal communicator.
        comm = dup_comm(comm or COMM_WORLD)
        self.comm = compilation_comm(comm)
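
Both variants above use mutable default arguments (cppargs=[], ldargs=[]), which Python evaluates once and shares between all calls. Below is a hedged re-sketch of the same constructor with that pitfall avoided; the class name is illustrative and this is not the library's code.

import os


class CompilerSketch(object):
    def __init__(self, cc, ld=None, cppargs=None, ldargs=None, cpp=False):
        ccenv = 'CXX' if cpp else 'CC'
        # Environment variables override the requested compiler and linker.
        self._cc = os.environ.get(ccenv, cc)
        self._ld = os.environ.get('LDSHARED', ld)
        # Copy into fresh lists so instances never share argument state.
        self._cppargs = list(cppargs or [])
        self._ldargs = list(ldargs or [])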
Example #7
    def __init__(self, basename, single_file=True,
                 mode=FILE_UPDATE, comm=None):
        self.comm = dup_comm(comm or COMM_WORLD)
        self.mode = mode

        self._single = single_file
        self._made_file = False
        self._basename = basename
        self._time = None
        self._tidx = -1
        self._fidx = 0
        self.new_file()
Example #8
    def __init__(self, filename, project_output=False, comm=None):
        """Create an object for outputting data for visualisation.

        This produces output in VTU format, suitable for visualisation
        with Paraview or other VTK-capable visualisation packages.


        :arg filename: The name of the output file (must end in
            ``.pvd``).
        :kwarg project_output: Should the output be projected to
            linears?  Default is to use interpolation.
        :kwarg comm: The MPI communicator to use.

        .. note::

           Visualisation is only possible for linear fields (either
           continuous or discontinuous).  All other fields are first
           either projected or interpolated to linear before storing
           for visualisation purposes.
        """
        filename = os.path.abspath(filename)
        basename, ext = os.path.splitext(filename)
        if ext not in (".pvd", ):
            raise ValueError("Only output to PVD is supported")

        comm = dup_comm(comm or COMM_WORLD)

        if comm.rank == 0:
            outdir = os.path.dirname(os.path.abspath(filename))
            if not os.path.exists(outdir):
                os.makedirs(outdir)
        comm.barrier()

        self.comm = comm
        self.filename = filename
        self.basename = basename
        self.counter = itertools.count()
        self.timestep = itertools.count()
        self.project = project_output

        if self.comm.rank == 0:
            with open(self.filename, "wb") as f:
                f.write(self._header)
                f.write(self._footer)

        self._fnames = None
        self._topology = None
        self._output_functions = weakref.WeakKeyDictionary()
        self._mappers = weakref.WeakKeyDictionary()
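
A hedged usage sketch for the constructor above, assuming it backs Firedrake's public File class; the mesh and function are created only to have something to write.

from firedrake import UnitSquareMesh, FunctionSpace, Function, File

mesh = UnitSquareMesh(4, 4)
u = Function(FunctionSpace(mesh, "CG", 1), name="u")
outfile = File("output/solution.pvd")   # the output directory is created on rank 0
outfile.write(u)                        # interpolated to linears unless project_output=True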
Example #9
    def __init__(self, filename, project_output=False, comm=None, mode="w"):
        """Create an object for outputting data for visualisation.

        This produces output in VTU format, suitable for visualisation
        with Paraview or other VTK-capable visualisation packages.


        :arg filename: The name of the output file (must end in
            ``.pvd``).
        :kwarg project_output: Should the output be projected to
            linears?  Default is to use interpolation.
        :kwarg comm: The MPI communicator to use.
        :kwarg mode: "w" to overwrite any existing file, "a" to append to an existing file.

        .. note::

           Visualisation is only possible for linear fields (either
           continuous or discontinuous).  All other fields are first
           either projected or interpolated to linear before storing
           for visualisation purposes.
        """
        filename = os.path.abspath(filename)
        basename, ext = os.path.splitext(filename)
        if ext not in (".pvd", ):
            raise ValueError("Only output to PVD is supported")

        if mode not in ["w", "a"]:
            raise ValueError("Mode must be 'a' or 'w'")
        if mode == "a" and not os.path.isfile(filename):
            mode = "w"

        comm = dup_comm(comm or COMM_WORLD)

        if comm.rank == 0 and mode == "w":
            outdir = os.path.dirname(os.path.abspath(filename))
            if not os.path.exists(outdir):
                os.makedirs(outdir)
        elif comm.rank == 0 and mode == "a":
            if not os.path.exists(os.path.abspath(filename)):
                raise ValueError("Need a file to restart from.")
        comm.barrier()

        self.comm = comm
        self.filename = filename
        self.basename = basename
        self.project = project_output
        countstart = 0

        if self.comm.rank == 0 and mode == "w":
            with open(self.filename, "wb") as f:
                f.write(self._header)
                f.write(self._footer)
        elif self.comm.rank == 0 and mode == "a":
            import xml.etree.ElementTree as ET
            tree = ET.parse(os.path.abspath(filename))
            # Count how many DataSet entries the file already has
            for parent in tree.iter():
                for child in list(parent):
                    if child.tag != "DataSet":
                        continue
                    countstart += 1

        if mode == "a":
            # Need to communicate the count across all cores involved; default op is SUM
            countstart = self.comm.allreduce(countstart)

        self.counter = itertools.count(countstart)
        self.timestep = itertools.count(countstart)

        self._fnames = None
        self._topology = None
        self._output_functions = weakref.WeakKeyDictionary()
        self._mappers = weakref.WeakKeyDictionary()
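
A hedged sketch of the mode kwarg introduced above: reopening an existing .pvd with mode="a" keeps its DataSet entries and continues counting from them. The Firedrake setup mirrors the usage sketch after Example #8 and is assumed, not part of the excerpt.

from firedrake import UnitSquareMesh, FunctionSpace, Function, File

mesh = UnitSquareMesh(4, 4)
u = Function(FunctionSpace(mesh, "CG", 1), name="u")
File("output/solution.pvd").write(u)            # first run creates the file
File("output/solution.pvd", mode="a").write(u)  # a later run appends to it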
Example #10
    def __init__(self, plex, name, reorder, distribute):
        """Half-initialise a mesh topology.

        :arg plex: :class:`DMPlex` representing the mesh topology
        :arg name: name of the mesh
        :arg reorder: whether to reorder the mesh (bool)
        :arg distribute: whether to distribute the mesh to parallel processes
        """
        # Do some validation of the input mesh
        dmplex.validate_mesh(plex)
        utils._init()

        self._plex = plex
        self.name = name
        self.comm = dup_comm(plex.comm.tompi4py())

        # A cache of shared function space data on this mesh
        self._shared_data_cache = defaultdict(dict)

        # Cell subsets for integration over subregions
        self._subsets = {}
        # Mark exterior and interior facets
        # Note.  This must come before distribution, because otherwise
        # DMPlex will consider facets on the domain boundary to be
        # exterior, which is wrong.
        label_boundary = (self.comm.size == 1) or distribute
        dmplex.label_facets(plex, label_boundary=label_boundary)

        # Distribute the dm to all ranks
        if self.comm.size > 1 and distribute:
            # We distribute with overlap zero, in case we're going to
            # refine this mesh in parallel.  Later, when we actually use
            # it, we grow the halo.
            plex.distribute(overlap=0)

        dim = plex.getDimension()

        cStart, cEnd = plex.getHeightStratum(0)  # cells
        cell_nfacets = plex.getConeSize(cStart)

        self._grown_halos = False
        self._ufl_cell = ufl.Cell(_cells[dim][cell_nfacets])

        def callback(self):
            """Finish initialisation."""
            del self._callback
            if self.comm.size > 1:
                self._plex.distributeOverlap(1)
            self._grown_halos = True

            if reorder:
                with timed_region("Mesh: reorder"):
                    old_to_new = self._plex.getOrdering(PETSc.Mat.OrderingType.RCM).indices
                    reordering = np.empty_like(old_to_new)
                    reordering[old_to_new] = np.arange(old_to_new.size, dtype=old_to_new.dtype)
            else:
                # No reordering
                reordering = None
            self._did_reordering = bool(reorder)

            # Mark OP2 entities and derive the resulting Plex renumbering
            with timed_region("Mesh: numbering"):
                dmplex.mark_entity_classes(self._plex)
                self._entity_classes = dmplex.get_entity_classes(self._plex)
                self._plex_renumbering = dmplex.plex_renumbering(self._plex,
                                                                 self._entity_classes,
                                                                 reordering)

                # Derive a cell numbering from the Plex renumbering
                entity_dofs = np.zeros(dim+1, dtype=np.int32)
                entity_dofs[-1] = 1

                self._cell_numbering = self._plex.createSection([1], entity_dofs,
                                                                perm=self._plex_renumbering)
                entity_dofs[:] = 0
                entity_dofs[0] = 1
                self._vertex_numbering = self._plex.createSection([1], entity_dofs,
                                                                  perm=self._plex_renumbering)

                entity_dofs[:] = 0
                entity_dofs[-2] = 1
                facet_numbering = self._plex.createSection([1], entity_dofs,
                                                           perm=self._plex_renumbering)
                self._facet_ordering = dmplex.get_facet_ordering(self._plex, facet_numbering)
        self._callback = callback
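
The constructor above defers the expensive work (overlap growth, reordering, renumbering) into a stored callback that runs exactly once, later. Below is a minimal, library-free sketch of that deferred-initialisation idiom; the class and attribute names are illustrative.

class LazyInit(object):
    def __init__(self):
        def callback(self):
            # Finish initialisation exactly once.
            del self._callback          # make a second call impossible
            self.ready = True           # stand-in for halo growth and renumbering
        self._callback = callback

    def finish(self):
        if hasattr(self, "_callback"):
            self._callback(self)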
Example #11
    def __init__(self, filename, project_output=False, comm=None, restart=0):
        """Create an object for outputting data for visualisation.

        This produces output in VTU format, suitable for visualisation
        with Paraview or other VTK-capable visualisation packages.


        :arg filename: The name of the output file (must end in
            ``.pvd``).
        :kwarg project_output: Should the output be projected to
            linears?  Default is to use interpolation.
        :kwarg comm: The MPI communicator to use.
        :kwarg restart: Restart at count.

        .. note::

           Visualisation is only possible for linear fields (either
           continuous or discontinuous).  All other fields are first
           either projected or interpolated to linear before storing
           for visualisation purposes.
        """
        filename = os.path.abspath(filename)
        basename, ext = os.path.splitext(filename)
        if ext not in (".pvd", ):
            raise ValueError("Only output to PVD is supported")

        comm = dup_comm(comm or COMM_WORLD)

        if comm.rank == 0 and restart == 0:
            outdir = os.path.dirname(os.path.abspath(filename))
            if not os.path.exists(outdir):
                os.makedirs(outdir)
        elif comm.rank == 0:
            if not os.path.exists(os.path.abspath(filename)):
                raise ValueError("Need a file to restart from.")
        comm.barrier()

        self.comm = comm
        self.filename = filename
        self.basename = basename
        self.counter = itertools.count()
        self.timestep = itertools.count()
        self.project = project_output

        if self.comm.rank == 0 and restart == 0:
            with open(self.filename, "wb") as f:
                f.write(self._header)
                f.write(self._footer)
        elif self.comm.rank == 0:
            import xml.etree.ElementTree as ET
            tree = ET.parse(os.path.abspath(filename))
            # Remove parts we want to discard
            for parent in tree.iter():
                for child in list(parent):
                    if child.tag != "DataSet":
                        continue
                    if restart > 0:
                        next(self.counter)
                        next(self.timestep)
                        restart -= 1
                    else:
                        parent.remove(child)
            with open(self.filename, "wb") as f:
                tree.write(f)

        self._fnames = None
        self._topology = None
        self._output_functions = weakref.WeakKeyDictionary()
        self._mappers = weakref.WeakKeyDictionary()
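
A hedged usage sketch of the restart kwarg above: keep the first ten DataSet entries of an existing .pvd, discard the rest, and continue writing from there. As before, it is assumed that this constructor is reachable through Firedrake's File class, and the setup is only there to have a function to write.

from firedrake import UnitSquareMesh, FunctionSpace, Function, File

mesh = UnitSquareMesh(4, 4)
u = Function(FunctionSpace(mesh, "CG", 1), name="u")
outfile = File("output/solution.pvd", restart=10)
outfile.write(u)   # recorded as the eleventh entry (index 10)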
Example #12
import numpy as np
import weakref
from collections import defaultdict

from pyop2.mpi import COMM_WORLD, MPI, dup_comm, free_comm

from firedrake.logging import debug, warning
from firedrake.parameters import parameters
from firedrake.petsc import PETSc

try:
    # Estimate the amount of memory available per core.
    import psutil
    memory = np.array([psutil.virtual_memory().total/psutil.cpu_count()])
    if COMM_WORLD.size > 1:
        comm = dup_comm(COMM_WORLD)
        comm.Allreduce(MPI.IN_PLACE, memory, MPI.MIN)
        free_comm(comm)
except (ImportError, AttributeError):
    memory = None


class _DependencySnapshot(object):
    """Record the dependencies of a form at a particular point in order to
    establish whether a cached form is valid."""

    def __init__(self, form):

        # For each dependency, we store a weak reference and the
        # current version number.
        ref = lambda dep: (weakref.ref(dep), dep.dat._version)
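
A standalone sketch of the memory estimate above using plain mpi4py: take the minimum per-core memory across all ranks so every process ends up with the same, conservative figure. It assumes psutil is installed.

import numpy as np
import psutil
from mpi4py import MPI

# Bytes of RAM per core on this node, as a one-element array for Allreduce.
memory = np.array([psutil.virtual_memory().total / psutil.cpu_count()])
# In-place minimum across all ranks: the smallest node sets the bound.
MPI.COMM_WORLD.Allreduce(MPI.IN_PLACE, memory, MPI.MIN)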
Example #13
    def __init__(self, filename, project_output=False, comm=None, mode="w",
                 target_degree=None, target_continuity=None, adaptive=False):
        """Create an object for outputting data for visualisation.

        This produces output in VTU format, suitable for visualisation
        with Paraview or other VTK-capable visualisation packages.


        :arg filename: The name of the output file (must end in
            ``.pvd``).
        :kwarg project_output: Should the output be projected to
            a computed output space?  Default is to use interpolation.
        :kwarg comm: The MPI communicator to use.
        :kwarg mode: "w" to overwrite any existing file, "a" to append to an existing file.
        :kwarg target_degree: override the degree of the output space.
        :kwarg target_continuity: override the continuity of the output space;
            A UFL :class:`~.SobolevSpace` object: `H1` for a
            continuous output and `L2` for a discontinuous output.
        :kwarg adaptive: allow different meshes at different exports if `True`.

        .. note::

           Visualisation is only possible for Lagrange fields (either
           continuous or discontinuous).  All other fields are first
           either projected or interpolated to Lagrange elements
           before storing for visualisation purposes.
        """
        filename = os.path.abspath(filename)
        basename, ext = os.path.splitext(filename)
        if ext not in (".pvd", ):
            raise ValueError("Only output to PVD is supported")

        if mode not in ["w", "a"]:
            raise ValueError("Mode must be 'a' or 'w'")
        if mode == "a" and not os.path.isfile(filename):
            mode = "w"

        comm = dup_comm(comm or COMM_WORLD)

        if comm.rank == 0 and mode == "w":
            outdir = os.path.dirname(os.path.abspath(filename))
            if not os.path.exists(outdir):
                os.makedirs(outdir)
        elif comm.rank == 0 and mode == "a":
            if not os.path.exists(os.path.abspath(filename)):
                raise ValueError("Need a file to restart from.")
        comm.barrier()

        self.comm = comm
        self.filename = filename
        self.basename = basename
        self.project = project_output
        self.target_degree = target_degree
        self.target_continuity = target_continuity
        if target_degree is not None and target_degree < 0:
            raise ValueError("Invalid target_degree")
        if target_continuity is not None and target_continuity not in {ufl.H1, ufl.L2}:
            raise ValueError("target_continuity must be either 'H1' or 'L2'.")
        countstart = 0

        if self.comm.rank == 0 and mode == "w":
            with open(self.filename, "wb") as f:
                f.write(self._header)
                f.write(self._footer)
        elif self.comm.rank == 0 and mode == "a":
            import xml.etree.ElementTree as ElTree
            tree = ElTree.parse(os.path.abspath(filename))
            # Count how many DataSet entries the file already has
            for parent in tree.iter():
                for child in list(parent):
                    if child.tag != "DataSet":
                        continue
                    countstart += 1

        if mode == "a":
            # Need to communicate the count across all cores involved; default op is SUM
            countstart = self.comm.allreduce(countstart)

        self.counter = itertools.count(countstart)
        self.timestep = itertools.count(countstart)

        self._fnames = None
        self._topology = None
        self._adaptive = adaptive
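
A hedged usage of the constructor above, again assuming it is exposed as Firedrake's File class: force a piecewise-linear, discontinuous output space regardless of the stored functions' own elements (ufl.L2 requests discontinuous output, ufl.H1 continuous).

import ufl
from firedrake import UnitSquareMesh, FunctionSpace, Function, File

mesh = UnitSquareMesh(4, 4)
u = Function(FunctionSpace(mesh, "CG", 2), name="u")
outfile = File("output/solution.pvd", target_degree=1, target_continuity=ufl.L2)
outfile.write(u)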
Example #14
def _from_triangle(filename, dim, comm):
    """Read a set of triangle mesh files from `filename`.

    :arg dim: The embedding dimension.
    :arg comm: communicator to build the mesh on.
    """
    basename, ext = os.path.splitext(filename)

    comm = dup_comm(comm)
    if comm.rank == 0:
        try:
            facetfile = open(basename+".face")
            tdim = 3
        except:
            try:
                facetfile = open(basename+".edge")
                tdim = 2
            except:
                facetfile = None
                tdim = 1
        if dim is None:
            dim = tdim
        comm.bcast(tdim, root=0)

        with open(basename+".node") as nodefile:
            header = np.fromfile(nodefile, dtype=np.int32, count=2, sep=' ')
            nodecount = header[0]
            nodedim = header[1]
            assert nodedim == dim
            coordinates = np.loadtxt(nodefile, usecols=range(1, dim+1), skiprows=1)
            assert nodecount == coordinates.shape[0]

        with open(basename+".ele") as elefile:
            header = np.fromfile(elefile, dtype=np.int32, count=2, sep=' ')
            elecount = header[0]
            eledim = header[1]
            eles = np.loadtxt(elefile, usecols=range(1, eledim+1), dtype=np.int32, skiprows=1)
            assert elecount == eles.shape[0]

        cells = eles - 1  # Triangle numbers vertices from 1; shift to 0-based
    else:
        tdim = comm.bcast(None, root=0)
        cells = None
        coordinates = None
    plex = _from_cell_list(tdim, cells, coordinates, comm=comm)

    # Apply boundary IDs
    if comm.rank == 0:
        facets = None
        try:
            header = np.fromfile(facetfile, dtype=np.int32, count=2, sep=' ')
            edgecount = header[0]
            facets = np.loadtxt(facetfile, usecols=range(1, tdim+2), dtype=np.int32, skiprows=0)
            assert edgecount == facets.shape[0]
        finally:
            facetfile.close()

        if facets is not None:
            vStart, vEnd = plex.getDepthStratum(0)   # vertices
            for facet in facets:
                bid = facet[-1]
                vertices = [v + vStart - 1 for v in facet[:-1]]
                join = plex.getJoin(vertices)
                plex.setLabelValue("boundary_ids", join[0], bid)

    free_comm(comm)
    return plex
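
A hypothetical driver for _from_triangle: the Triangle/TetGen files mesh.node, mesh.ele and, if present, mesh.edge or mesh.face are assumed to exist alongside each other, and only rank 0 reads them.

from pyop2.mpi import COMM_WORLD

plex = _from_triangle("mesh.node", dim=2, comm=COMM_WORLD)
plex.distribute()   # partition the rank-0 mesh across all ranks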