예제 #1
0
def run_test(numProcs):
    """Redistribute restart files onto numProcs processors and run the simulation.

    Returns the exit status of the simulation run.
    """
    # Clear out stale output files left over from a previous run
    for stale in glob.glob(runOutput):
        os.remove(stale)

    print("Running simulation on " + str(numProcs) + " processors")

    # nxpe (number of processors in x direction) is optional in BOUT.inp;
    # forward it to restart.redistribute only when it is present
    options = BoutOptionsFile(os.path.join(runOutputDir, "BOUT.inp"))
    try:
        nxpe = options["nxpe"]
    except KeyError:
        nxpe = None

    # Split the restart files across the requested number of processors
    restart.redistribute(
        numProcs,
        nxpe=nxpe,
        path=os.path.join(runOutputDir, "restart"),
        output=runOutputDir,
    )

    # Launch the executable and hand back its exit status
    print("running: " + executable)
    status, out = launch(executable, nproc=numProcs, output="test_run.log")

    return status
예제 #2
0
def generate_restarts(numProcs):
    """Prepare restart files redistributed over numProcs processors."""
    # nxpe (number of processors in x direction) is optional in BOUT.inp;
    # leave it as None when the option is absent so restart.redistribute
    # picks a layout automatically
    options = BoutOptionsFile(os.path.join(runOutputDir, "BOUT.inp"))
    nxpe = None
    try:
        nxpe = options["nxpe"]
    except KeyError:
        pass

    # Split the restart files across the requested processor count
    restart.redistribute(
        numProcs,
        nxpe=nxpe,
        path=os.path.join(runOutputDir, "restart"),
        output=runOutputDir,
    )
예제 #3
0
#!/usr/bin/env python
"""Redistribute BOUT++ restart files onto a given number of processors.

Usage: <script> npes [restarts_directory]
"""

from boutdata import restart
from sys import argv, exit

# Required first argument: target number of processors
npes = int(argv[1])

# Optional second argument: directory holding the source restart files
restarts_directory = argv[2] if len(argv) > 2 else "restarts_256x256"

restart.redistribute(npes, path=restarts_directory, output="data", myg=0)

exit(0)
예제 #4
0
 def redistributeProcs(self, oldDir, addType, npes):
     """Redistribute restart files for every scan onto npes processors.

     NOTE(review): chdir's into each scan directory and does not restore
     the original working directory afterwards — confirm callers expect this.
     """
     self.copyInpFiles(oldDir, addType)
     for scan_id in self.scanIDs:
         os.chdir('{}/{}'.format(self.runDir, scan_id))
         redistribute(npes=npes, path=oldDir, output=addType)
     # Remember the new processor count for subsequent runs
     self.nProcs = npes
예제 #5
0
File: data.py  Project: wy-dlut/BOUT-dev
    def redistribute(self,
                     npes,
                     nxpe=None,
                     mxg=2,
                     myg=2,
                     include_restarts=True):
        """Create a new set of dump files for npes processors.

        Useful for restarting simulations using more or fewer processors.

        Existing data and restart files are kept in the directory
        "redistribution_backups". redistribute() will fail if this
        directory already exists, to avoid overwriting anything

        Parameters
        ----------
        npes : int
            Number of new files to create
        nxpe : int, optional
            If nxpe is None (the default), then an 'optimal' number will be
            selected automatically
        mxg, myg : int, optional
            Number of guard cells in x, y (default: 2)
        include_restarts : bool, optional
            If True, then restart.redistribute will be used to
            redistribute the restart files also (default: True)

        """
        from boutdata.processor_rearrange import get_processor_layout, create_processor_layout
        from os import rename, path, mkdir

        # use get_processor_layout to get nx, ny and the guard-cell sizes
        # actually present in the existing dump files
        # (previously the old nxpe/nype/npes/mxsub/mysub and mz were also
        # extracted here but never used, so they have been dropped)
        old_processor_layout = get_processor_layout(DataFile(
            self._file_list[0]),
                                                    has_t_dimension=True,
                                                    mxg=mxg,
                                                    myg=myg)
        nx = old_processor_layout.nx
        ny = old_processor_layout.ny
        # overwrite the mxg/myg arguments with the values read from the files
        mxg = old_processor_layout.mxg
        myg = old_processor_layout.myg

        # calculate new processor layout
        new_processor_layout = create_processor_layout(old_processor_layout,
                                                       npes,
                                                       nxpe=nxpe)
        nxpe = new_processor_layout.nxpe
        nype = new_processor_layout.nype
        mxsub = new_processor_layout.mxsub
        mysub = new_processor_layout.mysub

        # move existing files to backup directory
        # don't overwrite backup: os.mkdir will raise exception if directory already exists
        backupdir = path.join(self._path, "redistribution_backups")
        mkdir(backupdir)
        for f in self._file_list:
            rename(f, path.join(backupdir, path.basename(f)))

        # create new output files
        outfile_list = []
        this_prefix = self._prefix
        if not this_prefix[-1] == '.':
            # ensure prefix ends with a '.'
            this_prefix = this_prefix + "."
        for i in range(npes):
            outpath = os.path.join(self._path,
                                   this_prefix + str(i) + "." + self._suffix)
            if self._suffix.split(".")[-1] in ["nc", "ncdf", "cdl"]:
                # set format option to DataFile explicitly to avoid creating netCDF3 files, which can only contain up to 2GB of data
                outfile_list.append(
                    DataFile(outpath,
                             write=True,
                             create=True,
                             format='NETCDF4'))
            else:
                outfile_list.append(DataFile(outpath, write=True, create=True))

        # Create a DataFileCache, if needed
        if self._DataFileCaching:
            DataFileCache = create_cache(backupdir, self._prefix)
        else:
            DataFileCache = None
        # read and write the data
        for v in self.varNames:
            print("processing " + v)
            # collect gathers the variable (including guard cells) from the
            # backed-up files
            data = collect(v,
                           path=backupdir,
                           prefix=self._prefix,
                           xguards=True,
                           yguards=True,
                           info=False,
                           datafile_cache=DataFileCache)
            ndims = len(data.shape)

            # write data
            for i in range(npes):
                # (ix, iy) is the position of process i in the new layout
                ix = i % nxpe
                iy = int(i / nxpe)
                outfile = outfile_list[i]
                if v == "NPES":
                    outfile.write(v, npes)
                elif v == "NXPE":
                    outfile.write(v, nxpe)
                elif v == "NYPE":
                    outfile.write(v, nype)
                elif v == "MXSUB":
                    outfile.write(v, mxsub)
                elif v == "MYSUB":
                    outfile.write(v, mysub)
                elif ndims == 0:
                    # scalar
                    outfile.write(v, data)
                elif ndims == 1:
                    # time evolving scalar
                    outfile.write(v, data)
                elif ndims == 2:
                    # Field2D
                    if data.shape != (nx + 2 * mxg, ny + 2 * myg):
                        # FieldPerp?
                        # check is not perfect, fails if ny=nz
                        raise ValueError(
                            "Error: Found FieldPerp '" + v +
                            "'. This case is not currently handled by BoutOutputs.redistribute()."
                        )
                    outfile.write(
                        v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                                iy * mysub:(iy + 1) * mysub + 2 * myg])
                elif ndims == 3:
                    # Field3D
                    if data.shape[:2] != (nx + 2 * mxg, ny + 2 * myg):
                        # evolving Field2D, but this case is not handled
                        # check is not perfect, fails if ny=nx and nx=nt
                        raise ValueError(
                            "Error: Found evolving Field2D '" + v +
                            "'. This case is not currently handled by BoutOutputs.redistribute()."
                        )
                    outfile.write(
                        v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                                iy * mysub:(iy + 1) * mysub + 2 * myg, :])
                elif ndims == 4:
                    outfile.write(
                        v, data[:, ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                                iy * mysub:(iy + 1) * mysub + 2 * myg, :])
                else:
                    print(
                        "ERROR: variable found with unexpected number of dimensions,",
                        ndims)

        for outfile in outfile_list:
            outfile.close()

        if include_restarts:
            print("processing restarts")
            from boutdata import restart
            from glob import glob
            restart_prefix = "BOUT.restart"
            restarts_list = glob(path.join(self._path, restart_prefix + "*"))

            # Move existing restart files to backup directory
            for f in restarts_list:
                rename(f, path.join(backupdir, path.basename(f)))

            # Redistribute restarts
            restart.redistribute(npes,
                                 path=backupdir,
                                 nxpe=nxpe,
                                 output=self._path,
                                 mxg=mxg,
                                 myg=myg)
예제 #6
0
    def test_redistribute_connected_doublenull(self, tmp_path):
        """
        Check for a connected double-null case using a large number of processes.
        'Large' means there is at least one process in each region with no edges
        touching another region.
        """
        npes_redistributed = 6

        grid_info = make_grid_info(nxpe=3,
                                   nype=18,
                                   ixseps1=7,
                                   ixseps2=7,
                                   xpoints=2)

        fieldperp_global_yind = 19
        fieldperp_yproc_ind = 4

        rng = np.random.default_rng(108)

        # One restart file per process (3 nxpe * 18 nype = 54 ranks, covering
        # inner/outer divertor legs and cores).  Only the three inner-core
        # ranks 12-14 carry the FieldPerp; all other ranks use the "absent"
        # marker -1.
        fieldperp_ranks = (12, 13, 14)
        restart_params = [
            (rank, fieldperp_global_yind if rank in fieldperp_ranks else -1)
            for rank in range(54)
        ]
        restarts = [
            create_restart_file(
                tmpdir=tmp_path,
                rng=rng,
                grid_info=grid_info,
                i=rank,
                fieldperp_global_yind=yind,
            )
            for rank, yind in restart_params
        ]

        expected = concatenate_data(
            restarts,
            nxpe=grid_info["NXPE"],
            fieldperp_yproc_ind=fieldperp_yproc_ind,
            has_t_dim=False,
        )

        new_path = tmp_path.joinpath("new_restarts")
        new_path.mkdir()
        restart.redistribute(npes=npes_redistributed,
                             path=tmp_path,
                             output=new_path)

        # The redistribution must create exactly one file per new process
        new_files = glob(str(new_path.joinpath("*")))
        assert len(new_files) == npes_redistributed

        # Check data in redistributed restart files
        check_redistributed_data(
            expected,
            fieldperp_global_yind=fieldperp_global_yind,
            path=new_path,
        )
예제 #7
0
    def redistribute(self, npes, nxpe=None, mxg=2, myg=2, include_restarts=True):
        """Create a new set of dump files for npes processors.

        Useful for restarting simulations using more or fewer processors.

        Existing data and restart files are kept in the directory
        "redistribution_backups". redistribute() will fail if this
        directory already exists, to avoid overwriting anything

        Parameters
        ----------
        npes : int
            Number of new files to create
        nxpe : int, optional
            If nxpe is None (the default), then an 'optimal' number will be
            selected automatically
        mxg, myg : int, optional
            Number of guard cells in x, y (default: 2)
        include_restarts : bool, optional
            If True, then restart.redistribute will be used to
            redistribute the restart files also (default: True)

        """
        from boutdata.processor_rearrange import get_processor_layout, create_processor_layout
        from os import rename, path, mkdir

        # use get_processor_layout to get nx, ny
        old_processor_layout = get_processor_layout(
            DataFile(self._file_list[0]), has_t_dimension=True, mxg=mxg, myg=myg)
        # NOTE(review): old_nxpe..old_mysub and mz below are extracted but
        # never used in this method — candidates for removal
        old_nxpe = old_processor_layout.nxpe
        old_nype = old_processor_layout.nype
        old_npes = old_processor_layout.npes
        old_mxsub = old_processor_layout.mxsub
        old_mysub = old_processor_layout.mysub
        nx = old_processor_layout.nx
        ny = old_processor_layout.ny
        mz = old_processor_layout.mz
        # overwrite the mxg/myg arguments with the values read from the files
        mxg = old_processor_layout.mxg
        myg = old_processor_layout.myg

        # calculate new processor layout
        new_processor_layout = create_processor_layout(
            old_processor_layout, npes, nxpe=nxpe)
        nxpe = new_processor_layout.nxpe
        nype = new_processor_layout.nype
        mxsub = new_processor_layout.mxsub
        mysub = new_processor_layout.mysub

        # move existing files to backup directory
        # don't overwrite backup: os.mkdir will raise exception if directory already exists
        backupdir = path.join(self._path, "redistribution_backups")
        mkdir(backupdir)
        for f in self._file_list:
            rename(f, path.join(backupdir, path.basename(f)))

        # create new output files
        outfile_list = []
        this_prefix = self._prefix
        if not this_prefix[-1] == '.':
            # ensure prefix ends with a '.'
            this_prefix = this_prefix + "."
        for i in range(npes):
            outpath = os.path.join(
                self._path, this_prefix+str(i)+"."+self._suffix)
            if self._suffix.split(".")[-1] in ["nc", "ncdf", "cdl"]:
                # set format option to DataFile explicitly to avoid creating netCDF3 files, which can only contain up to 2GB of data
                outfile_list.append(
                    DataFile(outpath, write=True, create=True, format='NETCDF4'))
            else:
                outfile_list.append(DataFile(outpath, write=True, create=True))

        # Create a DataFileCache, if needed
        if self._DataFileCaching:
            DataFileCache = create_cache(backupdir, self._prefix)
        else:
            DataFileCache = None
        # read and write the data
        for v in self.varNames:
            print("processing "+v)
            # collect gathers the full variable (guard cells included) from
            # the backed-up files
            data = collect(v, path=backupdir, prefix=self._prefix, xguards=True,
                           yguards=True, info=False, datafile_cache=DataFileCache)
            ndims = len(data.shape)

            # write data
            for i in range(npes):
                # (ix, iy) is the position of process i in the new layout
                ix = i % nxpe
                iy = int(i/nxpe)
                outfile = outfile_list[i]
                if v == "NPES":
                    outfile.write(v, npes)
                elif v == "NXPE":
                    outfile.write(v, nxpe)
                elif v == "NYPE":
                    outfile.write(v, nype)
                elif v == "MXSUB":
                    outfile.write(v, mxsub)
                elif v == "MYSUB":
                    outfile.write(v, mysub)
                elif ndims == 0:
                    # scalar
                    outfile.write(v, data)
                elif ndims == 1:
                    # time evolving scalar
                    outfile.write(v, data)
                elif ndims == 2:
                    # Field2D
                    if data.shape != (nx + 2*mxg, ny + 2*myg):
                        # FieldPerp?
                        # check is not perfect, fails if ny=nz
                        raise ValueError(
                            "Error: Found FieldPerp '"+v+"'. This case is not currently handled by BoutOutputs.redistribute().")
                    outfile.write(
                        v, data[ix*mxsub:(ix+1)*mxsub+2*mxg, iy*mysub:(iy+1)*mysub+2*myg])
                elif ndims == 3:
                    # Field3D
                    if data.shape[:2] != (nx + 2*mxg, ny + 2*myg):
                        # evolving Field2D, but this case is not handled
                        # check is not perfect, fails if ny=nx and nx=nt
                        raise ValueError("Error: Found evolving Field2D '"+v +
                                         "'. This case is not currently handled by BoutOutputs.redistribute().")
                    outfile.write(
                        v, data[ix*mxsub:(ix+1)*mxsub+2*mxg, iy*mysub:(iy+1)*mysub+2*myg, :])
                elif ndims == 4:
                    outfile.write(
                        v, data[:, ix*mxsub:(ix+1)*mxsub+2*mxg, iy*mysub:(iy+1)*mysub+2*myg, :])
                else:
                    # NOTE(review): unlike the 2D/3D cases above this only
                    # prints instead of raising — confirm whether that is
                    # intentional best-effort behavior
                    print(
                        "ERROR: variable found with unexpected number of dimensions,", ndims)

        for outfile in outfile_list:
            outfile.close()

        if include_restarts:
            print("processing restarts")
            from boutdata import restart
            from glob import glob
            restart_prefix = "BOUT.restart"
            restarts_list = glob(path.join(self._path, restart_prefix+"*"))

            # Move existing restart files to backup directory
            for f in restarts_list:
                rename(f, path.join(backupdir, path.basename(f)))

            # Redistribute restarts
            restart.redistribute(npes, path=backupdir,
                                 nxpe=nxpe, output=self._path, mxg=mxg, myg=myg)