Example 1
def new_to_old(filename):
    f = DataFile(filename)

    newfile = DataFile(os.path.splitext(filename)[0] + ".BOUT_metrics.nc",
                       create=True)

    name_changes = {
        "g_yy": "g_22",
        "gyy": "g22",
        "gxx": "g11",
        "gxz": "g13",
        "gzz": "g33",
        "g_xx": "g_11",
        "g_xz": "g_13",
        "g_zz": "g_33"
    }

    for key in f.keys():
        name = key
        if name in name_changes:
            name = name_changes[name]
        newfile.write(name, np.asarray(f.read(key)))

    f.close()

    # List the contents while the new file is still open
    newfile.list()
    newfile.close()
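
A minimal usage sketch for the example above. The file name is hypothetical, and the call assumes new_to_old is defined in the current module together with the usual imports (os, numpy as np, and DataFile from boututils.datafile):

# "tokamak_grid.nc" is a placeholder for a metric file using the old g_yy/gyy names
new_to_old("tokamak_grid.nc")
# A copy named "tokamak_grid.BOUT_metrics.nc" is written using the g_22/g22 naming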
Example 2
def file_import(name):
    f = DataFile(name)  # Open file
    varlist = f.list()  # Get list of all variables in file
    data = {}  # Create empty dictionary
    for v in varlist:
        data[v] = f.read(v)
    f.close()
    return data
Example 3
def file_import(name):
    f = DataFile(name)  # Open file
    varlist = f.list()  # Get list of all variables in file
    data = {}  # Create empty dictionary
    for v in varlist:
        data[v] = f.read(v)
    f.close()
    return data
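
A short usage sketch for file_import (the grid file and the variable name are hypothetical):

grid = file_import("bout_grid.nc")  # hypothetical NetCDF grid file
print(sorted(grid.keys()))          # names of all variables found in the file
Rxy = grid["Rxy"]                   # assuming the file contains a variable called "Rxy"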
Example 4
def check_test():

    print("Checking output")

    numFailures = 0
    numTests = 0
    try:
        run = BoutOutputs(runOutputDir, info=False, yguards=True)
    except TypeError:
        # Option not implemented in boutdata.data
        run = BoutOutputs(runOutputDir, yguards=True)
    runExpected = DataFile(runExpectedOutput)

    # Get number of guard cells
    m_guards = (run["MXG"], run["MYG"])
    m_guards_expected = (runExpected["MXG"], runExpected["MYG"])

    # Get names of evolving variables, which we will test
    try:
        evolvingVariables = run.evolvingVariables()
    except AttributeError:
        # This part should be deleted once BOUT++ repo is updated so that the above works everywhere
        print("Warning: Updated boutdata.data not found")
        from get_evolving_fields import get_evolving_fields
        evolvingVariables = get_evolving_fields(run)

    # Test output
    for name in evolvingVariables:
        if len(run[name].shape) == 1:
            # scalars are diagnostic outputs like wall-time, so not useful to test
            continue
        # exclude second x guard cells as they are not used and may not always be set consistently
        data = testfield_slice(run[name], m_guards)
        expectedData = testfield_slice(runExpected.read(name),
                                       m_guards_expected)
        diff_max, norm_max = testfield_max(data, expectedData)
        numTests = numTests + 1
        if diff_max / norm_max > tolerance and diff_max > abs_tolerance:
            numFailures = numFailures + 1
            print("FAILURE: Test of max error " + str(numTests) + " (" + name +
                  ") failed, with diff_max/norm_max=" +
                  str(diff_max / norm_max) + " and diff_max=" + str(diff_max))
        else:
            print("Test of max error " + str(numTests) + " (" + name +
                  ") passed, with diff_max/norm_max=" +
                  str(diff_max / norm_max) + " and diff_max=" + str(diff_max))
        diff_mean, norm_mean = testfield_mean(data, expectedData)
        print("Test of mean error " + str(numTests) + " (" + name +
              ") diff_mean/norm_mean=" + str(diff_mean / norm_mean) +
              " and diff_mean=" + str(diff_mean))

    print(
        str(numTests - numFailures) + "/" + str(numTests) +
        " tests passed in " + testname)

    return numFailures, numTests
def calc_curvilinear_curvature(fname, field, grid):
    from scipy.signal import savgol_filter

    f = DataFile(str(fname), write=True)
    B = f.read("B")
    dBydz = np.zeros(np.shape(B))
    dBydx = np.zeros(np.shape(B))
    dBxdz = np.zeros(np.shape(B))
    dBzdx = np.zeros(np.shape(B))
    dx = grid.metric()["dx"]
    dz = grid.metric()["dz"]
    g_11 = grid.metric()["g_xx"]
    g_22 = grid.metric()["g_yy"]
    g_33 = grid.metric()["g_zz"]
    g_12 = 0.0
    g_13 = grid.metric()["g_xz"]
    g_23 = 0.0
    # Jacobian J = sqrt(det(g_ij)); note the last term uses g_13 * g_22
    J = np.sqrt(g_11 * (g_22 * g_33 - g_23 * g_23) + g_12 *
                (g_13 * g_23 - g_12 * g_33) + g_13 *
                (g_12 * g_23 - g_13 * g_22))
    Bx_smooth = np.zeros(B.shape)
    By_smooth = np.zeros(B.shape)
    Bz_smooth = np.zeros(B.shape)

    for y in np.arange(0, B.shape[1]):
        pol, _ = grid.getPoloidalGrid(y)
        R = pol.R
        Z = pol.Z
        for x in np.arange(0, B.shape[0]):
            # Window length is forced to an odd integer, as savgol_filter expects
            Bx_smooth[x, y, :] = savgol_filter(
                field.Bxfunc(R[x, :], y, Z[x, :]),
                int(np.ceil(B.shape[-1] / 21) // 2 * 2 + 1), 5)
            By_smooth[x, y, :] = savgol_filter(
                field.Byfunc(R[x, :], y, Z[x, :]),
                int(np.ceil(B.shape[-1] / 21) // 2 * 2 + 1), 5)

            dBydz[x, y, :] = calc.deriv(By_smooth[x, y, :]) / dz[x, y, :]
            dBxdz[x, y, :] = calc.deriv(Bx_smooth[x, y, :]) / dz[x, y, :]
        for z in np.arange(0, B.shape[-1]):
            Bz_smooth[:, y, z] = savgol_filter(
                field.Bzfunc(R[:, z], y, Z[:, z]),
                int(np.ceil(B.shape[0] / 7) // 2 * 2 + 1), 5)
            dBzdx[:, y, z] = calc.deriv(Bz_smooth[:, y, z]) / dx[:, y, z]
            dBydx[:, y, z] = calc.deriv(By_smooth[:, y, z]) / dx[:, y, z]

    bxcvx = (-1 / J) * (dBydz / B**2.)
    bxcvy = (1 / J) * ((dBxdz - dBzdx) / B**2.)
    bxcvz = (1 / J) * (dBydx / B**2.)

    f.write('bxcvz', bxcvz)
    f.write('bxcvx', bxcvx)
    f.write('bxcvy', bxcvy)
    f.close()
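
The savgol_filter calls above compute their window length with the np.ceil(...) // 2 * 2 + 1 idiom; a small self-contained sketch of that arithmetic (the grid size is only an illustration):

import numpy as np

nz = 256  # illustrative number of z points
window = int(np.ceil(nz / 21) // 2 * 2 + 1)  # round ceil(nz/21) down to even, then add 1
print(window)  # 13: an odd window length, as savgol_filter expects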
Example 6
def change_variable(filename, variable, new_value):
    f = DataFile(filename)

    # Name the copy after the variable being changed, keeping the file extension
    base, ext = os.path.splitext(filename)
    newfile = DataFile(base + "." + str(variable) + "." + str(new_value) + ext,
                       create=True)

    for key in f.keys():
        if key == str(variable):
            # Replace the requested variable with the new value
            newfile.write(key, np.asarray(new_value))
        else:
            # Copy everything else unchanged
            newfile.write(key, np.asarray(f.read(key)))

    f.close()

    newfile.list()
    newfile.close()
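
A usage sketch for the corrected function (file and variable names are hypothetical):

# Copies "grid.nc" to "grid.Bxy.2.0.nc", replacing the variable "Bxy" with the value 2.0
change_variable("grid.nc", "Bxy", 2.0)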
Example 7
def file_import(name):
    """Read all variables from file into a dictionary

    Parameters
    ----------
    name : str
        Name of file to read

    Returns
    -------
    dict
        Dictionary containing all the variables in the file
    """
    f = DataFile(name)  # Open file
    varlist = f.list()  # Get list of all variables in file
    data = {}  # Create empty dictionary
    for v in varlist:
        data[v] = f.read(v)
    f.close()
    return data
Example 8
def file_import(name):
    """Read all variables from file into a dictionary

    Parameters
    ----------
    name : str
        Name of file to read

    Returns
    -------
    dict
        Dictionary containing all the variables in the file
    """
    f = DataFile(name)  # Open file
    varlist = f.list()  # Get list of all variables in file
    data = {}  # Create empty dictionary
    for v in varlist:
        data[v] = f.read(v)
    f.close()
    return data
Example 9
def resizeY(newy,
            path="data",
            output=".",
            informat="nc",
            outformat=None,
            myg=2):
    """Increase the number of Y points in restart files

    NOTE:
        * Can't overwrite

    Parameters
    ----------
    newy : int
        ny for the new file
    path : str, optional
        Path to original restart files (default: "data")
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)
    myg : int, optional
        Number of ghost points in y (default: 2)

    Returns
    -------
    True on success, else False

    TODO
    ----
    - Replace printing errors with raising `ValueError`
    - Make informat work like `redistribute`

    """

    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        outfname = os.path.join(output,
                                "BOUT.restart." + str(i) + "." + outformat)

        print("Processing %s -> %s" % (infname, outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information
        for var in ["hist_hi", "NXPE", "NYPE", "tt"]:
            data = infile.read(var)
            try:
                # Convert to scalar if necessary
                data = data[0]
            except:
                pass
            outfile.write(var, data)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 3:
                # Could be an evolving variable [x,y,z]

                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny, nz = indata.shape

                # y coordinate in input and output data
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy, nz])

                for x in range(nx):
                    for z in range(nz):
                        f = interp1d(iny,
                                     indata[x, :, z],
                                     bounds_error=False,
                                     fill_value=0.0)
                        outdata[x, :, z] = f(outy)

                outfile.write(var, outdata)
            elif infile.ndims(var) == 2:
                # Assume evolving variable [x,y]
                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny = indata.shape

                # y coordinate in input and output data
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy])

                for x in range(nx):
                    f = interp1d(iny,
                                 indata[x, :],
                                 bounds_error=False,
                                 fill_value=0.0)
                    outdata[x, :] = f(outy)

                outfile.write(var, outdata)
            else:
                # Copy variable
                print(" -> Copying " + var)

                # Read variable from input
                data = infile.read(var)
                try:
                    # Convert to scalar if necessary
                    data = data[0]
                except:
                    pass
                outfile.write(var, data)

        infile.close()
        outfile.close()

    return True
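
A usage sketch (directory names are hypothetical; output must differ from path because the function cannot overwrite):

# Interpolate every BOUT.restart.*.nc in "data" so that each 3D variable has
# 128 y points (including the 2*myg guard cells)
ok = resizeY(128, path="data", output="resized_restarts", informat="nc")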
Example 10
def redistribute(npes,
                 path="data",
                 nxpe=None,
                 output=".",
                 informat=None,
                 outformat=None,
                 mxg=2,
                 myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the
    branch cuts. In this respect :py:func:`restart.split` is
    safer. However, BOUT++ checks the topology during initialisation
    anyway so this is not too serious.

    Parameters
    ----------
    npes : int
        Number of processors for the new restart files
    path : str, optional
        Path to original restart files (default: "data")
    nxpe : int, optional
        Number of processors to use in the x-direction (determines
        split: npes = nxpe * nype). Default is None which uses the
        same algorithm as BoutMesh (but without topology information)
        to determine a suitable value for nxpe.
    output : str, optional
        Location to save new restart files (default: current directory)
    informat : str, optional
        Specify file format of old restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default uses the format of
        the first 'BOUT.restart.*' file listed by glob.glob.
    outformat : str, optional
        Specify file format of new restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default is to use the same
        as informat.

    Returns
    -------
    True on success

    TODO
    ----
    - Replace printing errors with raising `ValueError`

    """

    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_processor_layout = get_processor_layout(f, has_t_dimension=False)
    print("Grid sizes: ", old_processor_layout.nx, old_processor_layout.ny,
          old_processor_layout.mz)

    if nfiles != old_processor_layout.npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_processor_layout.npes))
        nfiles = old_processor_layout.npes

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    try:
        new_processor_layout = create_processor_layout(old_processor_layout,
                                                       npes,
                                                       nxpe=nxpe)
    except ValueError as e:
        print("Could not find valid processor split. " + str(e))
        return False

    nx = old_processor_layout.nx
    ny = old_processor_layout.ny
    mz = old_processor_layout.mz
    mxg = old_processor_layout.mxg
    myg = old_processor_layout.myg
    old_npes = old_processor_layout.npes
    old_nxpe = old_processor_layout.nxpe
    old_nype = old_processor_layout.nype
    old_mxsub = old_processor_layout.mxsub
    old_mysub = old_processor_layout.mysub

    nxpe = new_processor_layout.nxpe
    nype = new_processor_layout.nype
    mxsub = new_processor_layout.mxsub
    mysub = new_processor_layout.mysub

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output,
                               "BOUT.restart." + str(i) + "." + outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))
    infile_list = []
    for i in range(old_npes):
        inpath = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        infile_list.append(DataFile(inpath))

    for v in var_list:
        ndims = f.ndims(v)

        # collect data
        if ndims == 0:
            # scalar
            data = f.read(v)
        elif ndims == 2:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg +
                     ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg +
                     iyend] = infile_list[i].read(v)[ixstart:old_mxsub +
                                                     2 * mxg + ixend,
                                                     iystart:old_mysub +
                                                     2 * myg + iyend]
            data = BoutArray(data, attributes=infile_list[0].attributes(v))
        elif ndims == 3:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg +
                     ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg +
                     iyend, :] = infile_list[i].read(v)[ixstart:old_mxsub +
                                                        2 * mxg + ixend,
                                                        iystart:old_mysub +
                                                        2 * myg + iyend, :]
            data = BoutArray(data, attributes=infile_list[0].attributes(v))
        else:
            print(
                "ERROR: variable found with unexpected number of dimensions,",
                ndims, v)
            return False

        # write data
        for i in range(npes):
            ix = i % nxpe
            iy = int(i / nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif v == "NYPE":
                outfile.write(v, nype)
            elif ndims == 0:
                # scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg])
            elif ndims == 3:
                # Field3D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg, :])
            else:
                print(
                    "ERROR: variable found with unexpected number of dimensions,",
                    f.ndims(v))

    f.close()
    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()

    return True
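
A usage sketch (directory names are hypothetical; output must not equal path):

# Repack the restart files found in "data" into a layout for 16 processors
redistribute(16, path="data", output="data_16proc")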
Example 11
def resizeY(newy, path="data", output=".", informat="nc", outformat=None, myg=2):
    """
    Resize all the restart files in Y
    """

    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)

        print("Processing %s -> %s", infname, outfname)

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information
        for var in ["hist_hi", "NPES", "NXPE", "tt"]:
            data = infile.read(var)
            try:
                # Convert to scalar if necessary
                data = data[0]
            except:
                pass
            outfile.write(var, data)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 3:
                # Could be an evolving variable [x,y,z]

                print(" -> " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny, nz = indata.shape

                # y coordinate in input and output data
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy, nz])

                for x in range(nx):
                    for z in range(nz):
                        f = interp1d(iny, indata[x, :, z], bounds_error=False, fill_value=0.0)
                        outdata[x, :, z] = f(outy)

                outfile.write(var, outdata)
        infile.close()
        outfile.close()
Example 12
def redistribute(npes, path="data", nxpe=None, output=".", informat=None, outformat=None, mxg=2, myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the branch cuts. In this respect restart.split is safer. However, BOUT++ checks the topology during initialisation anyway so this is not too serious.

    Parameters
    ----------
    npes : int
        number of processors for the new restart files
    path : string, optional
        location of old restart files
    nxpe : int, optional
        number of processors to use in the x-direction (determines split: npes = nxpe * nype). Default is None which uses the same algorithm as BoutMesh (but without topology information) to determine a suitable value for nxpe.
    output : string, optional
        location to save new restart files
    informat : string, optional
        specify file format of old restart files (must be a suffix understood by DataFile, e.g. 'nc'). Default uses the format of the first 'BOUT.restart.*' file listed by glob.glob.
    outformat : string, optional
        specify file format of new restart files (must be a suffix understood by DataFile, e.g. 'nc'). Default is to use the same as informat.

    Returns
    -------
    True on success
    """

    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read("NPES")
    old_nxpe = f.read("NXPE")
    old_nype = int(old_npes / old_nxpe)

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            if old_mxsub < 0:
                if s[0] == 1:
                    old_mxsub = 1
                    mxg = 0
                elif s[0] == 3:
                    old_mxsub = 1
                    mxg = 1
                else:
                    print("Number of x points is wrong?")
                    return False

            old_mysub = s[1] - 2 * myg
            if old_mysub < 0:
                if s[1] == 1:
                    old_mysub = 1
                    myg = 0
                elif s[1] == 3:
                    old_mysub = 1
                    myg = 1
                else:
                    print("Number of y points is wrong?")
                    return False

            mz = s[2]
            break

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print("Grid sizes: ", nx, ny, mz)

    if nxpe is None:  # Copy algorithm from BoutMesh for selecting nxpe
        ideal = sqrt(float(nx) * float(npes) / float(ny))  # Results in square domain

        for i in range(1, npes + 1):
            if npes % i == 0 and nx % i == 0 and int(nx / i) >= mxg and ny % (npes / i) == 0:
                # Found an acceptable value
                # Warning: does not check branch cuts!

                if nxpe is None or abs(ideal - i) < abs(ideal - nxpe):
                    nxpe = i  # Keep value nearest to the ideal

        if nxpe is None:
            print("ERROR: could not find a valid value for nxpe")
            return False

    nype = int(npes / nxpe)

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))
    infile_list = []
    for i in range(old_npes):
        inpath = os.path.join(path, "BOUT.restart." + str(i) + "." + outformat)
        infile_list.append(DataFile(inpath))

    old_mxsub = int(nx / old_nxpe)
    old_mysub = int(ny / old_nype)
    mxsub = int(nx / nxpe)
    mysub = int(ny / nype)
    for v in var_list:
        ndims = f.ndims(v)

        # collect data
        if ndims == 0:
            # scalar
            data = f.read(v)
        elif ndims == 2:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[
                    ix * old_mxsub + ixstart : (ix + 1) * old_mxsub + 2 * mxg + ixend,
                    iy * old_mysub + iystart : (iy + 1) * old_mysub + 2 * myg + iyend,
                ] = infile_list[i].read(v)[ixstart : old_mxsub + 2 * mxg + ixend, iystart : old_mysub + 2 * myg + iyend]
        elif ndims == 3:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[
                    ix * old_mxsub + ixstart : (ix + 1) * old_mxsub + 2 * mxg + ixend,
                    iy * old_mysub + iystart : (iy + 1) * old_mysub + 2 * myg + iyend,
                    :,
                ] = infile_list[i].read(v)[
                    ixstart : old_mxsub + 2 * mxg + ixend, iystart : old_mysub + 2 * myg + iyend, :
                ]
        else:
            print("ERROR: variable found with unexpected number of dimensions,", ndims, v)
            return False

        # write data
        for i in range(npes):
            ix = i % nxpe
            iy = int(i / nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif ndims == 0:
                # scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(v, data[ix * mxsub : (ix + 1) * mxsub + 2 * mxg, iy * mysub : (iy + 1) * mysub + 2 * myg])
            elif ndims == 3:
                # Field3D
                outfile.write(
                    v, data[ix * mxsub : (ix + 1) * mxsub + 2 * mxg, iy * mysub : (iy + 1) * mysub + 2 * myg, :]
                )
            else:
                print("ERROR: variable found with unexpected number of dimensions,", f.ndims(v))

    f.close()
    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()

    return True
Example 13
def redistribute(npes, path="data", nxpe=None, output=".", informat=None, outformat=None, mxg=2, myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the
    branch cuts. In this respect :py:func:`restart.split` is
    safer. However, BOUT++ checks the topology during initialisation
    anyway so this is not too serious.

    Parameters
    ----------
    npes : int
        Number of processors for the new restart files
    path : str, optional
        Path to original restart files (default: "data")
    nxpe : int, optional
        Number of processors to use in the x-direction (determines
        split: npes = nxpe * nype). Default is None which uses the
        same algorithm as BoutMesh (but without topology information)
        to determine a suitable value for nxpe.
    output : str, optional
        Location to save new restart files (default: current directory)
    informat : str, optional
        Specify file format of old restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default uses the format of
        the first 'BOUT.restart.*' file listed by glob.glob.
    outformat : str, optional
        Specify file format of new restart files (must be a suffix
        understood by DataFile, e.g. 'nc'). Default is to use the same
        as informat.

    Returns
    -------
    True on success

    TODO
    ----
    - Replace printing errors with raising `ValueError`

    """

    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*."+informat))

    nfiles = len(file_list)

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_processor_layout = get_processor_layout(f, has_t_dimension=False)
    print("Grid sizes: ", old_processor_layout.nx,
          old_processor_layout.ny, old_processor_layout.mz)

    if nfiles != old_processor_layout.npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_processor_layout.npes))
        nfiles = old_processor_layout.npes

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    try:
        new_processor_layout = create_processor_layout(
            old_processor_layout, npes, nxpe=nxpe)
    except ValueError as e:
        print("Could not find valid processor split. " + str(e))
        return False

    nx = old_processor_layout.nx
    ny = old_processor_layout.ny
    mz = old_processor_layout.mz
    mxg = old_processor_layout.mxg
    myg = old_processor_layout.myg
    old_npes = old_processor_layout.npes
    old_nxpe = old_processor_layout.nxpe
    old_nype = old_processor_layout.nype
    old_mxsub = old_processor_layout.mxsub
    old_mysub = old_processor_layout.mysub

    nxpe = new_processor_layout.nxpe
    nype = new_processor_layout.nype
    mxsub = new_processor_layout.mxsub
    mysub = new_processor_layout.mysub

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output, "BOUT.restart."+str(i)+"."+outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))
    infile_list = []
    for i in range(old_npes):
        inpath = os.path.join(path, "BOUT.restart."+str(i)+"."+outformat)
        infile_list.append(DataFile(inpath))

    for v in var_list:
        ndims = f.ndims(v)

        # collect data
        if ndims == 0:
            # scalar
            data = f.read(v)
        elif ndims == 2:
            data = np.zeros((nx+2*mxg, ny+2*myg))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i/old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe-1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype-1:
                    iyend = 0
                data[ix*old_mxsub+ixstart:(ix+1)*old_mxsub+2*mxg+ixend,
                     iy*old_mysub+iystart:(iy+1)*old_mysub+2*myg+iyend] = infile_list[i].read(v)[ixstart:old_mxsub+2*mxg+ixend, iystart:old_mysub+2*myg+iyend]
            data = BoutArray(data, attributes=infile_list[0].attributes(v))
        elif ndims == 3:
            data = np.zeros((nx+2*mxg, ny+2*myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i/old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe-1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype-1:
                    iyend = 0
                data[ix*old_mxsub+ixstart:(ix+1)*old_mxsub+2*mxg+ixend, iy*old_mysub+iystart:(iy+1)*old_mysub+2*myg+iyend,
                     :] = infile_list[i].read(v)[ixstart:old_mxsub+2*mxg+ixend, iystart:old_mysub+2*myg+iyend, :]
            data = BoutArray(data, attributes=infile_list[0].attributes(v))
        else:
            print("ERROR: variable found with unexpected number of dimensions,", ndims, v)
            return False

        # write data
        for i in range(npes):
            ix = i % nxpe
            iy = int(i/nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif v == "NYPE":
                outfile.write(v, nype)
            elif ndims == 0:
                # scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(
                    v, data[ix*mxsub:(ix+1)*mxsub+2*mxg, iy*mysub:(iy+1)*mysub+2*myg])
            elif ndims == 3:
                # Field3D
                outfile.write(
                    v, data[ix*mxsub:(ix+1)*mxsub+2*mxg, iy*mysub:(iy+1)*mysub+2*myg, :])
            else:
                print(
                    "ERROR: variable found with unexpected number of dimensions,", f.ndims(v))

    f.close()
    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()

    return True
Example 14
def calc_com_velocity(path=".",
                      fname="rot_ell.curv.68.16.128.Ic_02.nc",
                      tmax=-1,
                      track_peak=False):
    """
   
    input:

    return:

    """

    n = collect("Ne", path=path, tind=[0, tmax], info=False)
    t_array = collect("t_array", path=path, tind=[0, tmax], info=False)
    wci = collect("Omega_ci", path=path, tind=[0, tmax], info=False)
    dt = (t_array[1] - t_array[0]) / wci

    nt = n.shape[0]
    nx = n.shape[1]
    ny = n.shape[2]
    nz = n.shape[3]

    if fname is not None:
        fdata = DataFile(fname)

        R = fdata.read("R")
        Z = fdata.read("Z")

    else:
        R = np.zeros((nx, ny, nz))
        Z = np.zeros((nx, ny, nz))
        rhos = collect('rho_s0', path=path, tind=[0, tmax])
        Rxy = collect("R0", path=path, info=False) * rhos
        dx = (collect('dx', path=path, tind=[0, tmax], info=False) * rhos *
              rhos / (Rxy))[0, 0]
        dz = (collect('dz', path=path, tind=[0, tmax], info=False) * Rxy)
        for i in np.arange(0, nx):
            for j in np.arange(0, ny):
                R[i, j, :] = dx * i
                for k in np.arange(0, nz):
                    Z[i, j, k] = dz * k

    max_ind = np.zeros((nt, ny))
    fwhd = np.zeros((nt, nx, ny, nz))
    xval = np.zeros((nt, ny), dtype='int')
    zval = np.zeros((nt, ny), dtype='int')
    xpeakval = np.zeros((nt, ny))
    zpeakval = np.zeros((nt, ny))
    Rpos = np.zeros((nt, ny))
    Zpos = np.zeros((nt, ny))
    pos = np.zeros((nt, ny))
    vr = np.zeros((nt, ny))
    vz = np.zeros((nt, ny))
    vtot = np.zeros((nt, ny))
    pos_fit = np.zeros((nt, ny))
    v_fit = np.zeros((nt, ny))
    Zposfit = np.zeros((nt, ny))
    RZposfit = np.zeros((nt, ny))

    for y in np.arange(0, ny):
        for t in np.arange(0, nt):

            data = n[t, :, y, :]
            nmax, nmin = np.amax((data[:, :])), np.amin((data[:, :]))
            data[data < (nmin + 0.368 * (nmax - nmin))] = 0
            fwhd[t, :, y, :] = data
            ntot = np.sum(data[:, :])
            zval_float = np.sum(np.sum(data[:, :], axis=0) *
                                (np.arange(nz))) / ntot
            xval_float = np.sum(np.sum(data[:, :], axis=1) *
                                (np.arange(nx))) / ntot

            xval[t, y] = int(np.round(xval_float))
            zval[t, y] = int(np.round(zval_float))

            xpos, zpos = np.where(data[:, :] == nmax)
            xpeakval[t, y] = xpos[0]
            zpeakval[t, y] = zpos[0]

            if track_peak:
                Rpos[t, y] = R[int(xpeakval[t, y]), y, int(zpeakval[t, y])]
                Zpos[t, y] = Z[int(xpeakval[t, y]), y, int(zpeakval[t, y])]
            else:
                Rpos[t, y] = R[xval[t, y], y, zval[t, y]]
                Zpos[t, y] = Z[xval[t, y], y, zval[t, y]]

        pos[:, y] = np.sqrt((Rpos[:, y] - Rpos[0, y])**2)
        z1 = np.polyfit(t_array[:], pos[:, y], 5)
        f = np.poly1d(z1)
        pos_fit[:, y] = f(t_array[:])

        t_cross = np.where(pos_fit[:, y] > pos[:, y])[0]
        t_cross = 0  # t_cross[0]

        pos_fit[:t_cross, y] = pos[:t_cross, y]

        z1 = np.polyfit(t_array[:], pos[:, y], 5)
        f = np.poly1d(z1)
        pos_fit[:, y] = f(t_array[:])

        v_fit[:, y] = calc.deriv(pos_fit[:, y]) / dt

        posunique, pos_index = np.unique(pos[:, y], return_index=True)
        pos_index = np.sort(pos_index)
        XX = np.vstack(
            (t_array[:]**5, t_array[:]**4, t_array[:]**3, t_array[:]**2,
             t_array[:], pos[pos_index[0], y] * np.ones_like(t_array[:]))).T

        pos_fit_no_offset = np.linalg.lstsq(XX[pos_index, :-2], pos[pos_index,
                                                                    y])[0]
        pos_fit[:, y] = np.dot(pos_fit_no_offset, XX[:, :-2].T)
        v_fit[:, y] = calc.deriv(pos_fit[:, y]) / dt

    return (v_fit[:, 0], pos_fit[:, 0], pos[:, 0], Rpos[:, 0], Zpos[:, 0],
            t_cross)
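
A hedged usage sketch: the function collects "Ne" from a BOUT++ run directory and reads R and Z from a grid file (or reconstructs them when fname is None); the run directory below is a placeholder:

v_fit, pos_fit, pos, Rpos, Zpos, t_cross = calc_com_velocity(
    path="blob_run",                          # hypothetical BOUT++ output directory
    fname="rot_ell.curv.68.16.128.Ic_02.nc",  # grid file holding R and Z (the default above)
    track_peak=False)                         # False tracks the centre of mass, True the peak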
Example 15
def collect(varname, xind=None, yind=None, zind=None, tind=None, path=".",
            yguards=False, xguards=True, info=True, prefix="BOUT.dmp",
            strict=False):
    """Collect a variable from a set of BOUT++ outputs.

    data = collect(name)

    name   Name of the variable (string)

    Optional arguments:

    xind = [min,max]   Range of X indices to collect
    yind = [min,max]   Range of Y indices to collect
    zind = [min,max]   Range of Z indices to collect
    tind = [min,max]   Range of T indices to collect

    path    = "."          Path to data files
    prefix  = "BOUT.dmp"   File prefix
    yguards = False        Collect Y boundary guard cells?
    xguards = True         Collect X boundary guard cells?
                           (Set to True to be consistent with the
                           definition of nx)
    info    = True         Print information about collect?
    strict  = False        Fail if the exact variable name is not found?
    """

    # Search for BOUT++ dump files in NetCDF format
    file_list_nc = glob.glob(os.path.join(path, prefix+".nc"))
    file_list_h5 = glob.glob(os.path.join(path, prefix+".hdf5"))
    if file_list_nc != [] and file_list_h5 != []:
        raise IOError("Error: Both NetCDF and HDF5 files are present: do not know which to read.")
    elif file_list_h5 != []:
        suffix = ".hdf5"
        file_list = file_list_h5
    else:
        suffix = ".nc"
        file_list = file_list_nc
    if file_list != []:
        print("Single (parallel) data file")
        f = DataFile(file_list[0]) # Open the file

        data = f.read(varname)
        return data
    
    file_list_nc = glob.glob(os.path.join(path, prefix+".*nc"))
    file_list_h5 = glob.glob(os.path.join(path, prefix+".*hdf5"))
    if file_list_nc != [] and file_list_h5 != []:
        raise IOError("Error: Both NetCDF and HDF5 files are present: do not know which to read.")
    elif file_list_h5 != []:
        suffix = ".hdf5"
        file_list = file_list_h5
    else:
        suffix = ".nc"
        file_list = file_list_nc
        
    file_list.sort()
    if file_list == []:
        raise IOError("ERROR: No data files found")

    nfiles = len(file_list)

    # Read data from the first file
    f = DataFile(file_list[0])

    try:
        dimens = f.dimensions(varname)
        #ndims = len(dimens)
        ndims = f.ndims(varname)
    except:
        if strict:
            raise
        else:
            # Find the variable
            varname = findVar(varname, f.list())
            
            dimens = f.dimensions(varname)
            #ndims = len(dimens)
            ndims = f.ndims(varname)
    
    if ndims < 2:
        # Just read from file
        data = f.read(varname)
        f.close()
        return data

    if ndims > 4:
        raise ValueError("ERROR: Too many dimensions")

    mxsub = f.read("MXSUB")
    if mxsub is None:
        raise ValueError("Missing MXSUB variable")
    mysub = f.read("MYSUB")
    mz    = f.read("MZ")
    myg   = f.read("MYG")
    t_array = f.read("t_array")
    if t_array is None:
        nt = 1
        t_array = np.zeros(1)
    else:
        nt = len(t_array)

    if info:
        print("mxsub = %d mysub = %d mz = %d\n" % (mxsub, mysub, mz))

    # Get the version of BOUT++ (should be > 0.6 for NetCDF anyway)
    try:
        v = f.read("BOUT_VERSION")

        # 2D decomposition
        nxpe = f.read("NXPE")
        mxg  = f.read("MXG")
        nype = f.read("NYPE")
        npe = nxpe * nype

        if info:
            print("nxpe = %d, nype = %d, npe = %d\n" % (nxpe, nype, npe))
            if npe < nfiles:
                print("WARNING: More files than expected (" + str(npe) + ")")
            elif npe > nfiles:
                print("WARNING: Some files missing. Expected " + str(npe))

        if xguards:
            nx = nxpe * mxsub + 2*mxg
        else:
            nx = nxpe * mxsub
    except KeyError:
        print("BOUT++ version : Pre-0.2")
        # Assume number of files is correct
        # No decomposition in X
        nx = mxsub
        mxg = 0
        nxpe = 1
        nype = nfiles

    if yguards:
        ny = mysub * nype + 2*myg
    else:
        ny = mysub * nype

    f.close()

    # Check ranges

    def check_range(r, low, up, name="range"):
        r2 = r
        if r is not None:
            try:
                n = len(r2)
            except TypeError:
                # No len attribute, so probably a single number
                r2 = [r2, r2]
            if (len(r2) < 1) or (len(r2) > 2):
                print("WARNING: "+name+" must be [min, max]")
                r2 = None
            else:
                if len(r2) == 1:
                    # Single-element list: use the same index for min and max
                    r2 = [r2[0], r2[0]]
                if r2[0] < low:
                    r2[0] = low
                if r2[0] > up:
                    r2[0] = up
                if r2[1] < 0:
                    r2[1] = 0
                if r2[1] > up:
                    r2[1] = up
                if r2[0] > r2[1]:
                    tmp = r2[0]
                    r2[0] = r2[1]
                    r2[1] = tmp
        else:
            r2 = [low, up]
        return r2

    xind = check_range(xind, 0, nx-1, "xind")
    yind = check_range(yind, 0, ny-1, "yind")
    zind = check_range(zind, 0, mz-2, "zind")
    tind = check_range(tind, 0, nt-1, "tind")

    xsize = xind[1] - xind[0] + 1
    ysize = yind[1] - yind[0] + 1
    zsize = zind[1] - zind[0] + 1
    tsize = tind[1] - tind[0] + 1

    # Map between dimension names and output size
    sizes = {'x':xsize, 'y':ysize, 'z':zsize, 't':tsize}

    # Create a list with size of each dimension
    ddims = [sizes[d] for d in dimens]

    # Create the data array
    data = np.zeros(ddims)

    for i in range(npe):
        # Get X and Y processor indices
        pe_yind = int(i/nxpe)
        pe_xind = i % nxpe

        inrange = True

        if yguards:
            # Get local ranges
            ymin = yind[0] - pe_yind*mysub
            ymax = yind[1] - pe_yind*mysub

            # Check lower y boundary
            if pe_yind == 0:
                # Keeping inner boundary
                if ymax < 0: inrange = False
                if ymin < 0: ymin = 0
            else:
                if ymax < myg: inrange = False
                if ymin < myg: ymin = myg

            # Upper y boundary
            if pe_yind == (nype - 1):
                # Keeping outer boundary
                if ymin >= (mysub + 2*myg): inrange = False
                if ymax > (mysub + 2*myg - 1): ymax = (mysub + 2*myg - 1)
            else:
                if ymin >= (mysub + myg): inrange = False
                if ymax >= (mysub + myg): ymax = (mysub+myg-1)

            # Calculate global indices
            ygmin = ymin + pe_yind * mysub
            ygmax = ymax + pe_yind * mysub

        else:
            # Get local ranges
            ymin = yind[0] - pe_yind*mysub + myg
            ymax = yind[1] - pe_yind*mysub + myg

            if (ymin >= (mysub + myg)) or (ymax < myg):
                inrange = False # Y out of range

            if ymin < myg:
                ymin = myg
            if ymax >= mysub+myg:
                ymax = myg + mysub - 1

            # Calculate global indices
            ygmin = ymin + pe_yind * mysub - myg
            ygmax = ymax + pe_yind * mysub - myg

        if xguards:
            # Get local ranges
            xmin = xind[0] - pe_xind*mxsub
            xmax = xind[1] - pe_xind*mxsub

            # Check lower x boundary
            if pe_xind == 0:
                # Keeping inner boundary
                if xmax < 0: inrange = False
                if xmin < 0: xmin = 0
            else:
                if xmax < mxg: inrange = False
                if xmin < mxg: xmin = mxg

            # Upper x boundary
            if pe_xind == (nxpe - 1):
                # Keeping outer boundary
                if xmin >= (mxsub + 2*mxg): inrange = False
                if xmax > (mxsub + 2*mxg - 1): xmax = (mxsub + 2*mxg - 1)
            else:
                if xmin >= (mxsub + mxg): inrange = False
                if xmax >= (mxsub + mxg): xmax = (mxsub+mxg-1)

            # Calculate global indices
            xgmin = xmin + pe_xind * mxsub
            xgmax = xmax + pe_xind * mxsub

        else:
            # Get local ranges
            xmin = xind[0] - pe_xind*mxsub + mxg
            xmax = xind[1] - pe_xind*mxsub + mxg

            if (xmin >= (mxsub + mxg)) or (xmax < mxg):
                inrange = False # X out of range

            if xmin < mxg:
                xmin = mxg
            if xmax >= mxsub+mxg:
                xmax = mxg + mxsub - 1

            # Calculate global indices
            xgmin = xmin + pe_xind * mxsub - mxg
            xgmax = xmax + pe_xind * mxsub - mxg


        # Number of local values
        nx_loc = xmax - xmin + 1
        ny_loc = ymax - ymin + 1

        if not inrange:
            continue # Don't need this file
        
        filename = os.path.join(path, prefix+"." + str(i) + suffix)
        if info:
            sys.stdout.write("\rReading from " + filename + ": [" + \
                                 str(xmin) + "-" + str(xmax) + "][" + \
                                 str(ymin) + "-" + str(ymax) + "] -> [" + \
                                 str(xgmin) + "-" + str(xgmax) + "][" + \
                                 str(ygmin) + "-" + str(ygmax) + "]")

        f = DataFile(filename)

        if ndims == 4:
            d = f.read(varname, ranges=[tind[0],tind[1]+1,
                                        xmin, xmax+1,
                                        ymin, ymax+1,
                                        zind[0],zind[1]+1])
            data[:, (xgmin-xind[0]):(xgmin-xind[0]+nx_loc), (ygmin-yind[0]):(ygmin-yind[0]+ny_loc), :] = d
        elif ndims == 3:
            # Could be xyz or txy

            if dimens[2] == 'z': # xyz
                d = f.read(varname, ranges=[xmin, xmax+1,
                                            ymin, ymax+1,
                                            zind[0],zind[1]+1])
                data[(xgmin-xind[0]):(xgmin-xind[0]+nx_loc), (ygmin-yind[0]):(ygmin-yind[0]+ny_loc), :] = d
            else: # txy
                d = f.read(varname, ranges=[tind[0],tind[1]+1,
                                            xmin, xmax+1,
                                            ymin, ymax+1])
                data[:, (xgmin-xind[0]):(xgmin-xind[0]+nx_loc), (ygmin-yind[0]):(ygmin-yind[0]+ny_loc)] = d
        elif ndims == 2:
            # xy
            d = f.read(varname, ranges=[xmin, xmax+1,
                                        ymin, ymax+1])
            data[(xgmin-xind[0]):(xgmin-xind[0]+nx_loc), (ygmin-yind[0]):(ygmin-yind[0]+ny_loc)] = d
        elif ndims == 1:
            if dimens[0] == 't':
                # t
                d = f.read(varname, ranges=[tind[0],tind[1]+1])
                data[:] = d

        f.close()

    # Force the precision of arrays of dimension>1
    if ndims>1:
        try:
            data = data.astype(t_array.dtype, copy=False)
        except TypeError:
            data = data.astype(t_array.dtype)

    # Finished looping over all files
    if info:
        sys.stdout.write("\n")
    return data
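
A usage sketch for collect (the data directory is hypothetical):

# Read the full "Ne" field for the first 11 time points from the BOUT.dmp.* files in "data"
Ne = collect("Ne", path="data", tind=[0, 10])
print(Ne.shape)  # typically (t, x, y, z) for an evolving 3D field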
Example 16
def create(averagelast=1, final=-1, path="data", output="./", informat="nc", outformat=None):
    """Create restart files from data (dmp) files.

    Parameters
    ----------
    averagelast : int, optional
        Number of time points (counting from `final`, inclusive) to
        average over (default is 1 i.e. just take last time-point)
    final : int, optional
        The last time point to use (default is last, -1)
    path : str, optional
        Path to original restart files (default: "data")
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)

    """

    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.dmp.*."+informat))
    nfiles = len(file_list)

    print(("Number of data files: ", nfiles))

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp."+str(i)+"."+informat)
        outfname = os.path.join(output, "BOUT.restart."+str(i)+"."+outformat)

        print((infname, " -> ", outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print(("hist_hi = ", hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print(("tt = ", tt))
        outfile.write("tt", tt)

        tind = final
        if tind < 0.0:
            tind = len(t_array) + final

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        print(("NXPE = ", NXPE, " NYPE = ", NYPE))
        outfile.write("NXPE", NXPE)
        outfile.write("NYPE", NYPE)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable

                print((" -> ", var))

                data = infile.read(var)

                if averagelast == 1:
                    slice = data[final, :, :, :]
                else:
                    slice = mean(data[(final - averagelast)
                                 :final, :, :, :], axis=0)

                print(slice.shape)

                outfile.write(var, slice)

        infile.close()
        outfile.close()
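
A usage sketch (paths are hypothetical):

# Build BOUT.restart.* files from the last time point of the BOUT.dmp.* files in "data"
create(final=-1, path="data", output="restart_from_dmp")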
Example 17
def create(averagelast=1,
           final=-1,
           path="data",
           output="./",
           informat="nc",
           outformat=None):
    """
    Create restart files from data (dmp) files.

    Inputs
    ======

    averagelast   Number of time points to average over.
                  Default is 1 i.e. just take last time-point

    final         The last time point to use. Default is last (-1)

    path          Path to the input data files

    output        Path where the output restart files should go

    informat      Format of the input data files

    outformat     Format of the output restart files

    """

    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.dmp.*." + informat))
    nfiles = len(file_list)

    print(("Number of data files: ", nfiles))

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp." + str(i) + "." + informat)
        outfname = os.path.join(output,
                                "BOUT.restart." + str(i) + "." + outformat)

        print((infname, " -> ", outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print(("hist_hi = ", hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print(("tt = ", tt))
        outfile.write("tt", tt)

        tind = final
        if tind < 0.0:
            tind = len(t_array) + final

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        NPES = NXPE * NYPE
        print(("NPES = ", NPES, " NXPE = ", NXPE))
        outfile.write("NPES", NPES)
        outfile.write("NXPE", NXPE)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable

                print((" -> ", var))

                data = infile.read(var)

                if averagelast == 1:
                    slice = data[final, :, :, :]
                else:
                    slice = mean(data[(final - averagelast):final, :, :, :],
                                 axis=0)

                print(slice.shape)

                outfile.write(var, slice)

        infile.close()
        outfile.close()
Example 18
def split(nxpe,
          nype,
          path="data",
          output="./",
          informat="nc",
          outformat=None,
          mxg=2,
          myg=2):
    """Split restart files across NXPE x NYPE processors.

    Returns True on success
    """

    if outformat is None:
        outformat = informat

    npes = nxpe * nype

    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))
    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read('NPES')
    old_nxpe = f.read('NXPE')

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if old_npes % old_nxpe != 0:
        print("ERROR: Old NPES is not a multiple of old NXPE")
        return False

    old_nype = int(old_npes / old_nxpe)

    if nype % old_nype != 0:
        print("SORRY: New nype must be a multiple of old nype")
        return False

    if nxpe % old_nxpe != 0:
        print("SORRY: New nxpe must be a multiple of old nxpe")
        return False

    # Get dimension sizes

    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            old_mysub = s[1] - 2 * myg
            mz = s[2]
            break

    f.close()

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print(("Grid sizes: ", nx, ny, mz))

    # Number of new processors per old processor, and new sub-domain sizes
    # (inferred from the divisibility checks above)
    xs = nxpe // old_nxpe
    ys = nype // old_nype
    mxsub = nx // nxpe
    mysub = ny // nype

    # Create the new restart files
    for mype in range(npes):
        # Calculate X and Y processor numbers
        pex = mype % nxpe
        pey = int(mype / nxpe)

        old_pex = int(pex / xs)
        old_pey = int(pey / ys)

        old_x = pex % xs
        old_y = pey % ys

        # Old restart file number
        old_mype = old_nxpe * old_pey + old_pex

        # Calculate indices in old restart file
        xmin = old_x * mxsub
        xmax = xmin + mxsub - 1 + 2 * mxg
        ymin = old_y * mysub
        ymax = ymin + mysub - 1 + 2 * myg

        print("New: " + str(mype) + " (" + str(pex) + ", " + str(pey) + ")")
        print(" =>  " + str(old_mype) + " (" + str(old_pex) + ", " +
              str(old_pey) + ") : (" + str(old_x) + ", " + str(old_y) + ")")
def smooth_metric(fname,
                  write_to_file=False,
                  return_values=False,
                  smooth_metric=True,
                  order=7):
    from scipy.signal import savgol_filter
    f = DataFile(str(fname), write=True)
    B = f.read('B')
    bxcvx = f.read('bxcvx')
    bxcvz = f.read('bxcvz')
    bxcvy = f.read('bxcvy')
    J = f.read('J')

    bxcvx_smooth = np.zeros(bxcvx.shape)
    bxcvy_smooth = np.zeros(bxcvy.shape)
    bxcvz_smooth = np.zeros(bxcvz.shape)
    J_smooth = np.zeros(J.shape)

    if smooth_metric:
        g13 = f.read('g13')
        g_13 = f.read('g_13')
        g11 = f.read('g11')
        g_11 = f.read('g_11')
        g33 = f.read('g33')
        g_33 = f.read('g_33')

        g13_smooth = np.zeros(g13.shape)
        g_13_smooth = np.zeros(g_13.shape)
        g11_smooth = np.zeros(g11.shape)
        g_11_smooth = np.zeros(g_11.shape)
        g33_smooth = np.zeros(g33.shape)
        g_33_smooth = np.zeros(g_33.shape)

    for y in np.arange(0, bxcvx.shape[1]):
        for x in np.arange(0, bxcvx.shape[0]):
            bxcvx_smooth[x, y, :] = savgol_filter(
                bxcvx[x, y, :],
                int(np.ceil(bxcvx.shape[-1] / 2) // 2 * 2 + 1), order)
            bxcvz_smooth[x, y, :] = savgol_filter(
                bxcvz[x, y, :],
                int(np.ceil(bxcvz.shape[-1] / 2) // 2 * 2 + 1), order)
            bxcvy_smooth[x, y, :] = savgol_filter(
                bxcvy[x, y, :],
                int(np.ceil(bxcvy.shape[-1] / 2) // 2 * 2 + 1), order)
            J_smooth[x, y, :] = savgol_filter(
                J[x, y, :], int(np.ceil(J.shape[-1] / 2) // 2 * 2 + 1),
                order)
            if smooth_metric:
                g11_smooth[x, y, :] = savgol_filter(
                    g11[x, y, :],
                    int(np.ceil(g11.shape[-1] / 2) // 2 * 2 + 1), order)
                g_11_smooth[x, y, :] = savgol_filter(
                    g_11[x, y, :],
                    int(np.ceil(g_11.shape[-1] / 2) // 2 * 2 + 1), order)
                g13_smooth[x, y, :] = savgol_filter(
                    g13[x, y, :],
                    int(np.ceil(g13.shape[-1] / 2) // 2 * 2 + 1), order)
                g_13_smooth[x, y, :] = savgol_filter(
                    g_13[x, y, :],
                    int(np.ceil(g_13.shape[-1] / 2) // 2 * 2 + 1), order)
                g33_smooth[x, y, :] = savgol_filter(
                    g33[x, y, :],
                    int(np.ceil(g33.shape[-1] / 2) // 2 * 2 + 1), order)
                g_33_smooth[x, y, :] = savgol_filter(
                    g_33[x, y, :],
                    int(np.ceil(g_33.shape[-1] / 2) // 2 * 2 + 1), order)

    if (write_to_file):
        # f.write('bxcvx',bxcvx_smooth)
        # f.write('bxcvy',bxcvy_smooth)
        # f.write('bxcvz',bxcvz_smooth)
        f.write('J', J_smooth)

        if smooth_metric:
            f.write('g11', g11_smooth)
            f.write('g_11', g_11_smooth)
            f.write('g13', g13_smooth)
            f.write('g_13', g_13_smooth)
            f.write('g33', g33_smooth)
            f.write('g_33', g_33_smooth)

    f.close()
    if (return_values):
        return bxcvx_smooth, bxcvy_smooth, bxcvz_smooth, bxcvx, bxcvy, bxcvz
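A short usage sketch for smooth_metric() above, assuming "fci.grid.nc" is a placeholder grid file that already contains B, J, bxcvx/bxcvy/bxcvz and the metric components:

# Smooth along z with a 7th-order Savitzky-Golay filter, write the smoothed
# J and metric terms back to the file, and also return the curvature fields.
smoothed = smooth_metric("fci.grid.nc", write_to_file=True,
                         return_values=True, smooth_metric=True, order=7)
bxcvx_s, bxcvy_s, bxcvz_s, bxcvx_raw, bxcvy_raw, bxcvz_raw = smoothed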
def calc_curvilinear_curvature(fname, field, grid, maps):
    from scipy.signal import savgol_filter

    f = DataFile(str(fname), write=True)
    B = f.read("B")

    dx = grid.metric()["dx"]
    dz = grid.metric()["dz"]
    g_11 = grid.metric()["g_xx"]
    g_22 = grid.metric()["g_yy"]
    g_33 = grid.metric()["g_zz"]
    g_12 = 0.0
    g_13 = grid.metric()["g_xz"]
    g_23 = 0.0

    GR = np.zeros(B.shape)
    GZ = np.zeros(B.shape)
    Gphi = np.zeros(B.shape)
    dRdz = np.zeros(B.shape)
    dZdz = np.zeros(B.shape)
    dRdx = np.zeros(B.shape)
    dZdx = np.zeros(B.shape)

    for y in np.arange(0, B.shape[1]):
        pol, _ = grid.getPoloidalGrid(y)
        R = pol.R
        Z = pol.Z
        # G = \vec{B}/B, here in cylindrical coordinates
        GR[:, y, :] = field.Bxfunc(R, y, Z) / ((B[:, y, :])**2)
        GZ[:, y, :] = field.Bzfunc(R, y, Z) / ((B[:, y, :])**2)
        Gphi[:, y, :] = field.Byfunc(R, y, Z) / ((B[:, y, :])**2)
        for x in np.arange(0, B.shape[0]):
            dRdz[x, y, :] = calc.deriv(R[x, :]) / dz[x, y, :]
            dZdz[x, y, :] = calc.deriv(Z[x, :]) / dz[x, y, :]
        for z in np.arange(0, B.shape[-1]):
            dRdx[:, y, z] = calc.deriv(R[:, z]) / dx[:, y, z]
            dZdx[:, y, z] = calc.deriv(Z[:, z]) / dx[:, y, z]

    R = f.read("R")
    Z = f.read("Z")
    dy = f.read("dy")

    ## calculate Jacobian and contravariant terms in curvilinear coordinates
    J = R * (dZdz * dRdx - dZdx * dRdz)
    Gx = (GR * dZdz - GZ * dRdz) * (R / J)
    Gz = (GZ * dRdx - GR * dZdx) * (R / J)

    G_x = Gx * g_11 + Gphi * g_12 + Gz * g_13
    G_y = Gx * g_12 + Gphi * g_22 + Gz * g_23
    G_z = Gx * g_13 + Gphi * g_23 + Gz * g_33

    dG_zdy = np.zeros(B.shape)
    dG_ydz = np.zeros(B.shape)
    dG_xdz = np.zeros(B.shape)
    dG_zdx = np.zeros(B.shape)
    dG_ydx = np.zeros(B.shape)
    dG_xdy = np.zeros(B.shape)
    for y in np.arange(0, B.shape[1]):
        for x in np.arange(0, B.shape[0]):
            dG_ydz[x, y, :] = calc.deriv(G_y[x, y, :]) / dz[x, y, :]
            dG_xdz[x, y, :] = calc.deriv(G_x[x, y, :]) / dz[x, y, :]
        for z in np.arange(0, B.shape[-1]):
            dG_ydx[:, y, z] = calc.deriv(G_y[:, y, z]) / dx[:, y, z]
            dG_zdx[:, y, z] = calc.deriv(G_z[:, y, z]) / dx[:, y, z]

    #this should really use the maps...
    for x in np.arange(0, B.shape[0]):
        for z in np.arange(0, B.shape[-1]):
            dG_zdy[x, :, z] = calc.deriv(G_z[x, :, z]) / dy[x, :, z]
            dG_xdy[x, :, z] = calc.deriv(G_x[x, :, z]) / dy[x, :, z]

    bxcvx = (dG_zdy - dG_ydz) / J
    bxcvy = (dG_xdz - dG_zdx) / J
    bxcvz = (dG_ydx - dG_xdy) / J
    bxcv = g_11 * (bxcvx**2) + g_22 * (bxcvy**2) + g_33 * (bxcvz**2) + 2 * (
        bxcvz * bxcvx * g_13)
    f.write('bxcvx', bxcvx)
    f.write('bxcvy', bxcvy)
    f.write('bxcvz', bxcvz)
    f.write('J', J)
    f.close()
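A hedged call sketch for calc_curvilinear_curvature(). The field, grid and maps arguments are placeholders for objects produced by the grid-generation step (the routine only uses field.Bxfunc/Byfunc/Bzfunc, grid.metric() and grid.getPoloidalGrid()), and "fci.grid.nc" is a placeholder file that already contains B, R, Z and dy:

# field, grid and maps are assumed to exist from the grid-generation stage;
# the curvature components bxcvx, bxcvy, bxcvz and J are written to the file.
calc_curvilinear_curvature("fci.grid.nc", field, grid, maps)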
Example n. 21
def pol_slice(var3d, gridfile, n=1, zangle=0.0):
    """ data2d = pol_slice(data3d, 'gridfile', n=1, zangle=0.0) """
    n = int(n)
    zangle = float(zangle)

    s = np.shape(var3d)
    if len(s) != 3:
        print("ERROR: pol_slice expects a 3D variable")
        return None

    nx, ny, nz = s

    dz = 2. * np.pi / float(n * (nz - 1))

    try:
        # Open the grid file
        gf = DataFile(gridfile)

        # Check the grid size is correct
        if gf.read("nx") != nx:
            print("ERROR: Grid X size is different to the variable")
            return None
        if gf.read("ny") != ny:
            print("ERROR: Grid Y size is different to the variable")
            return None

        # Get the toroidal shift
        zShift = gf.read("qinty")

        if zShift is not None:
            print("Using qinty as toroidal shift angle")
        else:
            zShift = gf.read("zShift")
            if zShift is not None:
                print("Using zShift as toroidal shift angle")
            else:
                print("ERROR: Neither qinty nor zShift found")
                return None

        gf.close()
    except:
        print("ERROR: pol_slice couldn't read grid file")
        return None

    var2d = np.zeros([nx, ny])

    ######################################
    # Perform 2D slice
    zind = (zangle - zShift) / dz
    z0f = np.floor(zind)
    z0 = z0f.astype(int)
    p = zind - z0f

    # Make z0 between 0 and (nz-2)
    z0 = ((z0 % (nz - 1)) + (nz - 1)) % (nz - 1)

    # Get z+ and z-
    zp = (z0 + 1) % (nz - 1)
    zm = (z0 - 1 + (nz - 1)) % (nz - 1)

    # There may be some more cunning way to do this indexing
    for x in np.arange(nx):
        for y in np.arange(ny):
            var2d[x,y] = 0.5*p[x,y]*(p[x,y]-1.0) * var3d[x,y,zm[x,y]] + \
                         (1.0 - p[x,y]*p[x,y])   * var3d[x,y,z0[x,y]] + \
                         0.5*p[x,y]*(p[x,y]+1.0) * var3d[x,y,zp[x,y]]

    return var2d
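A usage sketch for pol_slice() above. The variable name "P" and the grid file "bout.grd.nc" are placeholders; the grid file must contain qinty or zShift, and collect() is assumed to behave like the version in Example n. 28:

# Take the last time point of a 4D field and slice it at zangle=0
# for a simulation covering one fifth of the torus (n=5).
p3d = collect("P", path="data")[-1, :, :, :]
p2d = pol_slice(p3d, "bout.grd.nc", n=5, zangle=0.0)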
Example n. 22
def create(file_list,
           path,
           averagelast=1,
           final=-1,
           output="./",
           informat="nc",
           outformat=None):
    """Create restart files from data (dmp) files.

    Parameters
    ----------
    file_list : list of str
        List of dump files; its length sets how many restart files are
        created (the file names themselves are reconstructed from `path`)
    path : str
        Path to the original dump (BOUT.dmp.*) files
    averagelast : int, optional
        Number of time points (counting from `final`, inclusive) to
        average over (default is 1 i.e. just take last time-point)
    final : int, optional
        The last time point to use (default is last, -1)
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)

    """

    if outformat is None:
        outformat = informat

    nfiles = len(file_list)

    print(("Number of data files: ", nfiles))

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp." + str(i) + "." + informat)
        outfname = os.path.join(output,
                                "BOUT.restart." + str(i) + "." + outformat)

        print((infname, " -> ", outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print(("hist_hi = ", hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print(("tt = ", tt))
        outfile.write("tt", tt)

        tind = final
        if tind < 0.0:
            tind = len(t_array) + final

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        print(("NXPE = ", NXPE, " NYPE = ", NYPE))
        outfile.write("NXPE", NXPE)
        outfile.write("NYPE", NYPE)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable

                print((" -> ", var))

                data = infile.read(var)

                if averagelast == 1:
                    slice = data[final, :, :, :]
                else:
                    slice = mean(data[(final - averagelast):final, :, :, :],
                                 axis=0)

                print(slice.shape)

                outfile.write(var, slice)

        infile.close()
        outfile.close()
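This variant of create() takes the list of dump files explicitly. A sketch, with "data" and "restarts" as placeholder paths (glob and os assumed imported, as in the other listings):

file_list = glob.glob(os.path.join("data", "BOUT.dmp.*.nc"))
create(file_list, "data", averagelast=1, final=-1, output="restarts")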
Example n. 23
def pol_slice(var3d, gridfile, n=1, zangle=0.0):
    """ data2d = pol_slice(data3d, 'gridfile', n=1, zangle=0.0) """
    n = int(n)
    zangle = float(zangle)

    s = np.shape(var3d)
    if len(s) != 3:
        print("ERROR: pol_slice expects a 3D variable")
        return None

    nx, ny, nz = s

    dz = 2.*np.pi / float(n * (nz-1))

    try:
        # Open the grid file
        gf = DataFile(gridfile)

        # Check the grid size is correct
        if gf.read("nx") != nx:
            print("ERROR: Grid X size is different to the variable")
            return None
        if gf.read("ny") != ny:
            print("ERROR: Grid Y size is different to the variable")
            return None

        # Get the toroidal shift
        zShift = gf.read("qinty")

        if zShift is not None:
            print("Using qinty as toroidal shift angle")
        else:
            zShift = gf.read("zShift")
            if zShift is not None:
                print("Using zShift as toroidal shift angle")
            else:
                print("ERROR: Neither qinty nor zShift found")
                return None

        gf.close()
    except:
        print("ERROR: pol_slice couldn't read grid file")
        return None

    var2d = np.zeros([nx, ny])

    ######################################
    # Perform 2D slice
    zind = (zangle - zShift) / dz
    z0f = np.floor(zind)
    z0 = z0f.astype(int)
    p = zind - z0f

    # Make z0 between 0 and (nz-2)
    z0 = ((z0 % (nz-1)) + (nz-1)) % (nz-1)

    # Get z+ and z-
    zp = (z0 + 1) % (nz-1)
    zm = (z0 - 1 + (nz-1)) % (nz-1)

    # There may be some more cunning way to do this indexing
    for x in np.arange(nx):
        for y in np.arange(ny):
            var2d[x,y] = 0.5*p[x,y]*(p[x,y]-1.0) * var3d[x,y,zm[x,y]] + \
                         (1.0 - p[x,y]*p[x,y])   * var3d[x,y,z0[x,y]] + \
                         0.5*p[x,y]*(p[x,y]+1.0) * var3d[x,y,zp[x,y]]

    return var2d
Example n. 24
def split(nxpe, nype, path="data", output="./", informat="nc", outformat=None):
    """Split restart files across NXPE x NYPE processors.

    Returns True on success
    """

    if outformat is None:
        outformat = informat

    mxg = 2
    myg = 2

    npes = nxpe * nype

    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))
    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    # Read old processor layout
    f = DataFile(os.path.join(path, file_list[0]))

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read("NPES")
    old_nxpe = f.read("NXPE")

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if old_npes % old_nxpe != 0:
        print("ERROR: Old NPES is not a multiple of old NXPE")
        return False

    old_nype = int(old_npes / old_nxpe)

    if nype % old_nype != 0:
        print("SORRY: New nype must be a multiple of old nype")
        return False

    if nxpe % old_nxpe != 0:
        print("SORRY: New nxpe must be a multiple of old nxpe")
        return False

    # Get dimension sizes

    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            old_mysub = s[1] - 2 * myg
            mz = s[2]
            break

    f.close()

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print(("Grid sizes: ", nx, ny, mz))

    # Number of new processors per old processor, and new sub-domain sizes
    # (inferred from the divisibility checks above)
    xs = nxpe // old_nxpe
    ys = nype // old_nype
    mxsub = nx // nxpe
    mysub = ny // nype

    # Create the new restart files
    for mype in range(npes):
        # Calculate X and Y processor numbers
        pex = mype % nxpe
        pey = int(mype / nxpe)

        old_pex = int(pex / xs)
        old_pey = int(pey / ys)

        old_x = pex % xs
        old_y = pey % ys

        # Old restart file number
        old_mype = old_nxpe * old_pey + old_pex

        # Calculate indices in old restart file
        xmin = old_x * mxsub
        xmax = xmin + mxsub - 1 + 2 * mxg
        ymin = old_y * mysub
        ymax = ymin + mysub - 1 + 2 * myg

        print("New: " + str(mype) + " (" + str(pex) + ", " + str(pey) + ")")
        print(
            " =>  "
            + str(old_mype)
            + " ("
            + str(old_pex)
            + ", "
            + str(old_pey)
            + ") : ("
            + str(old_x)
            + ", "
            + str(old_y)
            + ")"
        )
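A call sketch for split() above, assuming old restart files under "data" and a new 4 x 8 processor layout written to "data_32" (placeholder paths). The new nxpe and nype must be integer multiples of the old values:

ok = split(4, 8, path="data", output="data_32", informat="nc")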
Example n. 25
def create(averagelast=1, final=-1, path="data", output="./", informat="nc", outformat=None):
    """
    Create restart files from data (dmp) files.

    Inputs
    ======

    averagelast   Number of time points to average over.
                  Default is 1 i.e. just take last time-point

    final         The last time point to use. Default is last (-1)

    path          Path to the input data files

    output        Path where the output restart files should go

    informat      Format of the input data files

    outformat     Format of the output restart files

    """

    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.dmp.*." + informat))
    nfiles = len(file_list)

    print(("Number of data files: ", nfiles))

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.dmp." + str(i) + "." + informat)
        outfname = os.path.join(output, "BOUT.restart." + str(i) + "." + outformat)

        print((infname, " -> ", outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Get the data always needed in restart files
        hist_hi = infile.read("iteration")
        print(("hist_hi = ", hist_hi))
        outfile.write("hist_hi", hist_hi)

        t_array = infile.read("t_array")
        tt = t_array[final]
        print(("tt = ", tt))
        outfile.write("tt", tt)

        tind = final
        if tind < 0.0:
            tind = len(t_array) + final

        NXPE = infile.read("NXPE")
        NYPE = infile.read("NYPE")
        NPES = NXPE * NYPE
        print(("NPES = ", NPES, " NXPE = ", NXPE))
        outfile.write("NPES", NPES)
        outfile.write("NXPE", NXPE)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 4:
                # Could be an evolving variable

                print((" -> ", var))

                data = infile.read(var)

                if averagelast == 1:
                    slice = data[final, :, :, :]
                else:
                    slice = mean(data[(final - averagelast) : final, :, :, :], axis=0)

                print(slice.shape)

                outfile.write(var, slice)

        infile.close()
        outfile.close()
Example n. 26
def redistribute(npes,
                 path="data",
                 nxpe=None,
                 output=".",
                 informat=None,
                 outformat=None,
                 mxg=2,
                 myg=2):
    """Resize restart files across NPES processors.

    Does not check if new processor arrangement is compatible with the branch cuts. In this respect restart.split is safer. However, BOUT++ checks the topology during initialisation anyway so this is not too serious.

    Parameters
    ----------
    npes : int
        number of processors for the new restart files
    path : string, optional
        location of old restart files
    nxpe : int, optional
        number of processors to use in the x-direction (determines split: npes = nxpe * nype). Default is None which uses the same algorithm as BoutMesh (but without topology information) to determine a suitable value for nxpe.
    output : string, optional
        location to save new restart files
    informat : string, optional
        specify file format of old restart files (must be a suffix understood by DataFile, e.g. 'nc'). Default uses the format of the first 'BOUT.restart.*' file listed by glob.glob.
    outformat : string, optional
        specify file format of new restart files (must be a suffix understood by DataFile, e.g. 'nc'). Default is to use the same as informat.

    Returns
    -------
    True on success
    """

    if npes <= 0:
        print("ERROR: Negative or zero number of processors")
        return False

    if path == output:
        print("ERROR: Can't overwrite restart files")
        return False

    if informat is None:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*"))
    else:
        file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    # Read old processor layout
    f = DataFile(file_list[0])

    # Get list of variables
    var_list = f.list()
    if len(var_list) == 0:
        print("ERROR: No data found")
        return False

    old_npes = f.read('NPES')
    old_nxpe = f.read('NXPE')
    old_nype = int(old_npes / old_nxpe)

    if nfiles != old_npes:
        print("WARNING: Number of restart files inconsistent with NPES")
        print("Setting nfiles = " + str(old_npes))
        nfiles = old_npes

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    informat = file_list[0].split(".")[-1]
    if outformat is None:
        outformat = informat

    old_mxsub = 0
    old_mysub = 0
    mz = 0

    for v in var_list:
        if f.ndims(v) == 3:
            s = f.size(v)
            old_mxsub = s[0] - 2 * mxg
            if old_mxsub < 0:
                if s[0] == 1:
                    old_mxsub = 1
                    mxg = 0
                elif s[0] == 3:
                    old_mxsub = 1
                    mxg = 1
                else:
                    print("Number of x points is wrong?")
                    return False

            old_mysub = s[1] - 2 * myg
            if old_mysub < 0:
                if s[1] == 1:
                    old_mysub = 1
                    myg = 0
                elif s[1] == 3:
                    old_mysub = 1
                    myg = 1
                else:
                    print("Number of y points is wrong?")
                    return False

            mz = s[2]
            break

    # Calculate total size of the grid
    nx = old_mxsub * old_nxpe
    ny = old_mysub * old_nype
    print("Grid sizes: ", nx, ny, mz)

    if nxpe is None:  # Copy algorithm from BoutMesh for selecting nxpe
        ideal = sqrt(float(nx) * float(npes) /
                     float(ny))  # Results in square domain

        for i in range(1, npes + 1):
            if npes % i == 0 and nx % i == 0 and int(
                    nx / i) >= mxg and ny % (npes / i) == 0:
                # Found an acceptable value
                # Warning: does not check branch cuts!

                if nxpe is None or abs(ideal - i) < abs(ideal - nxpe):
                    nxpe = i  # Keep value nearest to the ideal

        if nxpe is None:
            print("ERROR: could not find a valid value for nxpe")
            return False

    nype = int(npes / nxpe)

    outfile_list = []
    for i in range(npes):
        outpath = os.path.join(output,
                               "BOUT.restart." + str(i) + "." + outformat)
        outfile_list.append(DataFile(outpath, write=True, create=True))
    infile_list = []
    for i in range(old_npes):
        inpath = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        infile_list.append(DataFile(inpath))

    old_mxsub = int(nx / old_nxpe)
    old_mysub = int(ny / old_nype)
    mxsub = int(nx / nxpe)
    mysub = int(ny / nype)
    for v in var_list:
        ndims = f.ndims(v)

        #collect data
        if ndims == 0:
            #scalar
            data = f.read(v)
        elif ndims == 2:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg +
                     ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg +
                     iyend] = infile_list[i].read(v)[ixstart:old_mxsub +
                                                     2 * mxg + ixend,
                                                     iystart:old_mysub +
                                                     2 * myg + iyend]
        elif ndims == 3:
            data = np.zeros((nx + 2 * mxg, ny + 2 * myg, mz))
            for i in range(old_npes):
                ix = i % old_nxpe
                iy = int(i / old_nxpe)
                ixstart = mxg
                if ix == 0:
                    ixstart = 0
                ixend = -mxg
                if ix == old_nxpe - 1:
                    ixend = 0
                iystart = myg
                if iy == 0:
                    iystart = 0
                iyend = -myg
                if iy == old_nype - 1:
                    iyend = 0
                data[ix * old_mxsub + ixstart:(ix + 1) * old_mxsub + 2 * mxg +
                     ixend,
                     iy * old_mysub + iystart:(iy + 1) * old_mysub + 2 * myg +
                     iyend, :] = infile_list[i].read(v)[ixstart:old_mxsub +
                                                        2 * mxg + ixend,
                                                        iystart:old_mysub +
                                                        2 * myg + iyend, :]
        else:
            print(
                "ERROR: variable found with unexpected number of dimensions,",
                ndims, v)
            return False

        # write data
        for i in range(npes):
            ix = i % nxpe
            iy = int(i / nxpe)
            outfile = outfile_list[i]
            if v == "NPES":
                outfile.write(v, npes)
            elif v == "NXPE":
                outfile.write(v, nxpe)
            elif ndims == 0:
                # scalar
                outfile.write(v, data)
            elif ndims == 2:
                # Field2D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg])
            elif ndims == 3:
                # Field3D
                outfile.write(
                    v, data[ix * mxsub:(ix + 1) * mxsub + 2 * mxg,
                            iy * mysub:(iy + 1) * mysub + 2 * myg, :])
            else:
                print(
                    "ERROR: variable found with unexpected number of dimensions,",
                    f.ndims(v))

    f.close()
    for infile in infile_list:
        infile.close()
    for outfile in outfile_list:
        outfile.close()

    return True
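A usage sketch for redistribute(), assuming the restart files in "data" should be rearranged for 16 processors and written to "data_16" (placeholder paths); nxpe is left to the BoutMesh-style heuristic:

ok = redistribute(16, path="data", output="data_16")
if not ok:
    print("redistribute failed")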
Example n. 27
def resizeY(newy, path="data", output=".", informat="nc", outformat=None, myg=2):
    """Increase the number of Y points in restart files

    NOTE:
        * Can't overwrite

    Parameters
    ----------
    newy : int
        ny for the new file
    path : str, optional
        Path to original restart files (default: "data")
    output : str, optional
        Path to write new restart files (default: current directory)
    informat : str, optional
        File extension of original files (default: "nc")
    outformat : str, optional
        File extension of new files (default: use the same as `informat`)
    myg : int, optional
        Number of ghost points in y (default: 2)

    Returns
    -------
    True on success, else False

    TODO
    ----
    - Replace printing errors with raising `ValueError`
    - Make informat work like `redistribute`

    """

    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*."+informat))

    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.restart."+str(i)+"."+informat)
        outfname = os.path.join(output, "BOUT.restart."+str(i)+"."+outformat)

        print("Processing %s -> %s" % (infname, outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information
        for var in ["hist_hi", "NXPE", "NYPE", "tt"]:
            data = infile.read(var)
            try:
                # Convert to scalar if necessary
                data = data[0]
            except:
                pass
            outfile.write(var, data)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 3:
                # Could be an evolving variable [x,y,z]

                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny, nz = indata.shape

                # y coordinate in input and output data
                iny = (arange(ny) - myg + 0.5) / (ny - 2*myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2*myg)

                outdata = zeros([nx, newy, nz])

                for x in range(nx):
                    for z in range(nz):
                        f = interp1d(
                            iny, indata[x, :, z], bounds_error=False, fill_value=0.0)
                        outdata[x, :, z] = f(outy)

                outfile.write(var, outdata)
            elif infile.ndims(var) == 2:
                # Assume evolving variable [x,y]
                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny = indata.shape

                # y coordinate in input and output data
                iny = (arange(ny) - myg + 0.5) / (ny - 2*myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2*myg)

                outdata = zeros([nx, newy])

                for x in range(nx):
                    f = interp1d(iny, indata[x, :],
                                 bounds_error=False, fill_value=0.0)
                    outdata[x, :] = f(outy)

                outfile.write(var, outdata)
            else:
                # Copy variable
                print(" -> Copying " + var)

                # Read variable from input
                data = infile.read(var)
                try:
                    # Convert to scalar if necessary
                    data = data[0]
                except:
                    pass
                outfile.write(var, data)

        infile.close()
        outfile.close()

    return True
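A sketch of calling resizeY() above to interpolate the restart files onto 128 y-points, with "data" and "data_y128" as placeholder paths:

resizeY(128, path="data", output="data_y128", informat="nc", myg=2)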
Example n. 28
def collect(varname,
            xind=None,
            yind=None,
            zind=None,
            tind=None,
            path=".",
            yguards=False,
            xguards=True,
            info=True,
            prefix="BOUT.dmp",
            strict=False):
    """Collect a variable from a set of BOUT++ outputs.

    data = collect(name)

    name   Name of the variable (string)

    Optional arguments:

    xind = [min,max]   Range of X indices to collect
    yind = [min,max]   Range of Y indices to collect
    zind = [min,max]   Range of Z indices to collect
    tind = [min,max]   Range of T indices to collect

    path    = "."          Path to data files
    prefix  = "BOUT.dmp"   File prefix
    yguards = False        Collect Y boundary guard cells?
    xguards = True         Collect X boundary guard cells?
                           (Set to True to be consistent with the
                           definition of nx)
    info    = True         Print information about collect?
    strict  = False        Fail if the exact variable name is not found?
    """

    # Look for a single (parallel) BOUT++ dump file, in NetCDF or HDF5 format
    file_list_nc = glob.glob(os.path.join(path, prefix + ".nc"))
    file_list_h5 = glob.glob(os.path.join(path, prefix + ".hdf5"))
    if file_list_nc != [] and file_list_h5 != []:
        raise IOError(
            "Error: Both NetCDF and HDF5 files are present: do not know which to read."
        )
    elif file_list_h5 != []:
        suffix = ".hdf5"
        file_list = file_list_h5
    else:
        suffix = ".nc"
        file_list = file_list_nc
    if file_list != []:
        print("Single (parallel) data file")
        f = DataFile(file_list[0])  # Open the file

        data = f.read(varname)
        f.close()
        return data

    file_list_nc = glob.glob(os.path.join(path, prefix + ".*nc"))
    file_list_h5 = glob.glob(os.path.join(path, prefix + ".*hdf5"))
    if file_list_nc != [] and file_list_h5 != []:
        raise IOError(
            "Error: Both NetCDF and HDF5 files are present: do not know which to read."
        )
    elif file_list_h5 != []:
        suffix = ".hdf5"
        file_list = file_list_h5
    else:
        suffix = ".nc"
        file_list = file_list_nc

    file_list.sort()
    if file_list == []:
        raise IOError("ERROR: No data files found")

    nfiles = len(file_list)

    # Read data from the first file
    f = DataFile(file_list[0])

    try:
        dimens = f.dimensions(varname)
        #ndims = len(dimens)
        ndims = f.ndims(varname)
    except:
        if strict:
            raise
        else:
            # Find the variable
            varname = findVar(varname, f.list())

            dimens = f.dimensions(varname)
            #ndims = len(dimens)
            ndims = f.ndims(varname)

    # ndims is 0 for scalars (reals), and 1 for 1D arrays such as t_array
    if ndims < 2:
        # Just read from file
        if varname != 't_array':
            data = f.read(varname)
        elif (varname == 't_array') and (tind is None):
            data = f.read(varname)
        elif (varname == 't_array') and (tind is not None):
            data = f.read(varname, ranges=[tind[0], tind[1] + 1])
        f.close()
        return data

    if ndims > 4:
        raise ValueError("ERROR: Too many dimensions")

    mxsub = f.read("MXSUB")
    if mxsub is None:
        raise ValueError("Missing MXSUB variable")
    mysub = f.read("MYSUB")
    mz = f.read("MZ")
    myg = f.read("MYG")
    t_array = f.read("t_array")
    if t_array is None:
        nt = 1
        t_array = np.zeros(1)
    else:
        nt = len(t_array)

    if info:
        print("mxsub = %d mysub = %d mz = %d\n" % (mxsub, mysub, mz))

    # Get the version of BOUT++ (should be > 0.6 for NetCDF anyway)
    try:
        version = f["BOUT_VERSION"]
    except KeyError:
        print("BOUT++ version : Pre-0.2")
        version = 0
    if version < 3.5:
        # Remove extra point
        nz = mz - 1
    else:
        nz = mz

    # Fallback to sensible (?) defaults
    try:
        nxpe = f["NXPE"]
    except KeyError:
        nxpe = 1
        print("NXPE not found, setting to {}".format(nxpe))
    try:
        mxg = f["MXG"]
    except KeyError:
        mxg = 0
        print("MXG not found, setting to {}".format(mxg))
    try:
        nype = f["NYPE"]
    except KeyError:
        nype = nfiles
        print("NYPE not found, setting to {}".format(nype))

    npe = nxpe * nype
    if info:
        print("nxpe = %d, nype = %d, npe = %d\n" % (nxpe, nype, npe))
        if npe < nfiles:
            print("WARNING: More files than expected (" + str(npe) + ")")
        elif npe > nfiles:
            print("WARNING: Some files missing. Expected " + str(npe))

    if xguards:
        nx = nxpe * mxsub + 2 * mxg
    else:
        nx = nxpe * mxsub

    if yguards:
        ny = mysub * nype + 2 * myg
    else:
        ny = mysub * nype

    f.close()

    # Check ranges

    def check_range(r, low, up, name="range"):
        r2 = r
        if r is not None:
            try:
                n = len(r2)
            except:
                # No len attribute, so probably a single number
                r2 = [r2, r2]
            if (len(r2) < 1) or (len(r2) > 2):
                print("WARNING: " + name + " must be [min, max]")
                r2 = None
            else:
                if len(r2) == 1:
                    r2 = [r2[0], r2[0]]
                if r2[0] < 0 and low >= 0:
                    r2[0] += (up - low + 1)
                if r2[1] < 0 and low >= 0:
                    r2[1] += (up - low + 1)
                if r2[0] < low:
                    r2[0] = low
                if r2[0] > up:
                    r2[0] = up
                if r2[1] < low:
                    r2[1] = low
                if r2[1] > up:
                    r2[1] = up
                if r2[0] > r2[1]:
                    tmp = r2[0]
                    r2[0] = r2[1]
                    r2[1] = tmp
        else:
            r2 = [low, up]
        return r2

    xind = check_range(xind, 0, nx - 1, "xind")
    yind = check_range(yind, 0, ny - 1, "yind")
    zind = check_range(zind, 0, nz - 1, "zind")
    tind = check_range(tind, 0, nt - 1, "tind")

    xsize = xind[1] - xind[0] + 1
    ysize = yind[1] - yind[0] + 1
    zsize = zind[1] - zind[0] + 1
    tsize = tind[1] - tind[0] + 1

    # Map between dimension names and output size
    sizes = {'x': xsize, 'y': ysize, 'z': zsize, 't': tsize}

    # Create a list with size of each dimension
    ddims = [sizes[d] for d in dimens]

    # Create the data array
    data = np.zeros(ddims)

    for i in range(npe):
        # Get X and Y processor indices
        pe_yind = int(i / nxpe)
        pe_xind = i % nxpe

        inrange = True

        if yguards:
            # Get local ranges
            ymin = yind[0] - pe_yind * mysub
            ymax = yind[1] - pe_yind * mysub

            # Check lower y boundary
            if pe_yind == 0:
                # Keeping inner boundary
                if ymax < 0: inrange = False
                if ymin < 0: ymin = 0
            else:
                if ymax < myg: inrange = False
                if ymin < myg: ymin = myg

            # Upper y boundary
            if pe_yind == (nype - 1):
                # Keeping outer boundary
                if ymin >= (mysub + 2 * myg): inrange = False
                if ymax > (mysub + 2 * myg - 1): ymax = (mysub + 2 * myg - 1)
            else:
                if ymin >= (mysub + myg): inrange = False
                if ymax >= (mysub + myg): ymax = (mysub + myg - 1)

            # Calculate global indices
            ygmin = ymin + pe_yind * mysub
            ygmax = ymax + pe_yind * mysub

        else:
            # Get local ranges
            ymin = yind[0] - pe_yind * mysub + myg
            ymax = yind[1] - pe_yind * mysub + myg

            if (ymin >= (mysub + myg)) or (ymax < myg):
                inrange = False  # Y out of range

            if ymin < myg:
                ymin = myg
            if ymax >= mysub + myg:
                ymax = myg + mysub - 1

            # Calculate global indices
            ygmin = ymin + pe_yind * mysub - myg
            ygmax = ymax + pe_yind * mysub - myg

        if xguards:
            # Get local ranges
            xmin = xind[0] - pe_xind * mxsub
            xmax = xind[1] - pe_xind * mxsub

            # Check lower x boundary
            if pe_xind == 0:
                # Keeping inner boundary
                if xmax < 0: inrange = False
                if xmin < 0: xmin = 0
            else:
                if xmax < mxg: inrange = False
                if xmin < mxg: xmin = mxg

            # Upper x boundary
            if pe_xind == (nxpe - 1):
                # Keeping outer boundary
                if xmin >= (mxsub + 2 * mxg): inrange = False
                if xmax > (mxsub + 2 * mxg - 1): xmax = (mxsub + 2 * mxg - 1)
            else:
                if xmin >= (mxsub + mxg): inrange = False
                if xmax >= (mxsub + mxg): xmax = (mxsub + mxg - 1)

            # Calculate global indices
            xgmin = xmin + pe_xind * mxsub
            xgmax = xmax + pe_xind * mxsub

        else:
            # Get local ranges
            xmin = xind[0] - pe_xind * mxsub + mxg
            xmax = xind[1] - pe_xind * mxsub + mxg

            if (xmin >= (mxsub + mxg)) or (xmax < mxg):
                inrange = False  # X out of range

            if xmin < mxg:
                xmin = mxg
            if xmax >= mxsub + mxg:
                xmax = mxg + mxsub - 1

            # Calculate global indices
            xgmin = xmin + pe_xind * mxsub - mxg
            xgmax = xmax + pe_xind * mxsub - mxg

        # Number of local values
        nx_loc = xmax - xmin + 1
        ny_loc = ymax - ymin + 1

        if not inrange:
            continue  # Don't need this file

        filename = os.path.join(path, prefix + "." + str(i) + suffix)
        if info:
            sys.stdout.write("\rReading from " + filename + ": [" + \
                                 str(xmin) + "-" + str(xmax) + "][" + \
                                 str(ymin) + "-" + str(ymax) + "] -> [" + \
                                 str(xgmin) + "-" + str(xgmax) + "][" + \
                                 str(ygmin) + "-" + str(ygmax) + "]")

        f = DataFile(filename)

        if ndims == 4:
            d = f.read(varname,
                       ranges=[
                           tind[0], tind[1] + 1, xmin, xmax + 1, ymin,
                           ymax + 1, zind[0], zind[1] + 1
                       ])
            data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
        elif ndims == 3:
            # Could be xyz or txy

            if dimens[2] == 'z':  # xyz
                d = f.read(varname,
                           ranges=[
                               xmin, xmax + 1, ymin, ymax + 1, zind[0],
                               zind[1] + 1
                           ])
                data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc), :] = d
            else:  # txy
                d = f.read(varname,
                           ranges=[
                               tind[0], tind[1] + 1, xmin, xmax + 1, ymin,
                               ymax + 1
                           ])
                data[:, (xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                     (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d
        elif ndims == 2:
            # xy
            d = f.read(varname, ranges=[xmin, xmax + 1, ymin, ymax + 1])
            data[(xgmin - xind[0]):(xgmin - xind[0] + nx_loc),
                 (ygmin - yind[0]):(ygmin - yind[0] + ny_loc)] = d

        f.close()

    # Force the precision of arrays of dimension>1
    if ndims > 1:
        try:
            data = data.astype(t_array.dtype, copy=False)
        except TypeError:
            data = data.astype(t_array.dtype)

    # Finished looping over all files
    if info:
        sys.stdout.write("\n")
    return data
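A usage sketch for collect() above. The variable name "n" and the "data" path are placeholders; only the last ten time points are read and the x guard cells are dropped:

n = collect("n", path="data", tind=[-10, -1], xguards=False, info=False)
print(n.shape)   # (t, x, y, z) for an evolving 3D field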
Example n. 29
def resizeY(newy,
            path="data",
            output=".",
            informat="nc",
            outformat=None,
            myg=2):
    """
    Resize all the restart files in Y
    """

    if outformat is None:
        outformat = informat

    file_list = glob.glob(os.path.join(path, "BOUT.restart.*." + informat))

    nfiles = len(file_list)

    if nfiles == 0:
        print("ERROR: No restart files found")
        return False

    for i in range(nfiles):
        # Open each data file
        infname = os.path.join(path, "BOUT.restart." + str(i) + "." + informat)
        outfname = os.path.join(output,
                                "BOUT.restart." + str(i) + "." + outformat)

        print("Processing %s -> %s" % (infname, outfname))

        infile = DataFile(infname)
        outfile = DataFile(outfname, create=True)

        # Copy basic information
        for var in ["hist_hi", "NPES", "NXPE", "tt"]:
            data = infile.read(var)
            try:
                # Convert to scalar if necessary
                data = data[0]
            except:
                pass
            outfile.write(var, data)

        # Get a list of variables
        varnames = infile.list()

        for var in varnames:
            if infile.ndims(var) == 3:
                # Could be an evolving variable [x,y,z]

                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny, nz = indata.shape

                # y coordinate in input and output data
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy, nz])

                for x in range(nx):
                    for z in range(nz):
                        f = interp1d(iny,
                                     indata[x, :, z],
                                     bounds_error=False,
                                     fill_value=0.0)
                        outdata[x, :, z] = f(outy)

                outfile.write(var, outdata)
            elif infile.ndims(var) == 2:
                # Assume evolving variable [x,y]
                print(" -> Resizing " + var)

                # Read variable from input
                indata = infile.read(var)

                nx, ny = indata.shape

                # y coordinate in input and output data
                iny = (arange(ny) - myg + 0.5) / (ny - 2 * myg)
                outy = (arange(newy) - myg + 0.5) / (newy - 2 * myg)

                outdata = zeros([nx, newy])

                for x in range(nx):
                    f = interp1d(iny,
                                 indata[x, :],
                                 bounds_error=False,
                                 fill_value=0.0)
                    outdata[x, :] = f(outy)

                outfile.write(var, outdata)
            else:
                # Copy variable
                print(" -> Copying " + var)

                # Read variable from input
                data = infile.read(var)
                try:
                    # Convert to scalar if necessary
                    data = data[0]
                except:
                    pass
                outfile.write(var, data)

        infile.close()
        outfile.close()

    return True