Example #1
def demo(iout=2, run='.', data='data', dir=0, iv=0):
    s = dispatch.snapshot(iout, run, data)
    #
    print('       yt patches:', len(dispatch.yt.patches(s)))
    print('domain_dimensions:', dispatch.yt.domain_dimensions(s))
    #
    parameters = dispatch.yt.parameters(s)
    yt.load_amr_grids(dispatch.yt.patches(s), **parameters)
Example #2
def load_grids():
    # grid_data, dims and bbox are assumed to be defined in the enclosing scope
    load_amr_grids(
        grid_data,
        dims,
        bbox=bbox,
        periodicity=(0, 0, 0),
        length_unit=1.0,
        refine_by=2,
    )
Example #3
def setup_fake_refby():
    refine_by=np.array([5, 1, 1])
    top_grid_dim = [100,  10, 2]
    n1=100
    n2=10
    n3=2

    grid_data = [
        dict(left_edge = [0.0, 0.0, 0.0],
             right_edge = [1.0, np.pi, np.pi*2.],
             level = 0,
             dimensions = np.array([n1, n2, n3])),
        dict(left_edge = [0., 0., 0.],
             right_edge = [0.5, np.pi, np.pi*2.],
             level = 1,
             dimensions = refine_by*[n1/2.0, n2, n3]),
    ]

    for g in grid_data:
        g["density"] = (np.random.random(g["dimensions"].astype("i8")),
                        "g/cm**3")
    bbox = np.array([[0.0, 1.0], [0.0, np.pi], [0.0, np.pi*2]])

    ds = yt.load_amr_grids(grid_data, top_grid_dim,
                           bbox = bbox, geometry='spherical',
                           refine_by=refine_by, length_unit='kpc')
    return ds
Example #4
def load(self, request_fields=None):
    """
    Load all blocks' information, pack into a dictionary and feed to YT.
    """
    block_data = []
    if request_fields is None:
        request_fields = np.zeros(11)
        request_fields[:8] = self.variables
        request_fields[8:] = request_fields[4:7]
    variable_indices = np.flatnonzero(request_fields)
    self.open_file()
    self._preload()
    xl_domain = [0.] * 3
    xr_domain = [0.] * 3
    for level in range(0, self.lmax + 1):
        for blockID in self.lv_blockID[level].astype(int):
            one_block = self._read_one_block_coord(level, blockID)
            one_block.update(
                self._read_one_block_variable(level, blockID,
                                              variable_indices))
            one_block['level'] = level + 1
            one_block['dimensions'] = self.block_dimensions
            block_data.append(one_block)
            xl_domain = np.minimum(xl_domain, one_block['left_edge'])
            xr_domain = np.maximum(xr_domain, one_block['right_edge'])
    self.close_file()
    bbox = np.vstack([xl_domain, xr_domain]).T.astype(np.float32)
    ds = yt.load_amr_grids(block_data,
                           self.dimensions,
                           bbox,
                           sim_time=self.time,
                           unit_system='code',
                           periodicity=self.periodicity)
    setup_fluid_fields(ds)
    return ds
Example #5
def test_refine_by():
    grid_data = []
    ref_by = 4
    lo = 0.0
    hi = 1.0
    fine_grid_width = (hi - lo) / ref_by
    for level in range(2):
        grid_dict = {}

        grid_dict["left_edge"] = [0.0 + 0.5 * fine_grid_width * level] * 3
        grid_dict["right_edge"] = [1.0 - 0.5 * fine_grid_width * level] * 3
        grid_dict["dimensions"] = [8, 8, 8]
        grid_dict["level"] = level

        grid_dict["density"] = np.ones((8, 8, 8))

        grid_data.append(grid_dict)

    domain_dimensions = np.array([8, 8, 8])

    load_amr_grids(grid_data, domain_dimensions, refine_by=ref_by)
Example #6
def snapshot(iout=1, run='.', data='../data', verbose=0, copy=True):
    """
        Open snapshot iout in directory data/run/, returning a YT data set
    """
    s = dispatch.snapshot(iout, run, data)
    if verbose > 1:
        print('time:', s.time)
    #
    if verbose:
        print('       yt patches:', len(s.patches))
        print('domain_dimensions:', dispatch.yt.domain_dimensions(s))
    #
    parameters = dispatch.yt.parameters(s)
    ds = yt.load_amr_grids(dispatch.yt.patches(s, copy=copy), **parameters)
    return ds
Example #7
def load_yt(frame):

    sol = Solution(frame,path='_output',file_format='ascii')

    grid_data = []
     
    for state in sorted(sol.states, key = lambda a: a.patch.level):
        patch = state.patch
        d = {
            'left_edge': patch.lower_global,
            'right_edge': patch.upper_global,
            'level': patch.level,
            'dimensions': patch.num_cells_global,
            'q': state.q[0,...],
            'number_of_particles': 0,
            }
        grid_data.append(d)

    ds = yt.load_amr_grids(grid_data, sol.patch.num_cells_global)
    return ds
Example #8
def test_refine_by():
    grid_data = []
    ref_by = 4
    lo = 0.0
    hi = 1.0
    fine_grid_width = (hi - lo) / ref_by
    for level in range(2):
        grid_dict = {}

        grid_dict['left_edge'] = [0.0 + 0.5*fine_grid_width*level]*3
        grid_dict['right_edge'] = [1.0 - 0.5*fine_grid_width*level]*3
        grid_dict['dimensions'] = [8, 8, 8]
        grid_dict['level'] = level

        grid_dict['density'] = np.ones((8,8,8))

        grid_data.append(grid_dict)

    domain_dimensions = np.array([8, 8, 8])

    spf = load_amr_grids(grid_data, domain_dimensions, refine_by=ref_by)
Example #9
def test_qt_overflow():
    grid_data = []

    grid_dict = {}

    grid_dict['left_edge'] = [-1.0, -1.0, -1.0]
    grid_dict['right_edge'] = [1.0, 1.0, 1.0]
    grid_dict['dimensions'] = [8, 8, 8]
    grid_dict['level'] = 0

    grid_dict['density'] = np.ones((8,8,8))

    grid_data.append(grid_dict)

    domain_dimensions = np.array([8, 8, 8])

    spf = load_amr_grids(grid_data, domain_dimensions)

    def make_proj():
        p = ProjectionPlot(spf, 'x', ["density"], center='c', origin='native')
        return p
    assert_raises(YTIntDomainOverflow, make_proj)
Example #10
def test_qt_overflow():
    grid_data = []

    grid_dict = {}

    grid_dict['left_edge'] = [-1.0, -1.0, -1.0]
    grid_dict['right_edge'] = [1.0, 1.0, 1.0]
    grid_dict['dimensions'] = [8, 8, 8]
    grid_dict['level'] = 0

    grid_dict['density'] = np.ones((8,8,8))

    grid_data.append(grid_dict)

    domain_dimensions = np.array([8, 8, 8])

    spf = load_amr_grids(grid_data, domain_dimensions)

    def make_proj():
        p = ProjectionPlot(spf, 'x', ["density"], center='c', origin='native')
        return p
    yield assert_raises, YTIntDomainOverflow, make_proj
Example #11
def test_qt_overflow():
    grid_data = []

    grid_dict = {}

    grid_dict["left_edge"] = [-1.0, -1.0, -1.0]
    grid_dict["right_edge"] = [1.0, 1.0, 1.0]
    grid_dict["dimensions"] = [8, 8, 8]
    grid_dict["level"] = 0

    grid_dict["density"] = np.ones((8, 8, 8))

    grid_data.append(grid_dict)

    domain_dimensions = np.array([8, 8, 8])

    spf = load_amr_grids(grid_data, domain_dimensions)

    def make_proj():
        p = ProjectionPlot(spf, "x", ["density"], center="c", origin="native")
        return p

    assert_raises(YTIntDomainOverflow, make_proj)
Example #12
def yt_loadathdf(filename,
                 quantities=None,
                 Prat=1,
                 Crat=1,
                 geometry='cartesian',
                 flag='yt'):
    f = h5py.File(filename, 'r')
    # to see all the available keys f.attrs.keys()
    nblock = f.attrs['TotalMeshBlock']
    block_size = f.attrs['MeshBlockSize']
    root_grid_size = f.attrs['RootGridSize']
    maxlevel = f.attrs['MaxLevel']
    cycle = f.attrs['NCycle']

    time = f.attrs['Time']

    #    nvariable=f.attrs['NVariables']

    nx = block_size[0]
    ny = block_size[1]
    nz = block_size[2]

    x1f = np.array(f[u'MeshBlock0'][u'x1f'])
    x2f = np.array(f[u'MeshBlock0'][u'x2f'])
    x3f = np.array(f[u'MeshBlock0'][u'x3f'])

    #    location=f[u'MeshBlock0'].attrs[u'LogicalLocation']
    #    id=f[u'MeshBlock0'].attrs[u'GlobalID']

    if quantities is None:
        quantities = f[u'MeshBlock0'].keys()
    quantities = [str(q) for q in quantities
                  if q not in ('x1f', 'x2f', 'x3f')]

    grid_data = [dict() for x in range(nblock)]

    blockid = 0
    root_lx1 = x1f[0]
    root_rx1 = x1f[nx]
    root_lx2 = x2f[0]
    root_rx2 = x2f[ny]
    root_lx3 = x3f[0]
    root_rx3 = x3f[nz]

    block_size = block_size

    for block in f.values():
        level = block.attrs['Level'][0]
        x1f = np.array(block[u'x1f'])
        x2f = np.array(block[u'x2f'])
        x3f = np.array(block[u'x3f'])
        root_lx1 = min(x1f[0], root_lx1)
        root_rx1 = max(x1f[nx], root_rx1)
        root_lx2 = min(x2f[0], root_lx2)
        root_rx2 = max(x2f[ny], root_rx2)
        root_lx3 = min(x3f[0], root_lx3)
        root_rx3 = max(x3f[nz], root_rx3)
        left = [x1f[0], x2f[0], x3f[0]]
        right = [x1f[nx], x2f[ny], x3f[nz]]
        grid_data[blockid]['left_edge'] = left
        grid_data[blockid]['right_edge'] = right
        grid_data[blockid]['level'] = level
        # the block size is (nx, ny, nz)
        # we need (nz, ny , nx) corresponding to 3D array

        grid_data[blockid]['dimensions'] = block_size
        for q in quantities:
            grid_data[blockid][q] = np.reshape(np.ravel(np.array(block[q]),
                                                        order='C'),
                                               (nx, ny, nz),
                                               order='F')
            if q in ('Er', 'Er0', 'Pr11', 'Pr12', 'Pr13', 'Pr21', 'Pr22',
                     'Pr23', 'Pr31', 'Pr32', 'Pr33'):
                grid_data[blockid][q] = grid_data[blockid][q] * Prat
            elif q in ('Fr01', 'Fr02', 'Fr03', 'Fr1', 'Fr2', 'Fr3'):
                grid_data[blockid][q] = grid_data[blockid][q] * Prat / Crat
        print(blockid, nblock)
        blockid = blockid + 1

    # close the file
    f.close()

    # field units (code units)
    field_units = dict()
    for q in quantities:
        if q in ('Er', 'Er0', 'Pr11', 'Pr12', 'Pr13', 'Pr21', 'Pr22',
                 'Pr23', 'Pr31', 'Pr32', 'Pr33', 'press'):
            field_units[q] = 'code_mass/(code_time**2*code_length)'
        elif q in ('Fr01', 'Fr02', 'Fr03', 'Fr1', 'Fr2', 'Fr3'):
            field_units[q] = 'code_mass*code_length/code_time'
        elif q in ('vel1', 'vel2', 'vel3'):
            field_units[q] = 'code_length/code_time'
        elif q == 'rho':
            field_units[q] = 'code_mass/code_length**3'

    # build the bounding box and load only after all field units are assigned
    bbox = np.array([[root_lx1, root_rx1], [root_lx2, root_rx2],
                     [root_lx3, root_rx3]])
    if flag == 'yt':
        ds = yt.load_amr_grids(grid_data,
                               block_size,
                               field_units=field_units,
                               sim_time=time,
                               bbox=bbox,
                               geometry=geometry)
        return ds
    else:
        return grid_data
Example #13
domain_dimensions = [32, 32, 32]

# Get cell coordinates
xmin, xmax, ymin, ymax, zmin, zmax = get_cell_coordinates(config.tree, read_method="pandas")

ncells = len(xmin)
# grid_data = []
# for index in range(ncells):
#     cell = dict(left_edge=[xmin[index], ymin[index], zmin[index]],
#                 right_edge=[xmax[index], ymax[index], zmax[index]],
#                 dimensions=domain_dimensions, density=values[index], level=0)
#     grid_data.append(cell)

# NOTE: load_amr_grids expects a list of grid dictionaries, one per grid
grid_data = [dict(left_edge=[xmin, ymin, zmin], right_edge=[xmax, ymax, zmax],
                  level=0, dimensions=domain_dimensions)]

bbox = None
ds = yt.load_amr_grids(grid_data, domain_dimensions, bbox=bbox)

# load_amr_grids(grid_data, domain_dimensions,
#                    bbox=None, sim_time=0.0, length_unit=None,
#                    mass_unit=None, time_unit=None, velocity_unit=None,
#                    magnetic_unit=None, periodicity=(True, True, True),
#                    geometry="cartesian", refine_by=2, unit_system="cgs"):
#     r"""Load a set of grids of data into yt as a
#     :class:`~yt.frontends.stream.data_structures.StreamHandler`.
#     This should allow a sequence of grids of varying resolution of data to be
#     loaded directly into yt and analyzed as would any others.  This comes with
#     several caveats:
#
#     * Units will be incorrect unless the unit system is explicitly specified.
#     * Some functions may behave oddly, and parallelism will be
#       disappointing or non-existent in most cases.
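The commented-out signature above warns that units will be incorrect unless they are given explicitly. As a minimal, self-contained sketch (independent of the get_cell_coordinates/config.tree helpers used in this example), a single uniform grid can be loaded with an explicit length unit roughly like this:

import numpy as np
import yt

# One coarse grid covering the whole domain; density is random test data
# supplied as an (array, unit-string) tuple.
dims = [32, 32, 32]
grids = [dict(left_edge=[0.0, 0.0, 0.0],
              right_edge=[1.0, 1.0, 1.0],
              level=0,
              dimensions=dims,
              density=(np.random.random(dims), "g/cm**3"))]
bbox = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]])

# length_unit pins the domain scale so derived quantities carry real units
ds = yt.load_amr_grids(grids, dims, bbox=bbox, length_unit="Mpc",
                       periodicity=(False, False, False))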
Example #14
    def temporary(self, it=0):
        headers = select_header_h5(self.header, self.var, it=it)
        dsets = []
        for item in headers:
            dsets.append(dataset_yt(headers[item]['file'], item, self.var))
        return yt.load_amr_grids(dsets, [1000, 1000, 1000])
        # return AMRGrid(dset, self.dim, self.var)
        # return dsets

    # @property
    # def it(self):
    #     return self.dataset['it'].unique().astype(int).tolist()

    # @property
    # def time(self):
    #     return pd.Series(self.dataset['time'].values, index=self.dataset['it'].astype(int)).drop_duplicates().sort_index()

    # @property
    # def time(self):
    #     p = []
    #     for item in self.dataset:
    #         time.append(self.dataset[item]['time'])
    #     return time

    # def temporary(self, it=0):
    #     headers = select_header_h5(self.dataset, self.var, it=it)
    #     dsets = []
    #     for item in headers:
    #         dsets.append(dataset_h5(self.dataset[item]['file'], item))
    #     # return AMRGrid(dset, self.dim, self.var, 'hdf5')
    #     return dsets

    # def grid_hierarchies(self):
    #     """
    #     Describes the geometry of the refined grid hierarchies, such as component number, ghost zones and refinement level. Grid hierarchies may change during the evolution. All of this is read from the file headers.

    #     :return: a dict about grid_hierarchies
    #     """
    #     parser = re.compile(r'([^:]+)::(\S+) it=(\d+) tl=(\d+)( m=0)? rl=(\d+)( c=(\d+))?')
    #     for var in self.varfiles.keys():
    #         for file in self.varfiles[var]:
    #             filename = os.path.basename(file)
    #             if filename in files:
    #                 continue
    #     return None

    # def slice(self, meshgrid='HYDROBASE::press it=0 tl=0 rl=0 c=10'):
    #     """
    #     CarpetIOHDF5 is different from CarpetIOASCII. We don't need to read all the data at the beginning; 2-D or 3-D data is huge, and reading it all at once is a waste of resources.

    #     :return: DataFrame
    #     """
    #     with read(self.files[0]) as f:
    #         mesh = f[meshgrid]
    #         delta = mesh.attrs['delta']
    #         origin = mesh.attrs['origin']
    #         sizeA = mesh.shape
    #         tmpX = np.arange(0,sizeA[1])*delta[0]+origin[0]
    #         tmpY = np.arange(0,sizeA[0])*delta[1]+origin[1]

    #         grid = np.meshgrid(tmpX, tmpY)
    #         data = np.array(mesh)
    #     return grid, data


# def merge_filedata(filelist):
#     p = []
#     for file in filelist:
#         with read(file) as f:
#             for dset in sorted(list(f)):
#                 infos = dict()
#                 REG = re.match('(\S+)::(\S+) it=(\d+)',dset)
#                 if REG:
#                     infos['group'] = REG.groups()[0]
#                     infos['var']   = REG.groups()[1]
#                     infos['it']    = int(REG.groups()[2])
#                 REG = re.search('tl=(\d+)',dset);
#                 if REG:
#                     infos['tl']=int(REG.groups()[0])
#                 REG = re.search('rl=(\d+)',dset)
#                 if REG:
#                     infos['rl']=int(REG.groups()[0])
#                 REG = re.search('c=(\d+)',dset)
#                 if REG:
#                     infos['c']=int(REG.groups()[0])

#                 subgrid = f[dset]
#                 try:
#                     delta = subgrid.attrs['delta']
#                     origin = subgrid.attrs['origin']
#                     size = subgrid.shape
#                     dim = len(size)
#                     coord = ['x', 'y', 'z']
#                     for i in range(dim) :
#                         infos[coord[i]] = np.arange(0,size[(dim-1)-i])*delta[i]+origin[i]
#                 except:
#                     print(dset)
#                 infos['data'] = np.array(subgrid)
#                 p.append(infos)

#     return p

# def hdf5_2d(X, Y, data, title=None, colormap='RdBu'):
#     """
#     Create a pseudocolor plot

#     .. note::

#         The dimensions of X and Y should be one greater than those of C. Alternatively, X, Y and C may have equal dimensions, in which case the last row and column of C will be ignored.

#     :param array X: A 1-D array. They will be expanded as needed into the appropriate 2-D arrays, making a rectangular grid.
#     :param array Y: A 1-D array. They will be expanded as needed into the appropriate 2-D arrays, making a rectangular grid.
#     :param array data: A scalar 2-D array. The values will be color-mapped.
#     :param str title: Set figure title.
#     :param str colormap: A Colormap name. The colormap maps the C values to colors.
#     """
#     size = data.shape
#     fig, ax = plt.subplots()
#     tmpX, tmpY = np.meshgrid(X, Y)
#     im = plt.pcolormesh(tmpX, tmpY, data, cmap=colormap)
#     plt.colorbar(im)
#     ax.set_title(title)
#     plt.show()