Example #1
    def _parse_parameter_file(self):
        self._handle = NetCDF4FileHandler(self.parameter_filename)
        with self._handle.open_ds() as ds:
            self._read_glo_var()
            self.dimensionality = ds.variables["coor_names"].shape[0]
            self.parameters["info_records"] = self._load_info_records()
            self.unique_identifier = self._get_unique_identifier()
            self.num_steps = len(ds.variables["time_whole"])
            self.current_time = self._get_current_time()
            self.parameters["num_meshes"] = ds.variables["eb_status"].shape[0]
            self.parameters["elem_names"] = self._get_elem_names()
            self.parameters["nod_names"] = self._get_nod_names()
            self.domain_left_edge, self.domain_right_edge = self._load_domain_edge()
            self._periodicity = (False, False, False)

        # These attributes don't really make sense for unstructured
        # mesh data, but yt warns if they are not present, so we set
        # them to dummy values here.
        self.domain_dimensions = np.ones(3, "int32")
        self.cosmological_simulation = 0
        self.current_redshift = 0
        self.omega_lambda = 0
        self.omega_matter = 0
        self.hubble_constant = 0
        self.refine_by = 0
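
The snippet above reads Exodus II metadata through NetCDF4FileHandler.open_ds(), which yields the underlying netCDF4 dataset as a context manager. A minimal sketch of the same reads using the netCDF4 library directly, with "mesh.e" as a hypothetical Exodus II file path:

from netCDF4 import Dataset

# "mesh.e" is a hypothetical Exodus II file used only for illustration.
with Dataset("mesh.e", "r") as ds:
    dimensionality = ds.variables["coor_names"].shape[0]  # spatial dimensions
    num_steps = len(ds.variables["time_whole"])            # number of time steps
    num_meshes = ds.variables["eb_status"].shape[0]        # number of element blocks
    print(dimensionality, num_steps, num_meshes)
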
Example #2
    def __init__(self, ds):
        self.filename = ds.index_filename
        exodus_ii_handler = NetCDF4FileHandler(self.filename)
        self.handler = exodus_ii_handler.dataset
        super().__init__(ds)
        self.node_fields = ds._get_nod_names()
        self.elem_fields = ds._get_elem_names()
Example #3
    def _get_fluid_types(self):
        with NetCDF4FileHandler(self.parameter_filename).open_ds() as ds:
            fluid_types = ()
            i = 1
            while True:
                ftype = "connect%d" % i
                if ftype in ds.variables:
                    fluid_types += (ftype,)
                    i += 1
                else:
                    break
            fluid_types += ("all",)
            return fluid_types
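
The loop probes element blocks, which Exodus II stores as consecutively numbered connect1, connect2, ... variables, and stops at the first missing index. The same probing pattern sketched against a plain netCDF4 dataset (the file path is hypothetical):

from netCDF4 import Dataset

with Dataset("mesh.e", "r") as ds:  # hypothetical Exodus II file
    mesh_names = []
    i = 1
    # element-block connectivity arrays are named connect1, connect2, ...
    while f"connect{i}" in ds.variables:
        mesh_names.append(f"connect{i}")
        i += 1
    print(mesh_names)
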
Example #4
    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        # This accepts a filename or a set of arguments and returns True or
        # False depending on whether the file is of the requested type.

        warn_netcdf(filename)
        try:
            nc4_file = NetCDF4FileHandler(filename)
            with nc4_file.open_ds(keepweakref=True) as _handle:
                is_cm1_lofs = hasattr(_handle, "cm1_lofs_version")
                # the attribute name really does contain a space; not a typo
                is_cm1 = hasattr(_handle, "cm1 version")

                # ensure coordinates of each variable array exists in the dataset
                coords = _handle.dimensions  # get the dataset wide coordinates
                failed_vars = []  # list of failed variables
                for var in _handle.variables:  # iterate over the variables
                    vcoords = _handle[var].dimensions  # dims for this variable
                    ncoords = len(vcoords)  # number of coordinates in variable
                    # number of coordinates that pass for a variable
                    coordspassed = sum(vc in coords for vc in vcoords)
                    if coordspassed != ncoords:
                        failed_vars.append(var)

                if failed_vars:
                    mylog.warning(
                        "Trying to load a cm1_lofs netcdf file but the "
                        "coordinates of the following fields do not match the "
                        "coordinates of the dataset: %s",
                        failed_vars,
                    )
                    return False

            if not is_cm1_lofs:
                if is_cm1:
                    mylog.warning(
                        "It looks like you are trying to load a cm1 netcdf file, "
                        "but at present yt only supports cm1_lofs output. Until"
                        " support is added, you can likely use"
                        " yt.load_uniform_grid() to load your cm1 file manually."
                    )
                return False
        except (OSError, AttributeError, ImportError):
            return False

        return True
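
_is_valid is the hook yt's load machinery calls on each frontend to decide which one owns a file; when the checks above pass, yt.load dispatches the file to the CM1 frontend. A minimal usage sketch, with a hypothetical file name:

import yt

# hypothetical cm1_lofs output; yt.load tries each frontend's _is_valid
# until one of them claims the file
ds = yt.load("cm1_lofs_output.nc")
print(ds.dimensionality, ds.current_time)
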
Example #5
    def __init__(
        self,
        filename,
        dataset_type="cm1",
        storage_filename=None,
        units_override=None,
        unit_system="mks",
    ):
        self.fluid_types += ("cm1",)
        self._handle = NetCDF4FileHandler(filename)
        # refinement factor between a grid and its subgrid
        self.refine_by = 1
        super().__init__(
            filename,
            dataset_type,
            units_override=units_override,
            unit_system=unit_system,
        )
        self.storage_filename = storage_filename
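
The units_override and unit_system arguments are forwarded unchanged to the base Dataset constructor. A sketch of how a caller might override a unit at load time (the file name and override values are illustrative only):

import yt

ds = yt.load(
    "cm1_lofs_output.nc",
    units_override={"length_unit": (1.0, "km")},  # illustrative override
)
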
Example #6
    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        # This accepts a filename or a set of arguments and returns True or
        # False depending on whether the file is of the requested type.

        warn_netcdf(filename)
        is_cfrad = False
        try:
            # note that we use the NetCDF4FileHandler here to avoid some
            # issues with xarray opening datasets it cannot handle. Once
            # a dataset is identified as a CFRadialDataset, xarray is used
            # for opening. See https://github.com/yt-project/yt/issues/3987
            nc4_file = NetCDF4FileHandler(filename)
            with nc4_file.open_ds(keepweakref=True) as ds:
                con = "Conventions"  # the attribute to check for file conventions
                cons = ""  # the value of the Conventions attribute
                for c in [con, con.lower()]:
                    if hasattr(ds, c):
                        cons += getattr(ds, c)
                is_cfrad = "CF/Radial" in cons
        except (OSError, AttributeError, ImportError):
            return False

        return is_cfrad
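
The detection concatenates the global Conventions attribute (checking both the capitalized and lowercase spellings) and looks for the "CF/Radial" marker. The same check sketched with plain netCDF4, against a hypothetical radar volume file:

from netCDF4 import Dataset

with Dataset("radar_volume.nc", "r") as ds:  # hypothetical file path
    cons = "".join(
        getattr(ds, name)
        for name in ("Conventions", "conventions")
        if hasattr(ds, name)
    )
    print("CF/Radial" in cons)
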
Example #7
    def __init__(self, ds):
        self.filename = ds.filename
        self._handle = NetCDF4FileHandler(self.filename)
        super().__init__(ds)