Example no. 1
0
    def test_flip_depths(self):
        """The unique depths extracted from the variable match the expected negative values."""
        resource = os.path.join(
            os.path.dirname(__file__), 'resources', 'sensor_with_depths_3.nc')
        dataset = EnhancedDataset(resource)
        moisture = dataset.variables['soil_moisture_percent']
        frame = get_dataframe_from_variable(dataset, moisture)

        expected = np.asarray([-0.0508, -0.2032, -0.508])
        assert np.allclose(frame.depth.unique(), expected)
Example no. 2
0
    def test_sensor_with_depths(self):
        """Combining the dataframes from two depth-sensor files yields a non-empty result."""
        def _load(filename):
            # Open one resource file, extract its soil-moisture dataframe, close it.
            resource = os.path.join(
                os.path.dirname(__file__), 'resources', filename)
            dataset = EnhancedDataset(resource)
            frame = get_dataframe_from_variable(
                dataset, dataset.variables['soil_moisture_percent'])
            dataset.close()
            return frame

        first = _load('sensor_with_depths_1.nc')
        second = _load('sensor_with_depths_2.nc')

        combined = second.combine_first(first)

        assert not combined.empty
Example no. 3
0
 def is_valid(cls, uri):
     """Return True when *uri* opens as an SGrid-convention dataset.

     Tries a single-file open first, then a time-aggregated multi-file
     open; any open or inspection failure yields False.
     """
     def _compliant(ds):
         # A successful SGrid() construction, or an 'sgrid' marker in the
         # Conventions attribute, counts as valid. An absent Conventions
         # attribute raises AttributeError for the callers below to handle.
         try:
             SGrid(ds)
             return True
         except ValueError:
             return 'sgrid' in ds.Conventions.lower()

     try:
         with EnhancedDataset(uri) as ds:
             return _compliant(ds)
     except RuntimeError:
         # Single-file open failed; retry as a multi-file aggregation.
         try:
             with EnhancedMFDataset(uri, aggdim='time') as ds:
                 return _compliant(ds)
         except (OSError, IndexError, AttributeError, RuntimeError,
                 ValueError):
             return False
     except (OSError, FileNotFoundError, AttributeError):
         return False
Example no. 4
0
def netcdf4_dataset(dataset):
    """Open *dataset* as an EnhancedDataset, falling back to EnhancedMFDataset.

    Best-effort opener: returns the opened dataset object, or None when
    neither a single-file nor a time-aggregated multi-file open succeeds.
    """
    try:
        return EnhancedDataset(path(dataset))
    # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt are
    # no longer swallowed while keeping the best-effort fallback behavior.
    except Exception:
        try:
            return EnhancedMFDataset(path(dataset), aggdim='time')
        except Exception:
            return None
Example no. 5
0
 def topology(self):
     """Yield the cached topology dataset, opening it lazily on first use.

     Yields None when the topology file cannot be opened.
     Presumably wrapped with @contextmanager at the definition site,
     since it yields exactly once — TODO confirm decorator outside this view.
     """
     try:
         # EAFP probe: raises AttributeError when self._topology has not
         # been loaded yet (or does not expose .variables).
         self._topology.variables
         yield self._topology
     except AttributeError:
         try:
             self._topology = EnhancedDataset(self.topology_file)
             yield self._topology
         except RuntimeError:
             # Open failed; callers must cope with receiving None.
             yield None
Example no. 6
0
 def is_valid(cls, uri):
     """Return True when the dataset at *uri* declares UGRID conventions."""
     def _declares_ugrid(ds):
         # Missing Conventions raises AttributeError for the callers below.
         return 'ugrid' in ds.Conventions.lower()

     try:
         with EnhancedDataset(uri) as ds:
             return _declares_ugrid(ds)
     except RuntimeError:
         # Single-file open failed; retry as a multi-file aggregation.
         try:
             with EnhancedMFDataset(uri, aggdim='time') as ds:
                 return _declares_ugrid(ds)
         except (AttributeError, RuntimeError):
             return False
     except AttributeError:
         return False
Example no. 7
0
 def dataset(self):
     """Yield the underlying netCDF dataset, opening it lazily on first use.

     Falls back from a single-file EnhancedDataset to a multi-file
     EnhancedMFDataset aggregated along 'time'; yields None when both fail.
     Presumably wrapped with @contextmanager at the definition site,
     since it yields exactly once — TODO confirm decorator outside this view.
     """
     try:
         # Dataset is already loaded
         # (EAFP probe: raises AttributeError when self._dataset is unset
         # or does not expose .variables.)
         self._dataset.variables
         yield self._dataset
     except AttributeError:
         try:
             self._dataset = EnhancedDataset(self.path())
             yield self._dataset
         except (OSError, RuntimeError, FileNotFoundError):
             try:
                 self._dataset = EnhancedMFDataset(self.path(), aggdim='time')
                 yield self._dataset
             except (OSError, IndexError, RuntimeError, FileNotFoundError):
                 # Neither open strategy worked; callers must cope with None.
                 yield None
Example no. 8
0
 def dataset(self):
     """Yield the underlying netCDF dataset, opening it lazily on first use.

     Falls back from a single-file EnhancedDataset to a multi-file
     EnhancedMFDataset aggregated along 'time'; yields None when both fail.
     Presumably wrapped with @contextmanager at the definition site,
     since it yields exactly once — TODO confirm decorator outside this view.
     """
     try:
         # Dataset is already loaded
         # (EAFP probe: raises AttributeError when self._dataset is unset
         # or does not expose .variables.)
         self._dataset.variables
         yield self._dataset
     except AttributeError:
         try:
             self._dataset = EnhancedDataset(self.path())
             yield self._dataset
         # Widened from RuntimeError only: a missing/unreadable file raises
         # OSError/FileNotFoundError, which previously escaped the generator
         # instead of triggering the multi-file fallback (the sibling
         # implementation of this method already handles these).
         except (OSError, RuntimeError, FileNotFoundError):
             try:
                 self._dataset = EnhancedMFDataset(self.path(),
                                                   aggdim='time')
                 yield self._dataset
             except (OSError, IndexError, RuntimeError, FileNotFoundError):
                 # Neither open strategy worked; callers must cope with None.
                 yield None
Example no. 9
0
 def is_valid(uri):
     """Return True when the dataset at *uri* is SGrid-compliant."""
     def _compliant(ds):
         # Short-circuits: Conventions is only consulted when the
         # compliance check itself comes back falsy.
         grid = SGrid(ds)
         return grid.sgrid_compliant_file() or 'sgrid' in ds.Conventions.lower()

     try:
         with EnhancedDataset(uri) as ds:
             return _compliant(ds)
     except RuntimeError:
         # Single-file open failed; retry as a multi-file aggregation.
         try:
             with EnhancedMFDataset(uri, aggdim='time') as ds:
                 return _compliant(ds)
         except (AttributeError, RuntimeError, SGridNonCompliantError):
             return False
     except (AttributeError, SGridNonCompliantError):
         return False
Example no. 10
0
    def update_cache(self, force=False):
        """Rebuild the on-disk topology cache and spatial index for this dataset.

        Saves the UGrid topology to ``self.topology_file``, copies every
        time variable (and its dimensions) into that cache file, rebuilds
        the RTree index, and records the cache timestamp.

        Note: ``force`` is accepted but not used anywhere in this body.
        """
        with self.dataset() as nc:
            ug = UGrid.from_nc_dataset(nc=nc)
            ug.save_as_netcdf(self.topology_file)

            # Bail out (with a log entry) if the topology file never appeared.
            if not os.path.exists(self.topology_file):
                logger.error(
                    "Failed to create topology_file cache for Dataset '{}'".
                    format(self.dataset))
                return

            time_vars = nc.get_variables_by_attributes(standard_name='time')
            # Flatten the dimension tuples of every time variable, then
            # de-duplicate so each dimension is created at most once.
            time_dims = list(
                itertools.chain.from_iterable(
                    [time_var.dimensions for time_var in time_vars]))
            unique_time_dims = list(set(time_dims))
            with EnhancedDataset(self.topology_file, mode='a') as cached_nc:
                # create pertinent time dimensions if they aren't already present
                for unique_time_dim in unique_time_dims:
                    dim_size = len(nc.dimensions[unique_time_dim])
                    try:
                        cached_nc.createDimension(unique_time_dim,
                                                  size=dim_size)
                    except RuntimeError:
                        # Dimension already exists in the cache file.
                        continue

                # support cases where there may be more than one variable with standard_name='time' in a dataset
                for time_var in time_vars:
                    try:
                        time_var_obj = cached_nc.createVariable(
                            time_var._name, time_var.dtype,
                            time_var.dimensions)
                    except RuntimeError:
                        # Variable already exists in the cache; reuse it.
                        time_var_obj = cached_nc.variables[time_var.name]

                    # Copy values and the attributes the rest of the code relies on.
                    time_var_obj[:] = time_var[:]
                    time_var_obj.units = time_var.units
                    time_var_obj.standard_name = 'time'

            # Now do the RTree index
            self.make_rtree()

        self.cache_last_updated = datetime.utcnow().replace(tzinfo=pytz.utc)
        self.save()
Example no. 11
0
def forward(apps, schema_editor):
    """Data migration: populate Layer rows (and their units) from each Dataset's netCDF variables.

    Best-effort: datasets that cannot be opened by either opener are skipped.
    """
    Layer = apps.get_model('wms', 'Layer')
    Dataset = apps.get_model('wms', 'Dataset')

    for d in Dataset.objects.all():
        nc = None
        try:
            nc = EnhancedDataset(d.uri)
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt are
        # no longer swallowed while keeping the best-effort fallback behavior.
        except Exception:
            try:
                nc = EnhancedMFDataset(d.uri, aggdim='time')
            except Exception:
                pass

        if nc is not None:
            try:
                for v in nc.variables:
                    nc_var = nc.variables[v]
                    layer, _ = Layer.objects.get_or_create(dataset_id=d.id,
                                                           var_name=v)
                    if hasattr(nc_var, 'units'):
                        layer.units = nc_var.units
                        layer.save()
            finally:
                # Fix: the original leaked one open dataset handle per Dataset row.
                nc.close()
Example no. 12
0
 def setUp(self):
     """Open the COAMPS 4 km true-wind test file used by the test cases."""
     resource = os.path.join(
         os.path.dirname(__file__),
         "resources/coamps/cencoos_4km/wnd_tru/10m/coamps_cencoos_4km_wnd_tru_10m_2014-06-20-00_2014.171.00.nc")
     self.nc = EnhancedDataset(resource)