Example #1
0
 def topology(self):
     """Context manager yielding the cached topology dataset, or None.

     NOTE(review): intended for use under ``@contextmanager`` — the
     decorator is not visible in this fragment; confirm at the call site.
     """
     try:
         # Touching .variables proves the handle exists and is open;
         # a missing/closed handle raises AttributeError below.
         self._topology.variables
         yield self._topology
     except AttributeError:
         # Not loaded yet: lazily open the topology cache file.
         try:
             self._topology = EnhancedDataset(self.topology_file)
             yield self._topology
         except RuntimeError:
             # Cache file missing or unreadable; caller must handle None.
             yield None
Example #2
0
 def is_valid(cls, uri):
     """Return True when *uri* opens as an SGrid dataset.

     A dataset passes when ``SGrid`` accepts it outright, or when its
     global ``Conventions`` attribute mentions 'sgrid'.  A single-file
     open is attempted first; on RuntimeError a multi-file aggregation
     along 'time' is tried.  Any other expected failure yields False.
     """
     try:
         with EnhancedDataset(uri) as nc:
             try:
                 SGrid(nc)
             except ValueError:
                 # Not strictly compliant; accept a declared convention.
                 return 'sgrid' in nc.Conventions.lower()
             return True
     except RuntimeError:
         # Single-file open failed; retry as a time aggregation.
         try:
             with EnhancedMFDataset(uri, aggdim='time') as nc:
                 try:
                     SGrid(nc)
                 except ValueError:
                     return 'sgrid' in nc.Conventions.lower()
                 return True
         except (OSError, IndexError, AttributeError, RuntimeError,
                 ValueError):
             return False
     except (OSError, FileNotFoundError, AttributeError):
         return False
Example #3
0
    def test_flip_depths(self):
        """Depth values should come back negative-down (flipped).

        Fix: the dataset is now closed in a ``finally`` block; sibling
        tests close their datasets explicitly and this one leaked its
        file handle.
        """
        ncfile1 = os.path.join(os.path.dirname(__file__), 'resources', 'sensor_with_depths_3.nc')
        ncd1 = EnhancedDataset(ncfile1)
        try:
            ncvar1 = ncd1.variables['soil_moisture_percent']
            df1 = get_dataframe_from_variable(ncd1, ncvar1)
        finally:
            ncd1.close()

        assert np.allclose(df1.depth.unique(), np.asarray([-0.0508, -0.2032, -0.508]))
def netcdf4_dataset(dataset):
    """Open *dataset* and return a netCDF handle, or None.

    Tries a single-file ``EnhancedDataset`` first, then falls back to an
    ``EnhancedMFDataset`` aggregated along 'time'.

    Fix: the bare ``except:`` clauses were narrowed to ``Exception`` so
    that KeyboardInterrupt/SystemExit are no longer swallowed; opening
    failures remain best-effort.
    """
    try:
        return EnhancedDataset(path(dataset))
    except Exception:
        try:
            return EnhancedMFDataset(path(dataset), aggdim='time')
        except Exception:
            return None
Example #5
0
 def topology(self):
     """Context manager yielding the cached topology dataset, or None.

     NOTE(review): intended for use under ``@contextmanager`` — the
     decorator is not visible in this fragment; confirm at the call site.
     """
     try:
         # AttributeError here means the handle is missing or closed.
         self._topology.variables
         yield self._topology
     except AttributeError:
         # Lazily open the topology cache file on first use.
         try:
             self._topology = EnhancedDataset(self.topology_file)
             yield self._topology
         except RuntimeError:
             # Cache file missing or unreadable; caller must handle None.
             yield None
Example #6
0
 def is_valid(cls, uri):
     """Return True when *uri* declares the UGRID convention.

     Falls back from a single-file open to a multi-file ('time')
     aggregation on RuntimeError; missing Conventions or a second open
     failure yields False.
     """
     try:
         with EnhancedDataset(uri) as nc:
             conventions = nc.Conventions.lower()
             return 'ugrid' in conventions
     except RuntimeError:
         # Retry as a time-aggregated multi-file dataset.
         try:
             with EnhancedMFDataset(uri, aggdim='time') as nc:
                 conventions = nc.Conventions.lower()
                 return 'ugrid' in conventions
         except (AttributeError, RuntimeError):
             return False
     except AttributeError:
         # No Conventions attribute on the single-file dataset.
         return False
class EnhancedDatasetTests(unittest.TestCase):
    """Exercises EnhancedDataset: repeated close() and
    get_variables_by_attributes with values, callables, and mixes."""

    def setUp(self):
        # Small COAMPS wind file shipped with the test resources.
        netcdf_file = os.path.join(os.path.dirname(__file__), "resources/coamps/cencoos_4km/wnd_tru/10m/coamps_cencoos_4km_wnd_tru_10m_2014-06-20-00_2014.171.00.nc")
        self.nc = EnhancedDataset(netcdf_file)

    def test_multiple_close(self):
        """ Closing the Dataset twice should not raise an error """
        self.nc.close()
        self.nc.close()

    def test_find_variables_by_single_attribute(self):
        # Exact attribute-value matches.
        vs = self.nc.get_variables_by_attributes(standard_name='projection_y_coordinate')
        self.assertEqual(len(vs), 1)

        vs = self.nc.get_variables_by_attributes(grid_spacing='4.0 km')
        self.assertEqual(len(vs), 2)

    def test_find_variables_by_multiple_attribute(self):
        # Multiple criteria combine with AND semantics.
        vs = self.nc.get_variables_by_attributes(grid_spacing='4.0 km', standard_name='projection_y_coordinate')
        self.assertEqual(len(vs), 1)

    def test_find_variables_by_single_lambda(self):
        # Callable criteria receive the attribute value; presumably
        # None when the attribute is absent — confirm in EnhancedDataset.
        vs = self.nc.get_variables_by_attributes(_CoordinateAxisType=lambda v: v in ['Time', 'GeoX', 'GeoY'])
        self.assertEqual(len(vs), 3)

        vs = self.nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None)
        self.assertEqual(len(vs), 2)

    def test_find_variables_by_multiple_lambdas(self):
        vs = self.nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None, long_name=lambda v: v is not None and 'v_component' in v)
        self.assertEqual(len(vs), 1)

    def test_find_variables_by_attribute_and_lambda(self):
        # Literal values and callables may be mixed in one call.
        vs = self.nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None, units='m/s')
        self.assertEqual(len(vs), 2)

        vs = self.nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None, long_name='v_component_wind_true_direction_all_geometries @ height_above_ground')
        self.assertEqual(len(vs), 1)
class EnhancedDatasetTests(unittest.TestCase):
    """Duplicate scrape of the EnhancedDataset test case: repeated
    close() and get_variables_by_attributes lookups."""

    def setUp(self):
        # Small COAMPS wind file shipped with the test resources.
        netcdf_file = os.path.join(os.path.dirname(__file__), "resources/coamps/cencoos_4km/wnd_tru/10m/coamps_cencoos_4km_wnd_tru_10m_2014-06-20-00_2014.171.00.nc")
        self.nc = EnhancedDataset(netcdf_file)

    def test_multiple_close(self):
        """ Closing the Dataset twice should not raise an error """
        self.nc.close()
        self.nc.close()

    def test_find_variables_by_single_attribute(self):
        # Exact attribute-value matches.
        vs = self.nc.get_variables_by_attributes(standard_name='projection_y_coordinate')
        self.assertEqual(len(vs), 1)

        vs = self.nc.get_variables_by_attributes(grid_spacing='4.0 km')
        self.assertEqual(len(vs), 2)

    def test_find_variables_by_multiple_attribute(self):
        # Multiple criteria combine with AND semantics.
        vs = self.nc.get_variables_by_attributes(grid_spacing='4.0 km', standard_name='projection_y_coordinate')
        self.assertEqual(len(vs), 1)

    def test_find_variables_by_single_lambda(self):
        # Callable criteria receive the attribute value; presumably
        # None when the attribute is absent — confirm in EnhancedDataset.
        vs = self.nc.get_variables_by_attributes(_CoordinateAxisType=lambda v: v in ['Time', 'GeoX', 'GeoY'])
        self.assertEqual(len(vs), 3)

        vs = self.nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None)
        self.assertEqual(len(vs), 2)

    def test_find_variables_by_multiple_lambdas(self):
        vs = self.nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None, long_name=lambda v: v is not None and 'v_component' in v)
        self.assertEqual(len(vs), 1)

    def test_find_variables_by_attribute_and_lambda(self):
        # Literal values and callables may be mixed in one call.
        vs = self.nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None, units='m/s')
        self.assertEqual(len(vs), 2)

        vs = self.nc.get_variables_by_attributes(grid_mapping=lambda v: v is not None, long_name='v_component_wind_true_direction_all_geometries @ height_above_ground')
        self.assertEqual(len(vs), 1)
Example #9
0
    def test_sensor_with_depths(self):
        """Frames from two files for the same sensor should merge."""
        ncfile1 = os.path.join(os.path.dirname(__file__), 'resources', 'sensor_with_depths_1.nc')
        ncd1 = EnhancedDataset(ncfile1)
        ncvar1 = ncd1.variables['soil_moisture_percent']
        df1 = get_dataframe_from_variable(ncd1, ncvar1)
        ncd1.close()

        ncfile2 = os.path.join(os.path.dirname(__file__), 'resources', 'sensor_with_depths_2.nc')
        ncd2 = EnhancedDataset(ncfile2)
        ncvar2 = ncd2.variables['soil_moisture_percent']
        df2 = get_dataframe_from_variable(ncd2, ncvar2)
        ncd2.close()

        # combine_first keeps df2's values, filling gaps from df1.
        df = df2.combine_first(df1)

        assert not df.empty
Example #10
0
 def dataset(self):
     """Context manager yielding an open dataset handle, or None.

     NOTE(review): intended for use under ``@contextmanager`` — the
     decorator is not visible in this fragment; confirm at the call site.
     """
     try:
         # Dataset is already loaded
         self._dataset.variables
         yield self._dataset
     except AttributeError:
         # Not loaded yet: try a single file first, then a multi-file
         # aggregation along the 'time' dimension.
         try:
             self._dataset = EnhancedDataset(self.path())
             yield self._dataset
         except (OSError, RuntimeError, FileNotFoundError):
             try:
                 self._dataset = EnhancedMFDataset(self.path(), aggdim='time')
                 yield self._dataset
             except (OSError, IndexError, RuntimeError, FileNotFoundError):
                 # Neither open strategy worked.
                 yield None
Example #11
0
 def is_valid(uri):
     """Return True when *uri* is an SGrid file (compliant or declared).

     Falls back from a single-file open to a multi-file ('time')
     aggregation on RuntimeError; expected failures yield False.
     """
     try:
         with EnhancedDataset(uri) as nc:
             sgrid = SGrid(nc)
             compliant = sgrid.sgrid_compliant_file()
             return compliant or ('sgrid' in nc.Conventions.lower())
     except RuntimeError:
         # Retry as a time-aggregated multi-file dataset.
         try:
             with EnhancedMFDataset(uri, aggdim='time') as nc:
                 sgrid = SGrid(nc)
                 compliant = sgrid.sgrid_compliant_file()
                 return compliant or ('sgrid' in nc.Conventions.lower())
         except (AttributeError, RuntimeError, SGridNonCompliantError):
             return False
     except (AttributeError, SGridNonCompliantError):
         return False
Example #12
0
 def dataset(self):
     """Context manager yielding an open dataset handle, or None.

     NOTE(review): intended for use under ``@contextmanager`` — the
     decorator is not visible in this fragment; confirm at the call site.
     """
     try:
         # Dataset is already loaded
         self._dataset.variables
         yield self._dataset
     except AttributeError:
         # Not loaded yet: single-file open, then a 'time' aggregation.
         try:
             self._dataset = EnhancedDataset(self.path())
             yield self._dataset
         except RuntimeError:
             try:
                 self._dataset = EnhancedMFDataset(self.path(),
                                                   aggdim='time')
                 yield self._dataset
             except RuntimeError:
                 # Neither open strategy worked.
                 yield None
Example #13
0
    def update_cache(self, force=False):
        """Write/refresh the topology cache file and its RTree index.

        Saves the dataset's UGrid topology to ``self.topology_file``,
        copies every time variable (standard_name='time') into the
        cache, rebuilds the RTree, and stamps ``cache_last_updated``.

        NOTE(review): ``force`` is accepted but never read here —
        confirm whether callers rely on it.
        """
        with self.dataset() as nc:
            ug = UGrid.from_nc_dataset(nc=nc)
            ug.save_as_netcdf(self.topology_file)

            if not os.path.exists(self.topology_file):
                logger.error(
                    "Failed to create topology_file cache for Dataset '{}'".
                    format(self.dataset))
                return

            # Collect every dimension used by any time variable.
            time_vars = nc.get_variables_by_attributes(standard_name='time')
            time_dims = list(
                itertools.chain.from_iterable(
                    [time_var.dimensions for time_var in time_vars]))
            unique_time_dims = list(set(time_dims))
            with EnhancedDataset(self.topology_file, mode='a') as cached_nc:
                # create pertinent time dimensions if they aren't already present
                for unique_time_dim in unique_time_dims:
                    dim_size = len(nc.dimensions[unique_time_dim])
                    try:
                        cached_nc.createDimension(unique_time_dim,
                                                  size=dim_size)
                    except RuntimeError:
                        # Dimension already exists in the cache file.
                        continue

                # support cases where there may be more than one variable with standard_name='time' in a dataset
                for time_var in time_vars:
                    try:
                        time_var_obj = cached_nc.createVariable(
                            time_var._name, time_var.dtype,
                            time_var.dimensions)
                    except RuntimeError:
                        # Variable already exists; reuse it.
                        time_var_obj = cached_nc.variables[time_var.name]

                    time_var_obj[:] = time_var[:]
                    time_var_obj.units = time_var.units
                    time_var_obj.standard_name = 'time'

            # Now do the RTree index
            self.make_rtree()

        self.cache_last_updated = datetime.utcnow().replace(tzinfo=pytz.utc)
        self.save()
def forward(apps, schema_editor):
    """Data migration: populate ``Layer.units`` from each Dataset's
    netCDF variables.

    Uses historical models via ``apps.get_model``.  Fixes: the bare
    ``except:`` clauses were narrowed to ``Exception`` (so
    KeyboardInterrupt/SystemExit are not swallowed) and the opened
    dataset is now closed in a ``finally`` block instead of leaking.
    """
    Layer = apps.get_model('wms', 'Layer')
    Dataset = apps.get_model('wms', 'Dataset')

    for d in Dataset.objects.all():
        nc = None
        try:
            nc = EnhancedDataset(d.uri)
        except Exception:
            # Best-effort fallback to a 'time' aggregated open.
            try:
                nc = EnhancedMFDataset(d.uri, aggdim='time')
            except Exception:
                pass

        if nc is not None:
            try:
                for v in nc.variables:
                    nc_var = nc.variables[v]
                    l, _ = Layer.objects.get_or_create(dataset_id=d.id, var_name=v)
                    if hasattr(nc_var, 'units'):
                        l.units = nc_var.units
                        l.save()
            finally:
                nc.close()
Example #15
0
    def test_sensor_with_depths(self):
        """Frames from two files for the same sensor should merge."""
        ncfile1 = os.path.join(os.path.dirname(__file__), 'resources', 'sensor_with_depths_1.nc')
        ncd1 = EnhancedDataset(ncfile1)
        ncvar1 = ncd1.variables['soil_moisture_percent']
        df1 = get_dataframe_from_variable(ncd1, ncvar1)
        ncd1.close()

        ncfile2 = os.path.join(os.path.dirname(__file__), 'resources', 'sensor_with_depths_2.nc')
        ncd2 = EnhancedDataset(ncfile2)
        ncvar2 = ncd2.variables['soil_moisture_percent']
        df2 = get_dataframe_from_variable(ncd2, ncvar2)
        ncd2.close()

        # combine_first keeps df2's values, filling gaps from df1.
        df = df2.combine_first(df1)

        assert not df.empty
Example #16
0
class NetCDFDataset(object):
    """Mixin providing cached netCDF access, RTree-backed nearest-point
    lookup and (virtual) layer bookkeeping for a Dataset-like model.

    NOTE(review): relies on the concrete class for ``self.path()``,
    ``self.safe_filename``, ``self.id``, ``self.times()`` and
    ``self.make_rtree()``, plus ``settings.TOPOLOGY_PATH`` — confirm
    against the subclass.
    """

    @contextmanager
    def dataset(self):
        # Yield an open (cached) dataset handle, or None when neither a
        # single-file nor a 'time'-aggregated multi-file open succeeds.
        try:
            # Dataset is already loaded
            self._dataset.variables
            yield self._dataset
        except AttributeError:
            try:
                self._dataset = EnhancedDataset(self.path())
                yield self._dataset
            except (OSError, RuntimeError, FileNotFoundError):
                try:
                    self._dataset = EnhancedMFDataset(self.path(), aggdim='time')
                    yield self._dataset
                except (OSError, IndexError, RuntimeError, FileNotFoundError):
                    yield None

    @contextmanager
    def topology(self):
        # Yield the cached topology dataset, or None if it cannot open.
        try:
            self._topology.variables
            yield self._topology
        except AttributeError:
            try:
                self._topology = EnhancedDataset(self.topology_file)
                yield self._topology
            except RuntimeError:
                yield None

    def close(self):
        # Best-effort close of both cached handles; the attributes may
        # not exist yet and close() may fail on an already-closed file.
        try:
            self._dataset.close()
        except BaseException:
            pass

        try:
            self._topology.close()
        except BaseException:
            pass

    @property
    def topology_file(self):
        # Cached UGrid topology (netCDF) for this dataset.
        return os.path.join(settings.TOPOLOGY_PATH, '{}.nc'.format(self.safe_filename))

    @property
    def time_cache_file(self):
        # Cached numpy (.npy) file of time values.
        return os.path.join(settings.TOPOLOGY_PATH, '{}.npy'.format(self.safe_filename))

    @property
    def domain_file(self):
        return os.path.join(settings.TOPOLOGY_PATH, '{}.domain'.format(self.safe_filename))

    @property
    def node_tree_root(self):
        # Basename of the node RTree files (.dat/.idx).
        return os.path.join(settings.TOPOLOGY_PATH, '{}.nodes').format(self.safe_filename)

    @property
    def node_tree_data_file(self):
        return '{}.dat'.format(self.node_tree_root)

    @property
    def node_tree_index_file(self):
        return '{}.idx'.format(self.node_tree_root)

    @property
    def face_tree_root(self):
        # Basename of the face RTree files (.dat/.idx).
        return os.path.join(settings.TOPOLOGY_PATH, '{}.faces').format(self.safe_filename)

    @property
    def face_tree_data_file(self):
        return '{}.dat'.format(self.face_tree_root)

    @property
    def face_tree_index_file(self):
        return '{}.idx'.format(self.face_tree_root)

    def setup_getfeatureinfo(self, layer, request, location=None):
        # Resolve a GetFeatureInfo request to the nearest node/face and
        # a [start, end) window of time indexes.
        # NOTE(review): request.GET values are used directly in numeric
        # comparisons — assumes upstream parsing to numbers/dates;
        # confirm against the caller.

        location = location or 'face'
        tree = None

        try:
            latitude = request.GET['latitude']
            longitude = request.GET['longitude']
            # Find closest cell or node (only node for now)
            if location == 'face':
                tree = rtree.index.Index(self.face_tree_root)
            elif location == 'node':
                tree = rtree.index.Index(self.node_tree_root)
            else:
                raise NotImplementedError("No RTree for location '{}'".format(location))

            try:
                nindex = list(tree.nearest((longitude, latitude, longitude, latitude), 1, objects=True))[0]
            except IndexError:
                raise ValueError("No cells in the {} tree for point {}, {}".format(location, longitude, latitude))
            closest_x, closest_y = tuple(nindex.bbox[2:])
            geo_index = nindex.object
        except BaseException:
            raise
        finally:
            # Only close the index if it was actually opened.
            if tree is not None:
                tree.close()

        all_times = self.times(layer)

        start_nc_index = np.searchsorted(all_times, request.GET['starting'], side='left')
        start_nc_index = min(start_nc_index, len(all_times) - 1)

        end_nc_index = np.searchsorted(all_times, request.GET['ending'], side='right')
        end_nc_index = max(end_nc_index, 1)  # Always pull the first index

        return_dates = all_times[start_nc_index:end_nc_index]

        return geo_index, closest_x, closest_y, start_nc_index, end_nc_index, return_dates

    def __del__(self):
        # Release netCDF handles when garbage collected.
        self.close()

    def analyze_virtual_layers(self):
        # Pair u/v component variables (matched by standard_name) into
        # virtual vector/barb layers.
        with self.dataset() as nc:
            if nc is not None:
                # Earth Projected Sea Water Velocity
                u_names = ['eastward_sea_water_velocity', 'eastward_sea_water_velocity_assuming_no_tide']
                v_names = ['northward_sea_water_velocity', 'northward_sea_water_velocity_assuming_no_tide']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'sea_water_velocity', 'vectors', self.id)

                # Grid projected Sea Water Velocity
                u_names = ['x_sea_water_velocity', 'grid_eastward_sea_water_velocity']
                v_names = ['y_sea_water_velocity', 'grid_northward_sea_water_velocity']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'grid_sea_water_velocity', 'vectors', self.id)

                # Earth projected Winds
                u_names = ['eastward_wind']
                v_names = ['northward_wind']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                # Hopefully we support barbs eventually
                VirtualLayer.make_vector_layer(us, vs, 'winds', 'barbs', self.id)

                # Grid projected Winds
                u_names = ['x_wind', 'grid_eastward_wind']
                v_names = ['y_wind', 'grid_northward_wind']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                # Hopefully we support barbs eventually
                VirtualLayer.make_vector_layer(us, vs, 'grid_winds', 'barbs', self.id)

                # Earth projected Ice velocity
                u_names = ['eastward_sea_ice_velocity']
                v_names = ['northward_sea_ice_velocity']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'sea_ice_velocity', 'vectors', self.id)

    def update_layers(self):
        # Create/refresh one Layer row per netCDF variable, deriving
        # default min/max, scale type, names and units from attributes.
        with self.dataset() as nc:
            if nc is not None:

                for v in nc.variables:
                    l, _ = Layer.objects.get_or_create(dataset_id=self.id, var_name=v)

                    nc_var = nc.variables[v]

                    # *_min and *_max attributes take precedence over the *_range attributes
                    # scale_* attributes take precedence over valid_* attributes

                    # *_range
                    if hasattr(nc_var, 'scale_range'):
                        l.default_min = try_float(nc_var.scale_range[0])
                        l.default_max = try_float(nc_var.scale_range[-1])
                    elif hasattr(nc_var, 'valid_range'):
                        l.default_min = try_float(nc_var.valid_range[0])
                        l.default_max = try_float(nc_var.valid_range[-1])

                    # *_min
                    if hasattr(nc_var, 'scale_min'):
                        l.default_min = try_float(nc_var.scale_min)
                    elif hasattr(nc_var, 'valid_min'):
                        l.default_min = try_float(nc_var.valid_min)

                    # *_max
                    if hasattr(nc_var, 'scale_max'):
                        l.default_max = try_float(nc_var.scale_max)
                    elif hasattr(nc_var, 'valid_max'):
                        l.default_max = try_float(nc_var.valid_max)

                    # type
                    if hasattr(nc_var, 'scale_type'):
                        if nc_var.scale_type in ['logarithmic', 'log']:
                            l.logscale = True
                        elif nc_var.scale_type in ['linear']:
                            l.logscale = False

                    if hasattr(nc_var, 'standard_name'):
                        std_name = nc_var.standard_name
                        l.std_name = std_name

                        # Only multi-dimensional variables become active
                        if len(nc_var.dimensions) > 1:
                            l.active = True

                    if hasattr(nc_var, 'long_name'):
                        l.description = nc_var.long_name

                    if hasattr(nc_var, 'units'):
                        l.units = nc_var.units

                    # Set some standard styles
                    l.styles.add(*Style.defaults())
                    l.save()

        self.analyze_virtual_layers()

    def nearest_time(self, layer, time):
        """
        Return the time index and time value that is closest
        """
        with self.dataset() as nc:
            time_vars = nc.get_variables_by_attributes(standard_name='time')

            if not time_vars:
                return None, None

            if len(time_vars) == 1:
                time_var = time_vars[0]
            else:
                # if there is more than one variable with standard_name = time
                # find the appropriate one to use with the layer
                var_obj = nc.variables[layer.access_name]
                time_var_name = find_appropriate_time(var_obj, time_vars)
                time_var = nc.variables[time_var_name]

            units = time_var.units
            if hasattr(time_var, 'calendar'):
                calendar = time_var.calendar
            else:
                calendar = 'gregorian'
            num_date = round(nc4.date2num(time, units=units, calendar=calendar))

            times = time_var[:]

            time_index = np.searchsorted(times, num_date, side='left')
            time_index = min(time_index, len(times) - 1)  # Don't do over the length of time
            return time_index, times[time_index]
Example #17
0
class NetCDFDataset(object):
    """Mixin providing cached netCDF access, RTree-backed nearest-point
    lookup and (virtual) layer bookkeeping for a Dataset-like model.

    NOTE(review): relies on the concrete class for ``self.path()``,
    ``self.safe_filename`` and ``self.id``, plus
    ``settings.TOPOLOGY_PATH`` — confirm against the subclass.
    """

    @contextmanager
    def dataset(self):
        # Yield an open (cached) dataset handle, or None when neither a
        # single-file nor a 'time'-aggregated multi-file open succeeds.
        try:
            # Dataset is already loaded
            self._dataset.variables
            yield self._dataset
        except AttributeError:
            try:
                self._dataset = EnhancedDataset(self.path())
                yield self._dataset
            except RuntimeError:
                try:
                    self._dataset = EnhancedMFDataset(self.path(),
                                                      aggdim='time')
                    yield self._dataset
                except RuntimeError:
                    yield None

    @contextmanager
    def topology(self):
        # Yield the cached topology dataset, or None if it cannot open.
        try:
            self._topology.variables
            yield self._topology
        except AttributeError:
            try:
                self._topology = EnhancedDataset(self.topology_file)
                yield self._topology
            except RuntimeError:
                yield None

    def close(self):
        # Best-effort close of both cached handles.
        try:
            self._dataset.close()
        except BaseException:
            pass

        try:
            self._topology.close()
        except BaseException:
            pass

    def has_cache(self):
        # True when the topology cache file exists on disk.
        return os.path.exists(self.topology_file)

    @property
    def topology_file(self):
        return os.path.join(settings.TOPOLOGY_PATH,
                            '{}.nc'.format(self.safe_filename))

    @property
    def domain_file(self):
        return os.path.join(settings.TOPOLOGY_PATH,
                            '{}.domain'.format(self.safe_filename))

    @property
    def node_tree_root(self):
        # Basename of the node RTree files (.dat/.idx).
        return os.path.join(settings.TOPOLOGY_PATH,
                            '{}.nodes').format(self.safe_filename)

    @property
    def node_tree_data_file(self):
        return '{}.dat'.format(self.node_tree_root)

    @property
    def node_tree_index_file(self):
        return '{}.idx'.format(self.node_tree_root)

    @property
    def face_tree_root(self):
        # Basename of the face RTree files (.dat/.idx).
        return os.path.join(settings.TOPOLOGY_PATH,
                            '{}.faces').format(self.safe_filename)

    @property
    def face_tree_data_file(self):
        return '{}.dat'.format(self.face_tree_root)

    @property
    def face_tree_index_file(self):
        return '{}.idx'.format(self.face_tree_root)

    def setup_getfeatureinfo(self,
                             ncd,
                             variable_object,
                             request,
                             location=None):
        """Resolve a GetFeatureInfo request to the nearest node/face and
        a [start, end) window of time indexes.

        Bug fix: ``tree`` is now initialized to None and guarded in the
        ``finally`` block.  Previously any failure before the RTree was
        opened (e.g. a missing 'latitude'/'longitude' query parameter)
        raised a NameError from the cleanup instead of the real error.
        This matches the sibling implementation of this class.
        """

        location = location or 'face'
        tree = None

        try:
            latitude = request.GET['latitude']
            longitude = request.GET['longitude']
            # Find closest cell or node (only node for now)
            if location == 'face':
                tree = rtree.index.Index(self.face_tree_root)
            elif location == 'node':
                tree = rtree.index.Index(self.node_tree_root)
            else:
                raise NotImplementedError(
                    "No RTree for location '{}'".format(location))
            nindex = list(
                tree.nearest((longitude, latitude, longitude, latitude),
                             1,
                             objects=True))[0]
            closest_x, closest_y = tuple(nindex.bbox[2:])
            geo_index = nindex.object
        except BaseException:
            raise
        finally:
            # Only close the index if it was actually opened.
            if tree is not None:
                tree.close()

        # Get time indexes
        time_var_name = find_appropriate_time(
            variable_object,
            ncd.get_variables_by_attributes(standard_name='time'))
        time_var = ncd.variables[time_var_name]
        if hasattr(time_var, 'calendar'):
            calendar = time_var.calendar
        else:
            calendar = 'gregorian'
        start_nc_num = round(
            nc4.date2num(request.GET['starting'],
                         units=time_var.units,
                         calendar=calendar))
        end_nc_num = round(
            nc4.date2num(request.GET['ending'],
                         units=time_var.units,
                         calendar=calendar))

        all_times = time_var[:]
        start_nc_index = bisect.bisect_right(all_times, start_nc_num)
        end_nc_index = bisect.bisect_right(all_times, end_nc_num)

        # Clamp both indexes to the available time range.
        try:
            all_times[start_nc_index]
        except IndexError:
            start_nc_index = all_times.size - 1
        try:
            all_times[end_nc_index]
        except IndexError:
            end_nc_index = all_times.size - 1

        # Guarantee a non-empty window when both indexes collide.
        if start_nc_index == end_nc_index:
            if start_nc_index > 0:
                start_nc_index -= 1
            elif end_nc_index < all_times.size:
                end_nc_index += 1
        return_dates = nc4.num2date(all_times[start_nc_index:end_nc_index],
                                    units=time_var.units,
                                    calendar=calendar)

        return geo_index, closest_x, closest_y, start_nc_index, end_nc_index, return_dates

    def __del__(self):
        # Release netCDF handles when garbage collected.
        self.close()

    def analyze_virtual_layers(self):
        # Pair u/v component variables (matched by standard_name) into
        # virtual vector/barb layers.
        with self.dataset() as nc:
            if nc is not None:
                # Earth Projected Sea Water Velocity
                u_names = [
                    'eastward_sea_water_velocity',
                    'eastward_sea_water_velocity_assuming_no_tide'
                ]
                v_names = [
                    'northward_sea_water_velocity',
                    'northward_sea_water_velocity_assuming_no_tide'
                ]
                us = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'sea_water_velocity',
                                               'vectors', self.id)

                # Grid projected Sea Water Velocity
                u_names = [
                    'x_sea_water_velocity', 'grid_eastward_sea_water_velocity'
                ]
                v_names = [
                    'y_sea_water_velocity', 'grid_northward_sea_water_velocity'
                ]
                us = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs,
                                               'grid_sea_water_velocity',
                                               'vectors', self.id)

                # Earth projected Winds
                u_names = ['eastward_wind']
                v_names = ['northward_wind']
                us = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'winds', 'barbs',
                                               self.id)

                # Grid projected Winds
                u_names = ['x_wind', 'grid_eastward_wind']
                v_names = ['y_wind', 'grid_northward_wind']
                us = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'grid_winds', 'barbs',
                                               self.id)

                # Earth projected Ice velocity
                u_names = ['eastward_sea_ice_velocity']
                v_names = ['northward_sea_ice_velocity']
                us = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(
                    standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'sea_ice_velocity',
                                               'vectors', self.id)

    def process_layers(self):
        # Create/refresh one Layer row per netCDF variable, deriving
        # default min/max, names and units from variable attributes.
        with self.dataset() as nc:
            if nc is not None:

                for v in nc.variables:
                    l, _ = Layer.objects.get_or_create(dataset_id=self.id,
                                                       var_name=v)

                    nc_var = nc.variables[v]
                    if hasattr(nc_var, 'valid_range'):
                        l.default_min = try_float(nc_var.valid_range[0])
                        l.default_max = try_float(nc_var.valid_range[-1])
                    # valid_min and valid_max take precedence
                    if hasattr(nc_var, 'valid_min'):
                        l.default_min = try_float(nc_var.valid_min)
                    if hasattr(nc_var, 'valid_max'):
                        l.default_max = try_float(nc_var.valid_max)

                    if hasattr(nc_var, 'standard_name'):
                        std_name = nc_var.standard_name
                        l.std_name = std_name

                        # Only multi-dimensional variables become active
                        if len(nc_var.dimensions) > 1:
                            l.active = True

                    if hasattr(nc_var, 'long_name'):
                        l.description = nc_var.long_name

                    if hasattr(nc_var, 'units'):
                        l.units = nc_var.units

                    # Set some standard styles
                    l.styles = Style.defaults()
                    l.save()

        self.analyze_virtual_layers()

    def nearest_time(self, layer, time):
        """
        Return the time index and time value that is closest
        """
        with self.dataset() as nc:
            time_vars = nc.get_variables_by_attributes(standard_name='time')
            if len(time_vars) == 1:
                time_var = time_vars[0]
            else:
                # if there is more than one variable with standard_name = time
                # find the appropriate one to use with the layer
                var_obj = nc.variables[layer.access_name]
                time_var_name = find_appropriate_time(var_obj, time_vars)
                time_var = nc.variables[time_var_name]
            units = time_var.units
            if hasattr(time_var, 'calendar'):
                calendar = time_var.calendar
            else:
                calendar = 'gregorian'
            num_date = round(nc4.date2num(time, units=units,
                                          calendar=calendar))

            times = time_var[:]
            time_index = bisect.bisect_right(times, num_date)
            # Clamp to the last index when the date is past the end.
            try:
                times[time_index]
            except IndexError:
                time_index -= 1
            return time_index, times[time_index]
Example #18
0
class NetCDFDataset(object):
    """
    Mixin providing cached access to a dataset's netCDF file and to its
    precomputed topology / spatial-index artifacts, plus layer discovery
    and time-lookup helpers for WMS-style requests.

    NOTE(review): relies on attributes supplied by the mixing class
    (``self.path()``, ``self.safe_filename``, ``self.id``) -- confirm
    against the concrete model this is mixed into.
    """

    @contextmanager
    def dataset(self):
        """
        Yield an open handle to the source data, opening and caching it on
        first use.  Yields ``None`` when the file cannot be opened either
        directly or as a time-aggregated multi-file dataset.
        """
        try:
            # Dataset is already loaded
            self._dataset.variables
            yield self._dataset
        except AttributeError:
            try:
                self._dataset = EnhancedDataset(self.path())
                yield self._dataset
            except RuntimeError:
                try:
                    # Fall back to a multi-file aggregation along 'time'
                    self._dataset = EnhancedMFDataset(self.path(), aggdim='time')
                    yield self._dataset
                except RuntimeError:
                    yield None

    @contextmanager
    def topology(self):
        """
        Yield the cached topology dataset, opening it on first use.
        Yields ``None`` when the topology file cannot be opened.
        """
        try:
            self._topology.variables
            yield self._topology
        except AttributeError:
            try:
                self._topology = EnhancedDataset(self.topology_file)
                yield self._topology
            except RuntimeError:
                yield None

    def close(self):
        """Best-effort close of any cached dataset/topology handles."""
        try:
            self._dataset.close()
        except BaseException:
            pass

        try:
            self._topology.close()
        except BaseException:
            pass

    def has_cache(self):
        """Return True if the topology cache file exists on disk."""
        return os.path.exists(self.topology_file)

    @property
    def topology_file(self):
        """Path of the cached topology netCDF file for this dataset."""
        return os.path.join(settings.TOPOLOGY_PATH, '{}.nc'.format(self.safe_filename))

    @property
    def domain_file(self):
        """Path of the cached domain file for this dataset."""
        return os.path.join(settings.TOPOLOGY_PATH, '{}.domain'.format(self.safe_filename))

    @property
    def node_tree_root(self):
        """Basename (no extension) of the node RTree index files."""
        return os.path.join(settings.TOPOLOGY_PATH, '{}.nodes').format(self.safe_filename)

    @property
    def node_tree_data_file(self):
        """RTree data file for the node index."""
        return '{}.dat'.format(self.node_tree_root)

    @property
    def node_tree_index_file(self):
        """RTree index file for the node index."""
        return '{}.idx'.format(self.node_tree_root)

    @property
    def face_tree_root(self):
        """Basename (no extension) of the face RTree index files."""
        return os.path.join(settings.TOPOLOGY_PATH, '{}.faces').format(self.safe_filename)

    @property
    def face_tree_data_file(self):
        """RTree data file for the face index."""
        return '{}.dat'.format(self.face_tree_root)

    @property
    def face_tree_index_file(self):
        """RTree index file for the face index."""
        return '{}.idx'.format(self.face_tree_root)

    def setup_getfeatureinfo(self, ncd, variable_object, request, location=None):
        """
        Resolve a GetFeatureInfo request to the nearest grid location and
        the requested time window.

        Returns ``(geo_index, closest_x, closest_y, start_nc_index,
        end_nc_index, return_dates)``.

        NOTE(review): ``request.GET['latitude']``/``['longitude']`` are fed
        straight into the rtree query, so they must already be numeric --
        confirm the view layer converts them before calling this.
        """
        location = location or 'face'

        tree = None
        try:
            latitude = request.GET['latitude']
            longitude = request.GET['longitude']
            # Find closest cell or node (only node for now)
            if location == 'face':
                tree = rtree.index.Index(self.face_tree_root)
            elif location == 'node':
                tree = rtree.index.Index(self.node_tree_root)
            else:
                raise NotImplementedError("No RTree for location '{}'".format(location))
            nindex = list(tree.nearest((longitude, latitude, longitude, latitude), 1, objects=True))[0]
            closest_x, closest_y = tuple(nindex.bbox[2:])
            geo_index = nindex.object
        finally:
            # BUGFIX: 'tree' used to be unbound here when 'location' was
            # invalid, so this clause raised NameError and masked the
            # NotImplementedError raised above.
            if tree is not None:
                tree.close()

        # Map the requested start/end datetimes onto the file's time axis.
        time_var_name = find_appropriate_time(variable_object, ncd.get_variables_by_attributes(standard_name='time'))
        time_var = ncd.variables[time_var_name]
        if hasattr(time_var, 'calendar'):
            calendar = time_var.calendar
        else:
            calendar = 'gregorian'
        start_nc_num = round(nc4.date2num(request.GET['starting'], units=time_var.units, calendar=calendar))
        end_nc_num = round(nc4.date2num(request.GET['ending'], units=time_var.units, calendar=calendar))

        all_times = time_var[:]
        start_nc_index = bisect.bisect_right(all_times, start_nc_num)
        end_nc_index = bisect.bisect_right(all_times, end_nc_num)

        # Clamp out-of-range indexes to the last available time step.
        try:
            all_times[start_nc_index]
        except IndexError:
            start_nc_index = all_times.size - 1
        try:
            all_times[end_nc_index]
        except IndexError:
            end_nc_index = all_times.size - 1

        if start_nc_index == end_nc_index:
            # Widen an empty window so the slice covers at least one step.
            if start_nc_index > 0:
                start_nc_index -= 1
            elif end_nc_index < all_times.size:
                end_nc_index += 1
        return_dates = nc4.num2date(all_times[start_nc_index:end_nc_index], units=time_var.units, calendar=calendar)

        return geo_index, closest_x, closest_y, start_nc_index, end_nc_index, return_dates

    def __del__(self):
        # Ensure file handles are released when the object is collected.
        self.close()

    def analyze_virtual_layers(self):
        """
        Create virtual vector layers (currents, winds, ice velocity) by
        pairing CF standard_name u/v component variables.
        """
        with self.dataset() as nc:
            if nc is not None:
                # Earth Projected Sea Water Velocity
                u_names = ['eastward_sea_water_velocity', 'eastward_sea_water_velocity_assuming_no_tide']
                v_names = ['northward_sea_water_velocity', 'northward_sea_water_velocity_assuming_no_tide']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'sea_water_velocity', 'vectors', self.id)

                # Grid projected Sea Water Velocity
                u_names = ['x_sea_water_velocity', 'grid_eastward_sea_water_velocity']
                v_names = ['y_sea_water_velocity', 'grid_northward_sea_water_velocity']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'grid_sea_water_velocity', 'vectors', self.id)

                # Earth projected Winds (layer creation currently disabled)
                u_names = ['eastward_wind']
                v_names = ['northward_wind']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                #VirtualLayer.make_vector_layer(us, vs, 'winds', 'barbs', self.id)

                # Grid projected Winds (layer creation currently disabled)
                u_names = ['x_wind', 'grid_eastward_wind']
                v_names = ['y_wind', 'grid_northward_wind']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                #VirtualLayer.make_vector_layer(us, vs, 'grid_winds', 'barbs', self.id)

                # Earth projected Ice velocity
                u_names = ['eastward_sea_ice_velocity']
                v_names = ['northward_sea_ice_velocity']
                us = nc.get_variables_by_attributes(standard_name=lambda v: v in u_names)
                vs = nc.get_variables_by_attributes(standard_name=lambda v: v in v_names)
                VirtualLayer.make_vector_layer(us, vs, 'sea_ice_velocity', 'vectors', self.id)

    def process_layers(self):
        """
        Create or update a Layer record for every variable in the dataset,
        filling defaults (min/max, names, units, styles) from the variable's
        netCDF attributes, then derive virtual vector layers.
        """
        with self.dataset() as nc:
            if nc is not None:

                for v in nc.variables:
                    l, _ = Layer.objects.get_or_create(dataset_id=self.id, var_name=v)

                    nc_var = nc.variables[v]
                    if hasattr(nc_var, 'valid_range'):
                        l.default_min = try_float(nc_var.valid_range[0])
                        l.default_max = try_float(nc_var.valid_range[-1])
                    # valid_min and valid_max take precedence over valid_range
                    if hasattr(nc_var, 'valid_min'):
                        l.default_min = try_float(nc_var.valid_min)
                    if hasattr(nc_var, 'valid_max'):
                        l.default_max = try_float(nc_var.valid_max)

                    if hasattr(nc_var, 'standard_name'):
                        std_name = nc_var.standard_name
                        l.std_name = std_name

                        # Only multi-dimensional variables are plottable
                        if len(nc_var.dimensions) > 1:
                            l.active = True

                    if hasattr(nc_var, 'long_name'):
                        l.description = nc_var.long_name

                    if hasattr(nc_var, 'units'):
                        l.units = nc_var.units

                    # Set some standard styles
                    l.styles = Style.defaults()
                    l.save()

        self.analyze_virtual_layers()

    def nearest_time(self, layer, time):
        """
        Return the time index and time value that is closest
        """
        with self.dataset() as nc:
            time_vars = nc.get_variables_by_attributes(standard_name='time')
            if len(time_vars) == 1:
                time_var = time_vars[0]
            else:
                # if there is more than one variable with standard_name = time
                # find the appropriate one to use with the layer
                var_obj = nc.variables[layer.access_name]
                time_var_name = find_appropriate_time(var_obj, time_vars)
                time_var = nc.variables[time_var_name]
            units = time_var.units
            if hasattr(time_var, 'calendar'):
                calendar = time_var.calendar
            else:
                calendar = 'gregorian'
            num_date = round(nc4.date2num(time, units=units, calendar=calendar))

            times = time_var[:]
            time_index = bisect.bisect_right(times, num_date)
            try:
                times[time_index]
            except IndexError:
                # Requested time is past the end; clamp to the last step
                time_index -= 1
            return time_index, times[time_index]
 def setUp(self):
     # Open the bundled COAMPS 10m true-wind test file, located relative
     # to this test module.
     resource_path = os.path.join(
         os.path.dirname(__file__),
         "resources/coamps/cencoos_4km/wnd_tru/10m/coamps_cencoos_4km_wnd_tru_10m_2014-06-20-00_2014.171.00.nc")
     self.nc = EnhancedDataset(resource_path)
 def setUp(self):
     # Open the bundled COAMPS 10m true-wind test file, located relative
     # to this test module.
     resource_path = os.path.join(
         os.path.dirname(__file__),
         "resources/coamps/cencoos_4km/wnd_tru/10m/coamps_cencoos_4km_wnd_tru_10m_2014-06-20-00_2014.171.00.nc")
     self.nc = EnhancedDataset(resource_path)