def test_run_chain_read_filter_kelvin_summarize(self):
    """A read -> filter -> Kelvin conversion -> summarize chain on a
    non-empty granule should produce exactly one result tile."""
    chain = ProcessorChain([
        {
            'name': 'GridReadingProcessor',
            'config': {
                'latitude': 'lat',
                'longitude': 'lon',
                'time': 'time',
                'variable_to_read': 'analysed_sst'
            }
        },
        {'name': 'EmptyTileFilter'},
        {'name': 'KelvinToCelsius'},
        {'name': 'TileSummarizingProcessor'},
    ])

    granule = path.join(path.dirname(__file__), 'datafiles', 'not_empty_mur.nc4')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "time:0:1,lat:0:10,lon:0:10"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    produced = list(chain.process(tile))

    self.assertEqual(1, len(produced))
def test_run_chain_read_filter_all(self):
    """Reading an empty granule through the EmptyTileFilter should yield
    no messages at all."""
    chain = ProcessorChain([
        {
            'name': 'GridReadingProcessor',
            'config': {
                'latitude': 'lat',
                'longitude': 'lon',
                'time': 'time',
                'variable_to_read': 'analysed_sst'
            }
        },
        {'name': 'EmptyTileFilter'},
    ])

    granule = path.join(path.dirname(__file__), 'datafiles', 'empty_mur.nc4')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "time:0:1,lat:0:10,lon:0:10"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    # Any yielded message means the filter failed to drop the empty tile.
    for message in chain.process(tile):
        self.fail("Should not produce any messages. Message: %s" % message)
def test_read_not_empty_ascatb(self):
    """Swath read of one ASCAT-B row should yield a single fully-populated
    swath tile with 82 cells and no metadata."""
    granule = path.join(path.dirname(__file__), 'datafiles', 'not_empty_ascatb.nc4')

    reader = processors.SwathReadingProcessor('wind_speed', 'lat', 'lon', time='time')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "NUMROWS:0:1,NUMCELLS:0:82"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    produced = list(reader.process(tile))

    self.assertEqual(1, len(produced))

    for result in produced:
        self.assertTrue(result.HasField('tile'))
        self.assertTrue(result.tile.HasField('swath_tile'))
        # No 'meta' was configured, so no metadata entries are expected.
        self.assertEqual(0, len(result.tile.swath_tile.meta_data))

        swath = result.tile.swath_tile
        self.assertEqual(82, from_shaped_array(swath.latitude).size)
        self.assertEqual(82, from_shaped_array(swath.longitude).size)

    data = np.ma.masked_invalid(
        from_shaped_array(produced[0].tile.swath_tile.variable_data))
    self.assertEqual((1, 82), data.shape)
    # Every cell in this granule slice holds a valid value.
    self.assertEqual(82, np.ma.count(data))
def new_nexus_tile(file_path, section_spec):
    """Build a fresh NexusTile whose summary records the granule's base
    filename (path components stripped) and the given section spec."""
    summary = nexusproto.TileSummary()
    summary.granule = file_path.split(sep)[-1]
    summary.section_spec = section_spec

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)
    return tile
def test_read_empty_mur(self):
    """Reading an all-masked MUR granule still yields one grid tile, but
    every data value is masked out."""
    granule = path.join(path.dirname(__file__), 'datafiles', 'empty_mur.nc4')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "time:0:1,lat:0:10,lon:0:10"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    produced = list(self.module.process(tile))

    self.assertEqual(1, len(produced))

    for result in produced:
        self.assertTrue(result.HasField('tile'))
        self.assertTrue(result.tile.HasField('grid_tile'))

        grid = result.tile.grid_tile
        self.assertEqual(10, len(from_shaped_array(grid.latitude)))
        self.assertEqual(10, len(from_shaped_array(grid.longitude)))

        values = np.ma.masked_invalid(from_shaped_array(grid.variable_data))
        self.assertEqual((1, 10, 10), values.shape)
        # Empty granule: zero unmasked values.
        self.assertEqual(0, np.ma.count(values))

        self.assertTrue(grid.HasField('time'))
def test_read_not_empty_smap(self):
    """SMAP swath read: 76 points, partially masked data, lat range and
    epoch time derived from the REV_START_TIME global attribute."""
    granule = path.join(path.dirname(__file__), 'datafiles', 'not_empty_smap.h5')

    reader = processors.SwathReadingProcessor(
        'smap_sss',
        'lat',
        'lon',
        time='row_time',
        glblattr_day='REV_START_TIME',
        glblattr_day_format='%Y-%jT%H:%M:%S.%f')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "phony_dim_0:0:76,phony_dim_1:0:1"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    produced = list(reader.process(tile))

    self.assertEqual(1, len(produced))

    for result in produced:
        self.assertTrue(result.HasField('tile'))
        self.assertTrue(result.tile.HasField('swath_tile'))
        self.assertEqual(0, len(result.tile.swath_tile.meta_data))

        swath = result.tile.swath_tile
        self.assertEqual(76, from_shaped_array(swath.latitude).size)
        self.assertEqual(76, from_shaped_array(swath.longitude).size)

    first = produced[0].tile.swath_tile

    values = np.ma.masked_invalid(from_shaped_array(first.variable_data))
    self.assertEqual((76, 1), values.shape)
    # Only 43 of the 76 points carry valid salinity values.
    self.assertEqual(43, np.ma.count(values))

    latitudes = np.ma.masked_invalid(from_shaped_array(first.latitude))
    self.assertAlmostEqual(-50.056, np.ma.min(latitudes), places=3)
    self.assertAlmostEqual(-47.949, np.ma.max(latitudes), places=3)

    self.assertEqual(
        1427820162,
        np.ma.masked_invalid(from_shaped_array(first.time))[0])
def test_read_not_empty_avhrr(self):
    """AVHRR grid read: 10x10 fully-valid tile with expected latitude
    extents, scalar time, and first SST value."""
    granule = path.join(path.dirname(__file__), 'datafiles', 'not_empty_avhrr.nc4')

    reader = processors.GridReadingProcessor('analysed_sst', 'lat', 'lon', time='time')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "time:0:1,lat:0:10,lon:0:10"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    produced = list(reader.process(tile))

    self.assertEqual(1, len(produced))

    for result in produced:
        self.assertTrue(result.HasField('tile'))
        self.assertTrue(result.tile.HasField('grid_tile'))

        grid = result.tile.grid_tile
        self.assertEqual(10, from_shaped_array(grid.latitude).size)
        self.assertEqual(10, from_shaped_array(grid.longitude).size)
        self.assertEqual((1, 10, 10),
                         from_shaped_array(grid.variable_data).shape)

    first = produced[0].tile.grid_tile

    values = np.ma.masked_invalid(from_shaped_array(first.variable_data))
    # All 100 cells valid in this slice.
    self.assertEqual(100, np.ma.count(values))

    latitudes = np.ma.masked_invalid(from_shaped_array(first.latitude))
    self.assertAlmostEqual(-39.875, np.ma.min(latitudes), places=3)
    self.assertAlmostEqual(-37.625, np.ma.max(latitudes), places=3)

    self.assertEqual(1462060800, first.time)
    self.assertAlmostEqual(289.71, values[0, 0, 0], places=3)
def test_read_not_empty_wswm(self):
    """WSWM time-series read: 500 river reaches, all valid, with expected
    latitude extents, scalar time, and first discharge value."""
    granule = path.join(path.dirname(__file__), 'datafiles', 'not_empty_wswm.nc')

    reader = processors.TimeSeriesReadingProcessor('Qout', 'lat', 'lon', 'time')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "time:0:1,rivid:0:500"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    produced = list(reader.process(tile))

    self.assertEqual(1, len(produced))

    for result in produced:
        self.assertTrue(result.HasField('tile'))
        self.assertTrue(result.tile.HasField('time_series_tile'))

        ts = result.tile.time_series_tile
        self.assertEqual(500, from_shaped_array(ts.latitude).size)
        self.assertEqual(500, from_shaped_array(ts.longitude).size)
        self.assertEqual((1, 500),
                         from_shaped_array(ts.variable_data).shape)

    first = produced[0].tile.time_series_tile

    values = np.ma.masked_invalid(from_shaped_array(first.variable_data))
    self.assertEqual(500, np.ma.count(values))

    latitudes = np.ma.masked_invalid(from_shaped_array(first.latitude))
    self.assertAlmostEqual(41.390, np.ma.min(latitudes), places=3)
    self.assertAlmostEqual(42.071, np.ma.max(latitudes), places=3)

    self.assertEqual(852098400, first.time)
    self.assertAlmostEqual(0.009, values[0, 0], places=3)
def test_read_not_empty_ccmp(self):
    """CCMP grid read with a metadata variable: one 38x87 tile carrying a
    single meta_data entry (vwnd) and partially-masked uwnd values."""
    granule = path.join(path.dirname(__file__), 'datafiles', 'not_empty_ccmp.nc')

    reader = processors.GridReadingProcessor(
        'uwnd', 'latitude', 'longitude', time='time', meta='vwnd')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "time:0:1,longitude:0:87,latitude:0:38"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    produced = list(reader.process(tile))

    self.assertEqual(1, len(produced))

    for result in produced:
        self.assertTrue(result.HasField('tile'))
        self.assertTrue(result.tile.HasField('grid_tile'))
        # Exactly one metadata entry: the configured 'vwnd' variable.
        self.assertEqual(1, len(result.tile.grid_tile.meta_data))

        grid = result.tile.grid_tile
        self.assertEqual(38, from_shaped_array(grid.latitude).size)
        self.assertEqual(87, from_shaped_array(grid.longitude).size)
        self.assertEqual((1, 38, 87),
                         from_shaped_array(grid.variable_data).shape)

    first = produced[0].tile.grid_tile

    values = np.ma.masked_invalid(from_shaped_array(first.variable_data))
    self.assertEqual(3306, np.ma.count(values))

    latitudes = np.ma.masked_invalid(from_shaped_array(first.latitude))
    self.assertAlmostEqual(-78.375, np.ma.min(latitudes), places=3)
    self.assertAlmostEqual(-69.125, np.ma.max(latitudes), places=3)

    self.assertEqual(1451606400, first.time)
def run_processor_chain():
    """Flask view: run a posted NexusTile through a posted processor chain.

    Expects a JSON body with:
      - processor_list: list of processor descriptors for ProcessorChain
      - input_data: a NexusTile protobuf serialized as a JSON string

    Returns the first tile produced by the chain, serialized to protobuf
    bytes, as an application/octet-stream response (empty body if the
    chain yields nothing).

    Raises BadRequest (HTTP 400) for any malformed input.
    """
    try:
        parameters = request.get_json()
    except Exception as e:
        raise BadRequest("Invalid JSON data") from e

    try:
        processor_list = parameters['processor_list']
    except (KeyError, TypeError) as e:
        # Fix: chain the original cause (`from e`) like every other handler
        # in this view, so tracebacks show what actually failed.
        raise BadRequest(description="processor_list is required.") from e

    try:
        chain = ProcessorChain(processor_list)
    except ProcessorNotFound as e:
        raise BadRequest("Unknown processor requested: %s" %
                         e.missing_processor) from e
    except MissingProcessorArguments as e:
        raise BadRequest("%s missing required configuration options: %s" %
                         (e.processor, e.missing_processor_args)) from e

    try:
        input_data = json_format.Parse(parameters['input_data'],
                                       nexusproto.NexusTile())
    except ParseError as e:
        raise BadRequest(
            "input_data must be a NexusTile protobuf serialized as a string"
        ) from e

    # Only the first result is returned; None when the chain filters
    # everything out.
    result = next(chain.process(input_data), None)

    if isinstance(result, nexusproto.NexusTile):
        result = result.SerializeToString()

    return Response(result, mimetype='application/octet-stream')
def test_read_not_empty_mur(self):
    """A non-empty MUR granule yields one grid tile with all 100 cells valid."""
    granule = path.join(path.dirname(__file__), 'datafiles', 'not_empty_mur.nc4')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "time:0:1,lat:0:10,lon:0:10"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    produced = list(self.module.process(tile))

    self.assertEqual(1, len(produced))

    values = np.ma.masked_invalid(
        from_shaped_array(produced[0].tile.grid_tile.variable_data))
    self.assertEqual((1, 10, 10), values.shape)
    self.assertEqual(100, np.ma.count(values))
def read_grid_data(self, section_spec_dataset):
    """Read gridded data for each tile spec and yield serialized NexusTiles.

    NOTE(review): ``latitude``, ``longitude``, ``variable_to_read``,
    ``metadata``, ``time_offset`` and ``temp_dir`` are free names resolved
    outside this function (legacy module-level configuration) — confirm
    they are bound wherever this runs.
    """
    tile_specifications, file_path = parse_input(section_spec_dataset)

    # Time is optional for Grid data
    try:
        time = environ['TIME']
    except KeyError:
        time = None

    with Dataset(file_path) as ds:
        for section_spec, dimtoslice in tile_specifications:
            tile = nexusproto.GridTile()

            tile.latitude.CopyFrom(
                to_shaped_array(
                    numpy.ma.filled(ds[latitude][dimtoslice[latitude]],
                                    numpy.NaN)))
            tile.longitude.CopyFrom(
                to_shaped_array(
                    numpy.ma.filled(ds[longitude][dimtoslice[longitude]],
                                    numpy.NaN)))

            # Before we read the data we need to make sure the dimensions
            # are in the proper order so we don't have any indexing issues
            ordered_slices = get_ordered_slices(ds, variable_to_read,
                                                dimtoslice)

            # Read data using the ordered slices, replacing masked values
            # with NaN. Fix: dict.itervalues() was removed in Python 3 —
            # use .values(), as the other readers in this module do.
            data_array = numpy.ma.filled(
                ds[variable_to_read][tuple(ordered_slices.values())],
                numpy.NaN)

            tile.variable_data.CopyFrom(to_shaped_array(data_array))

            if metadata is not None:
                tile.meta_data.add().CopyFrom(
                    to_metadata(
                        metadata,
                        ds[metadata][tuple(ordered_slices.values())]))

            if time is not None:
                timevar = ds[time]
                # Note assumption is that index of time is start value in dimtoslice
                tile.time = to_seconds_from_epoch(
                    timevar[dimtoslice[time].start],
                    timeunits=timevar.getncattr('units'),
                    timeoffset=time_offset)

            nexus_tile = new_nexus_tile(file_path, section_spec)
            nexus_tile.tile.grid_tile.CopyFrom(tile)

            yield nexus_tile.SerializeToString()

    # If temp dir is defined, delete the temporary file
    if temp_dir is not None:
        remove(file_path)
def read_data(self, tile_specifications, file_path, output_tile):
    """Read swath data for each tile spec, converting per-point times to
    seconds since epoch, and yield the populated output_tile."""
    with Dataset(file_path) as ds:
        for section_spec, dimtoslice in tile_specifications:
            swath = nexusproto.SwathTile()

            # Time, lat, lon, data and metadata are all indexed by the same
            # dimensions; order the incoming spec once via the data variable.
            ordered_slices = get_ordered_slices(ds, self.variable_to_read,
                                                dimtoslice)
            slice_tuple = tuple(ordered_slices.values())

            swath.latitude.CopyFrom(
                to_shaped_array(
                    numpy.ma.filled(ds[self.latitude][slice_tuple],
                                    numpy.NaN)))
            swath.longitude.CopyFrom(
                to_shaped_array(
                    numpy.ma.filled(ds[self.longitude][slice_tuple],
                                    numpy.NaN)))

            # Time may have fewer dimensions than the data variable, so
            # slice it by its own dimensions only.
            time_slices = tuple(ordered_slices[dim]
                                for dim in ds[self.time].dimensions)
            time_values = ds[self.time][time_slices].astype(
                'float64', casting='same_kind', copy=False)
            time_units = ds[self.time].getncattr('units')

            # Best-effort parse of the start-of-day global attribute; fall
            # back to None when absent or unparseable.
            try:
                start_of_day_date = datetime.datetime.strptime(
                    ds.getncattr(self.start_of_day),
                    self.start_of_day_pattern)
            except Exception:
                start_of_day_date = None

            for index in numpy.ndindex(time_values.shape):
                time_values[index] = to_seconds_from_epoch(
                    time_values[index].item(),
                    timeunits=time_units,
                    start_day=start_of_day_date,
                    timeoffset=self.time_offset)

            swath.time.CopyFrom(to_shaped_array(time_values))

            # Read the data converting masked values to NaN
            swath.variable_data.CopyFrom(
                to_shaped_array(
                    numpy.ma.filled(ds[self.variable_to_read][slice_tuple],
                                    numpy.NaN)))

            if self.metadata is not None:
                swath.meta_data.add().CopyFrom(
                    to_metadata(self.metadata,
                                ds[self.metadata][slice_tuple]))

            output_tile.tile.swath_tile.CopyFrom(swath)

            yield output_tile
def process_nexus_tile(self, input_tile):
    """Parse the input tile's spec, delegate reading to read_data, and
    clean up the temporary granule file afterwards when one was used."""
    tile_specifications, file_path = parse_input(input_tile, self.temp_dir)

    output_tile = nexusproto.NexusTile()
    output_tile.CopyFrom(input_tile)

    yield from self.read_data(tile_specifications, file_path, output_tile)

    # If temp dir is defined, delete the temporary file
    if self.temp_dir is not None:
        remove(file_path)
def to_shaped_array(data_array):
    """Serialize a numpy array into a nexusproto ShapedArray message.

    Stores the shape, dtype string, and the raw ``numpy.save`` bytes.
    """
    # Fix: numpy.save writes *binary* data, so the buffer must be a
    # BytesIO. The original used Python 2's StringIO.StringIO, which does
    # not exist (and would reject bytes) under Python 3.
    from io import BytesIO

    shaped_array = nexusproto.ShapedArray()

    shaped_array.shape.extend(
        [dimension_size for dimension_size in data_array.shape])
    shaped_array.dtype = str(data_array.dtype)

    memfile = BytesIO()
    numpy.save(memfile, data_array)
    shaped_array.array_data = memfile.getvalue()
    memfile.close()

    return shaped_array
def read_data(self, tile_specifications, file_path, output_tile):
    """Read time-series data for each tile spec and yield the populated
    output_tile. Lat/lon are indexed by the non-time ("instance")
    dimension of the data variable."""
    with Dataset(file_path) as ds:
        for section_spec, dimtoslice in tile_specifications:
            ts_tile = nexusproto.TimeSeriesTile()

            # The data variable has exactly one dimension besides time;
            # lat/lon are indexed by that instance dimension.
            instance_dimension = next(
                iter([dim for dim in ds[self.variable_to_read].dimensions
                      if dim != self.time]))
            instance_slice = dimtoslice[instance_dimension]

            ts_tile.latitude.CopyFrom(
                to_shaped_array(
                    numpy.ma.filled(ds[self.latitude][instance_slice],
                                    numpy.NaN)))
            ts_tile.longitude.CopyFrom(
                to_shaped_array(
                    numpy.ma.filled(ds[self.longitude][instance_slice],
                                    numpy.NaN)))

            # Order the slices by the data variable's dimension order to
            # avoid indexing issues, then read with masked values as NaN.
            ordered_slices = get_ordered_slices(ds, self.variable_to_read,
                                                dimtoslice)
            slice_tuple = tuple(ordered_slices.values())

            ts_tile.variable_data.CopyFrom(
                to_shaped_array(
                    numpy.ma.filled(ds[self.variable_to_read][slice_tuple],
                                    numpy.NaN)))

            if self.metadata is not None:
                ts_tile.meta_data.add().CopyFrom(
                    to_metadata(self.metadata,
                                ds[self.metadata][slice_tuple]))

            timevar = ds[self.time]
            # Note assumption is that index of time is start value in dimtoslice
            ts_tile.time = to_seconds_from_epoch(
                timevar[dimtoslice[self.time].start],
                timeunits=timevar.getncattr('units'),
                timeoffset=self.time_offset)

            output_tile.tile.time_series_tile.CopyFrom(ts_tile)

            yield output_tile
def test_read_not_empty_ascatb_meta(self):
    """Configuring meta='wind_dir' should attach exactly one fully-valid
    metadata array to the swath tile."""
    granule = path.join(path.dirname(__file__), 'datafiles', 'not_empty_ascatb.nc4')

    reader = processors.SwathReadingProcessor(
        'wind_speed', 'lat', 'lon', time='time', meta='wind_dir')

    summary = nexusproto.TileSummary()
    summary.granule = "file:%s" % granule
    summary.section_spec = "NUMROWS:0:1,NUMCELLS:0:82"

    tile = nexusproto.NexusTile()
    tile.summary.CopyFrom(summary)

    produced = list(reader.process(tile))

    self.assertEqual(1, len(produced))

    for result in produced:
        self.assertTrue(result.HasField('tile'))
        self.assertTrue(result.tile.HasField('swath_tile'))
        self.assertLess(0, len(result.tile.swath_tile.meta_data))

    self.assertEqual(1, len(produced[0].tile.swath_tile.meta_data))

    meta_values = np.ma.masked_invalid(
        from_shaped_array(produced[0].tile.swath_tile.meta_data[0].meta_data))
    self.assertEqual((1, 82), meta_values.shape)
    self.assertEqual(82, np.ma.count(meta_values))
def read_swath_data(self, section_spec_dataset):
    """Read swath data for each tile spec and yield serialized NexusTiles.

    NOTE(review): ``variable_to_read``, ``latitude``, ``longitude``,
    ``metadata`` and ``temp_dir`` are free names resolved outside this
    function (legacy module-level configuration) — confirm they are bound
    wherever this runs.
    """
    tile_specifications, file_path = parse_input(section_spec_dataset)

    # Time is required for swath data
    time = environ['TIME']

    with Dataset(file_path) as ds:
        for section_spec, dimtoslice in tile_specifications:
            tile = nexusproto.SwathTile()
            # Time Lat Long Data and metadata should all be indexed by the
            # same dimensions, order the incoming spec once using the data
            # variable.
            ordered_slices = get_ordered_slices(ds, variable_to_read,
                                                dimtoslice)
            # Fix: dict.itervalues() was removed in Python 3 — use
            # .values() (insertion-ordered), as the class-based readers in
            # this module already do.
            tile.latitude.CopyFrom(
                to_shaped_array(ds[latitude][tuple(
                    ordered_slices.values())]))
            tile.longitude.CopyFrom(
                to_shaped_array(ds[longitude][tuple(
                    ordered_slices.values())]))

            timeunits = ds[time].getncattr('units')
            timetile = ds[time][tuple(ordered_slices.values())]
            for index in numpy.ndindex(timetile.shape):
                timetile[index] = to_seconds_from_epoch(
                    timetile[index].item(), timeunits)
            tile.time.CopyFrom(to_shaped_array(timetile))

            # Read the data converting masked values to NaN
            data_array = numpy.ma.filled(
                ds[variable_to_read][tuple(ordered_slices.values())],
                numpy.NaN)
            tile.variable_data.CopyFrom(to_shaped_array(data_array))

            if metadata is not None:
                tile.meta_data.add().CopyFrom(
                    to_metadata(
                        metadata,
                        ds[metadata][tuple(ordered_slices.values())]))

            nexus_tile = new_nexus_tile(file_path, section_spec)
            nexus_tile.tile.swath_tile.CopyFrom(tile)

            yield nexus_tile.SerializeToString()

    # If temp dir is defined, delete the temporary file
    if temp_dir is not None:
        remove(file_path)
def summarize_nexustile(self, tiledata):
    """Compute bounding box and basic statistics for a tile and yield the
    tile re-serialized with its summary filled in."""
    nexus_tile = parse_input(tiledata)

    # Resolve whichever concrete tile type is set on the oneof.
    the_tile_type = nexus_tile.tile.WhichOneof("tile_type")
    the_tile_data = getattr(nexus_tile.tile, the_tile_type)

    latitudes = numpy.ma.masked_invalid(
        from_shaped_array(the_tile_data.latitude))
    longitudes = numpy.ma.masked_invalid(
        from_shaped_array(the_tile_data.longitude))
    data = from_shaped_array(the_tile_data.variable_data)

    # Reuse an existing summary so upstream fields are preserved.
    tilesummary = (nexus_tile.summary if nexus_tile.HasField("summary")
                   else nexusproto.TileSummary())

    tilesummary.bbox.lat_min = numpy.nanmin(latitudes).item()
    tilesummary.bbox.lat_max = numpy.nanmax(latitudes).item()
    tilesummary.bbox.lon_min = numpy.nanmin(longitudes).item()
    tilesummary.bbox.lon_max = numpy.nanmax(longitudes).item()

    tilesummary.stats.min = numpy.nanmin(data).item()
    tilesummary.stats.max = numpy.nanmax(data).item()
    tilesummary.stats.mean = numpy.nanmean(data).item()
    tilesummary.stats.count = data.size - numpy.count_nonzero(
        numpy.isnan(data))

    # Time bounds are optional; some tile types carry no time field.
    try:
        min_time, max_time = find_time_min_max(the_tile_data)
        tilesummary.stats.min_time = min_time
        tilesummary.stats.max_time = max_time
    except NoTimeException:
        pass

    # NOTE(review): var_name is a free name that may be unbound here; the
    # NameError guard deliberately skips data_var_name in that case.
    try:
        tilesummary.data_var_name = var_name
    except NameError:
        pass

    nexus_tile.summary.CopyFrom(tilesummary)

    yield nexus_tile.SerializeToString()
def to_metadata(name, data_array):
    """Wrap a named numpy array in a nexusproto MetaData message."""
    meta = nexusproto.MetaData()
    meta.name = name
    meta.meta_data.CopyFrom(to_shaped_array(data_array))
    return meta
def summarize_nexustile(self, tiledata):
    """Compute bounding box and statistics for a tile, using a
    cosine-of-latitude weighted mean for swath and grid tiles, and yield
    the tile re-serialized with its summary filled in."""
    nexus_tile = parse_input(tiledata)

    # Resolve whichever concrete tile type is set on the oneof.
    the_tile_type = nexus_tile.tile.WhichOneof("tile_type")
    the_tile_data = getattr(nexus_tile.tile, the_tile_type)

    latitudes = numpy.ma.masked_invalid(
        from_shaped_array(the_tile_data.latitude))
    longitudes = numpy.ma.masked_invalid(
        from_shaped_array(the_tile_data.longitude))
    data = from_shaped_array(the_tile_data.variable_data)

    # Reuse an existing summary so upstream fields are preserved.
    tilesummary = (nexus_tile.summary if nexus_tile.HasField("summary")
                   else nexusproto.TileSummary())

    tilesummary.bbox.lat_min = numpy.nanmin(latitudes).item()
    tilesummary.bbox.lat_max = numpy.nanmax(latitudes).item()
    tilesummary.bbox.lon_min = numpy.nanmin(longitudes).item()
    tilesummary.bbox.lon_max = numpy.nanmax(longitudes).item()

    tilesummary.stats.min = numpy.nanmin(data).item()
    tilesummary.stats.max = numpy.nanmax(data).item()

    # In order to accurately calculate the average we need to weight the
    # data based on the cosine of its latitude. This is handled slightly
    # differently for swath vs. grid data.
    if the_tile_type == 'swath_tile':
        # For swath tiles, len(data) == len(latitudes) == len(longitudes),
        # so we can simply weight each element in the data array.
        tilesummary.stats.mean = numpy.ma.average(
            numpy.ma.masked_invalid(data),
            weights=numpy.cos(numpy.radians(latitudes))).item()
    elif the_tile_type == 'grid_tile':
        # Grid tiles need to repeat the weight for every longitude
        # TODO This assumes data axis' are ordered as latitude x longitude
        tilesummary.stats.mean = numpy.ma.average(
            numpy.ma.masked_invalid(data).flatten(),
            weights=numpy.cos(
                numpy.radians(numpy.repeat(latitudes,
                                           len(longitudes))))).item()
    else:
        # Default to simple average with no weighting
        tilesummary.stats.mean = numpy.nanmean(data).item()

    tilesummary.stats.count = data.size - numpy.count_nonzero(
        numpy.isnan(data))

    # Time bounds are optional; some tile types carry no time field.
    try:
        min_time, max_time = find_time_min_max(the_tile_data)
        tilesummary.stats.min_time = min_time
        tilesummary.stats.max_time = max_time
    except NoTimeException:
        pass

    # NOTE(review): var_name is a free name that may be unbound here; the
    # NameError guard deliberately skips data_var_name in that case.
    try:
        tilesummary.data_var_name = var_name
    except NameError:
        pass

    nexus_tile.summary.CopyFrom(tilesummary)

    yield nexus_tile.SerializeToString()