def test_multiple_file_var_operations(self):
    # check getting and setting the numpy matrix.
    root = nc.open('unittest0*.nc')[0]
    var = nc.getvar(root, 'data')
    self.assertEquals(var.__class__, nc.DistributedNCVariable)
    self.assertEquals(var[:].__class__, np.ndarray)
    tmp = var[:]
    var[:] = var[:] + 1
    nc.close(root)
    # check that the value was saved into the file.
    root = nc.open('unittest0*.nc')[0]
    var = nc.getvar(root, 'data')
    self.assertEquals(var, tmp + 1)
    nc.close(root)
def initialize(radiance_filename, radiation_filename, callback=lambda r: r):
    ref, _ = nc.open(radiation_filename)
    ref_radiation = nc.getvar(ref, 'globalradiation')
    with nc.loader(radiance_filename) as radiance_root:
        radiance = nc.getvar(radiance_root, 'radiance', source=ref_radiation)
        radiance[0, :] = callback(radiance[0, :])
    nc.close(ref)
def test_multiple_file_new_var_operations(self):
    # check creating a new variable.
    root = nc.open('unittest0*.nc')[0]
    var = nc.getvar(root, 'new_variable', 'f4', ('time', 'yc', 'xc'),
                    digits=3, fill_value=1.0)
    self.assertEquals(var.__class__, nc.DistributedNCVariable)
    self.assertEquals(var[:].__class__, np.ndarray)
    tmp = var[:]
    var[:] = var[:] + 1
    nc.close(root)
    # check that the value was saved into the files.
    root = nc.open('unittest00.nc')[0]
    var = nc.getvar(root, 'new_variable')
    self.assertEquals(var, tmp + 1)
    nc.close(root)
def test_get_not_existing_dim_multiple_file(self):
    # check getting a non-existing dimension across multiple files.
    root = nc.open('unittest0*.nc')[0]
    self.assertFalse(root.has_dimension('the_12th_dimension'))
    self.assertEquals(len(nc.getdim(root, 'the_12th_dimension', 123)), 5)
    self.assertTrue(root.has_dimension('the_12th_dimension'))
    nc.close(root)
def test_character_variables_in_multiple_file(self):
    # check getting and setting the numpy string matrix in multiple files.
    root = nc.open('unittest0*.nc')[0]
    var = nc.getvar(root, 'auditTrail')
    self.assertEquals(var.shape, (5, 2, 80))
    result = np.vstack([[self.auditTrail] for i in range(5)])
    self.assertEquals(var, result)
    for i in range(5):
        result[i, i % 2].data[0:6] = 'CHANGE'
        var[i, i % 2, 0:6] = np.array(list('CHANGE'))
    self.assertEquals(var, result)
    nc.close(root)
    # check that the change was written to each file.
    root = nc.open('unittest0*.nc')[0]
    var = nc.getvar(root, 'auditTrail')
    self.assertEquals(var, result)
    nc.close(root)
def test_character_variables_in_single_file(self):
    # check getting and setting the numpy string matrix in a single file.
    root = nc.open('unittest00.nc')[0]
    var = nc.getvar(root, 'auditTrail')
    self.assertEquals(var.shape, (1, 2, 80))
    self.assertEquals(var, self.auditTrail)
    self.auditTrail[:].data[0:6] = 'CHANGE'
    var[0, 0:6] = np.array(list('CHANGE'))
    self.assertEquals(var, self.auditTrail)
    nc.close(root)
def test_get_existing_var_single_file(self):
    # check getting the variable in a single file.
    root = nc.open('unittest00.nc')[0]
    self.assertNotIn('data', root.variables)
    var = nc.getvar(root, 'data')
    self.assertEquals(var.shape, (1, 100, 200))
    self.assertIn('data', root.variables)
    are_equals = (var[:] == np.zeros(var.shape) + 1.)
    self.assertTrue(are_equals.all())
    nc.close(root)
def test_get_existing_var_multiple_file(self):
    # check getting the variable across multiple files.
    root = nc.open('unittest0*.nc')[0]
    self.assertNotIn('data', root.variables)
    var = nc.getvar(root, 'data')
    self.assertEquals(var.shape, (5, 100, 200))
    self.assertIn('data', root.variables)
    are_equals = (var[:] == self.data)
    self.assertTrue(are_equals.all())
    nc.close(root)
def test_multiple_file_new_var_operations(self):
    # check creating a new variable.
    root = nc.open('unittest0*.nc')[0]
    var = nc.getvar(root, 'new_variable', 'f4', ('time', 'yc', 'xc'),
                    digits=3, fill_value=1.0)
    self.assertTrue((var[:] == 1.0).all())
    self.assertEquals(var.__class__, nc.DistributedNCVariable)
    self.assertEquals(var[:].__class__, np.ndarray)
    tmp = var[:]
    var[:] = var[:] + 1
    nc.close(root)
    # check that the value was saved into the files.
    root = nc.open('unittest00.nc')[0]
    var = nc.getvar(root, 'new_variable')
    self.assertEquals(var, tmp + 1)
    nc.close(root)
def test_open_close_multiple_files(self):
    # check opening the pattern selection through a package instance.
    root, is_new = nc.open('unittest0*.nc')
    self.assertEquals(root.files, ['unittest0%i.nc' % i for i in range(5)])
    self.assertEquals(root.pattern, 'unittest0*.nc')
    self.assertEquals(len(root.roots), 5)
    self.assertFalse(is_new)
    self.assertFalse(root.read_only)
    # check closing the package with all the files.
    nc.close(root)
    with self.assertRaisesRegexp(RuntimeError, u'NetCDF: Not a valid ID'):
        nc.close(root)
def test_open_close_existent_file(self):
    # check opening an existing file.
    root, is_new = nc.open('unittest00.nc')
    self.assertEquals(root.files, ['unittest00.nc'])
    self.assertEquals(root.pattern, 'unittest00.nc')
    self.assertEquals(len(root.roots), 1)
    self.assertFalse(is_new)
    self.assertFalse(root.read_only)
    # check closing an existing file.
    nc.close(root)
    with self.assertRaisesRegexp(RuntimeError, u'NetCDF: Not a valid ID'):
        nc.close(root)
def import_measurement(year, month, filename, stations):
    root, _ = nc.open(filename)
    measurements = nc.clonevar(root, 'globalradiation', 'measurements')
    for name in stations:
        # input_filename = '/home/adrian/Desktop/heliosat2/Datos_horarios/' + name + '_' + year + month + '_H.txt'
        input_filename = ('/home/santiago/GERSOLAR/GOES/Datos_horarios/' +
                          name + '_' + year + month + '_H.txt')
        stations_index = stations.index(name)
        rows = from_txt(input_filename, utc_diff=-3, timestamp_col=0,
                        channel=1, skip_rows=1)
        rows2netcdf(rows, root, measurements, stations_index)
    nc.close(root)
def __init__(self, filenames, tile_cut={}):
    # At first it should have: lat, lon, dem, linke
    self.root, is_new = nc.open("static.nc")
    if is_new:
        logging.info("This is the first execution from the deployment... ")
        with nc.loader(filenames[0]) as root_ref:
            self.lat = nc.getvar(root_ref, "lat")
            self.lon = nc.getvar(root_ref, "lon")
            nc.getvar(self.root, "lat", source=self.lat)
            nc.getvar(self.root, "lon", source=self.lon)
            self.project_dem()
            self.project_linke()
            nc.sync(self.root)
    self.root = nc.tailor(self.root, dimensions=tile_cut)
def __init__(self, filenames):
    # At first it should have: lat, lon, dem, linke
    self.root, is_new = nc.open('static.nc')
    if is_new:
        logging.info("This is the first execution from the deployment... ")
        with nc.loader(filenames[0]) as root_ref:
            self.lat = nc.getvar(root_ref, 'lat')
            self.lon = nc.getvar(root_ref, 'lon')
            nc.getvar(self.root, 'lat', source=self.lat)
            nc.getvar(self.root, 'lon', source=self.lon)
            self.project_dem()
            self.project_linke()
            nc.sync(self.root)
    self.root = nc.tailor(self.root, dimensions=DIMS)
def test_get_non_existing_var_multiple_file(self):
    # check getting a non-existing variable across multiple files.
    root = nc.open('unittest0*.nc')[0]
    self.assertNotIn('new_variable', root.variables)
    var = nc.getvar(root, 'new_variable', 'f4', ('time', 'yc', 'xc'),
                    digits=3, fill_value=1.2)
    self.assertEquals(var.shape, (5, 100, 200))
    self.assertIn('new_variable', root.variables)
    ref = np.zeros(var.shape) + 1.2
    # the comparison is true if the error is less than 0.002
    are_equals = (var[:] - ref) < 0.002
    self.assertTrue(are_equals.all())
    nc.close(root)
def test_get_var_copy_from_source(self):
    root = nc.open('unittest0*.nc')[0]
    if os.path.isfile('unittest_destiny.nc'):
        os.remove('unittest_destiny.nc')
    root_d = nc.open('unittest_destiny.nc')[0]
    # check if getvar copies a variable from a complex file to a simple file.
    var_source = nc.getvar(root, 'data')
    var = nc.getvar(root_d, 'data_copy', source=var_source)
    self.assertEquals(var, var_source)
    # check if getvar copies a variable from a simple file to a complex file.
    var_distributed = nc.getvar(root, 'data_copy', source=var)
    self.assertEquals(var, var_distributed)
    # check if getvar copies changing the vtype to a simple file.
    var_int = nc.getvar(root_d, 'data_int', 'i4', source=var_source)
    self.assertEquals(var_source.vtype, 'f4')
    self.assertEquals(var_int.vtype, 'i4')
    diff = var_source[:] - var_int[:]
    self.assertTrue((diff < 1).all())
    # check if getvar copies changing the vtype to a multiple file.
    var_distributed_int = nc.getvar(root, 'data_int', 'i4', source=var)
    self.assertEquals(var_distributed.vtype, 'f4')
    self.assertEquals(var_distributed_int.vtype, 'i4')
    diff = var_distributed[:] - var_distributed_int[:]
    self.assertTrue((diff < 1).all())
def test_open_close_readonly_file(self):
    # set the file to be read-only.
    filename = 'ro_unittest.nc'
    if os.path.isfile(filename):
        os.chmod(filename, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
    # check opening the existing read-only file.
    root, is_new = nc.open(filename)
    self.assertEquals(root.files, [filename])
    self.assertEquals(root.pattern, filename)
    self.assertEquals(len(root.roots), 1)
    self.assertFalse(is_new)
    self.assertTrue(root.read_only)
    # check closing the read-only file.
    nc.close(root)
    with self.assertRaisesRegexp(RuntimeError, u'NetCDF: Not a valid ID'):
        nc.close(root)
def test_open_close_new_file(self):
    # remove the file from the filesystem.
    filename = 'unittest-1.nc'
    if os.path.isfile(filename):
        os.remove(filename)
    # check creating and opening a new file.
    root, is_new = nc.open(filename)
    self.assertEquals(root.files, ['unittest-1.nc'])
    self.assertEquals(root.pattern, 'unittest-1.nc')
    self.assertEquals(len(root.roots), 1)
    self.assertTrue(is_new)
    self.assertFalse(root.read_only)
    # check closing the created file.
    nc.close(root)
    with self.assertRaisesRegexp(RuntimeError, u'NetCDF: Not a valid ID'):
        nc.close(root)
def test_open_close_multiple_files_with_readonly_restriction(self):
    # check the files are NOT read only.
    filenames = map(lambda i: 'unittest0%i.nc' % i, range(5))
    can_write = map(lambda f: os.access(f, os.W_OK), filenames)
    self.assertTrue(all(can_write))
    # check opening the pattern selection through a package instance.
    root, is_new = nc.open('unittest0*.nc', read_only=True)
    self.assertEquals(root.files, ['unittest0%i.nc' % i for i in range(5)])
    self.assertEquals(root.pattern, 'unittest0*.nc')
    self.assertEquals(len(root.roots), 5)
    self.assertFalse(is_new)
    self.assertTrue(root.read_only)
    with self.assertRaisesRegexp(Exception, u'NetCDF: Write to read only'):
        var = nc.getvar(root, 'data')
        var[:] = 0
    # check closing the package with all the files.
    nc.close(root)
    with self.assertRaisesRegexp(RuntimeError, u'NetCDF: Not a valid ID'):
        nc.close(root)
def radiance(filename):
    prefix = short(filename, 0, 3)
    slot = int(round(decimalhour(to_datetime(filename)) * 2))
    suffix = short(filename, 4, 6)
    output_filename = create_output_path(prefix, slot, suffix)
    root, is_new = nc.open(filename)
    radiation = nc.getvar(root, 'globalradiation')
    with nc.loader(output_filename) as radiance_root:
        dims_names = list(reversed(radiation.dimensions.keys()))
        dims_values = list(reversed(radiation.dimensions.values()))
        create_dims = (lambda name, dimension:
                       radiance_root.create_dimension(name, len(dimension)))
        map(lambda name, dimension: create_dims(name, dimension),
            dims_names, dims_values)
        radiance = nc.getvar(radiance_root, 'radiance', vtype='f4',
                             dimensions=tuple(dims_names))
        radiance[:] = radiation[:] * 30. * 60. * 10 ** -6
def interpolate_radiance(radiance_files, radiance_filename):
    before = search_closest(radiance_files, radiance_filename, lambda s: s - 1)
    after = search_closest(radiance_files, radiance_filename, lambda s: s + 1)
    extrems = filter(lambda x: x, [before, after])
    if extrems:
        ref_filename = max(extrems)
        files = map(lambda e: rev_key[e], extrems)
        root, is_new = nc.open(files)
        radiation = nc.getvar(root, 'globalradiation')
        if len(extrems) > 1:
            radiation = np.average(radiation[:], axis=0,
                                   weights=calculate_weights(radiance_filename,
                                                             files))
        else:
            radiation = radiation[:].mean()
        initialize(radiance_filename, rev_key[ref_filename],
                   lambda r: radiation * TO_MJRAD)
        nc.close(root)
def test_open_close_file_with_readonly_restriction(self):
    # check the file is NOT read only.
    filename = 'unittest00.nc'
    can_write = os.access(filename, os.W_OK)
    self.assertTrue(can_write)
    # check opening an existing file.
    root, is_new = nc.open('unittest00.nc', read_only=True)
    self.assertEquals(root.files, ['unittest00.nc'])
    self.assertEquals(root.pattern, 'unittest00.nc')
    self.assertEquals(len(root.roots), 1)
    self.assertFalse(is_new)
    self.assertTrue(root.read_only)
    with self.assertRaisesRegexp(Exception, u'NetCDF: Write to read only'):
        var = nc.getvar(root, 'data')
        var[:] = 0
    # check closing an existing file.
    nc.close(root)
    with self.assertRaisesRegexp(RuntimeError, u'NetCDF: Not a valid ID'):
        nc.close(root)
def test_open_unexistent_file(self):
    with self.assertRaisesRegexp(Exception, u'There is not file list or '
                                 'pattern to open.'):
        nc.open([])
# https://pypi.org/project/python-dwca-reader/
# pip install python-dwca-reader
# example usage
from dwca.read import DwCAReader

with DwCAReader('gbif-results.zip') as dwca:
    print("Core data file is: {}".format(dwca.descriptor.core.file_location))
    # => 'occurrence.txt'
    # creates a Pandas dataframe
    core_df = dwca.pd_read('occurrence.txt', parse_dates=True)

## NETCDF
# https://pypi.org/project/netcdf/
# pip install netcdf
# example usage
from netcdf import netcdf as nc

root, is_new = nc.open('file_*.nc')
data = nc.getvar(root, 'data')
print("Matrix values: ", data[:])

## GEOTIFF (requires GDAL package)
# https://pypi.org/project/georasters/
# pip install georasters
# example usage
import georasters as gr

raster = './data/slope.tif'
data = gr.from_file(raster)

## KML, Shapefiles, Esri Geodatabase, Raster
# https://pypi.org/project/geopandas/
# pip install geopandas
# https://pypi.org/project/Fiona/
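# The geopandas entry above only lists the install commands, so here is a
# minimal read sketch under assumptions: './data/zones.shp' is a hypothetical
# file name, not part of the original notes. geopandas.read_file handles
# Shapefiles and GeoJSON out of the box, and KML or Esri Geodatabase layers
# when the corresponding GDAL/Fiona drivers are available.
import geopandas as gpd

gdf = gpd.read_file('./data/zones.shp')
print(gdf.head())  # preview of the attribute table
print(gdf.crs)     # coordinate reference system of the layer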
    'Parana': [-31.84894, -60.536117],
    'Balcarce': [-37.763199, -58.297519],
    'Pergamino': [-33.944332, -60.568668],
    'MarcosJuarez': [-32.568348, -62.082349],
    'Azul': [-36.766174, -59.881312],
    'Villegas': [-34.8696, -62.7790],
    'Barrow': [-38.184, -60.129],
    'Ceilap': [-34.567, -58.5],
    'Concepcion': [-32.483, -58.233]
}
names = ['Anguil', 'Azul', 'Barrow', 'Concepcion', 'Lujan', 'MarcosJuarez',
         'Parana', 'Villegas']
pos = [s[n] for n in names]
print pos

from heliosat.main import workwith
directory = '/home/santiago/GERSOLAR/git/solar_radiation_model/imagedownloader/'
cut_positions(directory + 'clone_M12.nc', 0, pos)
workwith(directory + 'cut_positions.clone_M12.nc')
import_measurement('2013', '12', directory + 'cut_positions.clone_M12.nc', names)

from libs.statistics import error
root, _ = nc.open(directory + 'cut_positions.clone_M12.nc')
error.dailyerrors(root, names)
def test_get_existing_dim_multiple_file(self):
    # check getting an existing dimension across multiple files.
    root = nc.open('unittest0*.nc')[0]
    self.assertEquals(len(nc.getdim(root, 'time')), 5)
    nc.close(root)