Example #1
 def __exit__(self, type, value, traceback):
     self.initial_size = len(self.initial_files)
     final_size = len(glob.glob('%s/*.nc' % self.directory))
     files = self.files
     if files:
         # Test if it downloaded something.
         self.assertNotEquals(final_size, self.initial_size)
         the_size = final_size - self.initial_size
         self.assertEquals(len(files), the_size)
         # Test if it calibrated the downloaded files.
         with nc.loader(files) as root:
             counts = nc.getvar(root, 'counts_shift')
             self.assertEquals(counts.shape, (the_size, 1, 1))
             self.assertEquals(counts[0], 32.)
             space = nc.getvar(root, 'space_measurement')
             self.assertEquals(space.shape, (the_size, 1, 1))
             self.assertEquals(space[0], 29.)
             prelaunch_0 = nc.getvar(root, 'prelaunch_0')
             self.assertEquals(prelaunch_0.shape, (the_size, 1, 1))
             self.assertAlmostEqual(prelaunch_0[0], 0.61180001, 6)
             prelaunch_1 = nc.getvar(root, 'prelaunch_1')
             self.assertEquals(prelaunch_1.shape, (the_size, 1, 1))
             self.assertAlmostEqual(prelaunch_1[0], 0.00116, 6)
             postlaunch = nc.getvar(root, 'postlaunch')
             self.assertEquals(postlaunch.shape, (the_size, 1, 1))
def initialize(radiance_filename, radiation_filename, callback=lambda r: r):
    ref, _ = nc.open(radiation_filename)
    ref_radiation = nc.getvar(ref, 'globalradiation')
    with nc.loader(radiance_filename) as radiance_root:
        radiance = nc.getvar(radiance_root, 'radiance', source=ref_radiation)
        radiance[0, :] = callback(radiance[0, :])
    nc.close(ref)
 def verify_output(self, files, output, config):
     tested = map(lambda f:
                  self.translate_file('tests/products/estimated', f),
                  files)
     with nc.loader(tested, self.tile_cut) as old:
         valid = nc.getvar(old, 'globalradiation')
         max_valid = valid[:].max()
         # Allow 1% of the maximum value as the error threshold.
         threshold = max_valid * 0.01
         calculated = output.globalradiation
         if config['product']:
             products = map(lambda f:
                            self.translate_file('products/estimated', f),
                            files)
             with nc.loader(products, self.tile_cut) as new:
                 calculated = nc.getvar(new, 'globalradiation')
         gtz = lambda m: m[calculated[:] >= 0]
         diff = gtz(calculated[:] - valid[:])
         print('thr: {:}'.format(threshold))
         print('min: {:} ({:})'.format(gtz(calculated[:]).min(),
                                       gtz(valid[:]).min()))
         print('max: {:} ({:})'.format(gtz(calculated[:]).max(),
                                       gtz(valid[:]).max()))
         self.assertTrue((diff < threshold).all())
         shape = valid.shape
     return shape
 def obtain_data(self, y, x):
     now = localize(datetime.utcnow())
     now = now.replace(year=2015, month=2, day=17)
     today_pattern = path + 'goes13.%Y.%j.*.BAND_01.nc'
     files = glob(now.strftime(today_pattern))
     if not files:
         return []
     with nc.loader(files) as root:
         time = nc.getvar(root, 'time')[:,0]
         data = nc.getvar(root, 'globalradiation')[:,y,x]
     return zip(time.tolist(), data.tolist())
 def verify_output(self):
     with nc.loader('tests/products/estimated/*.nc', DIMS) as old_root:
         with nc.loader('products/estimated/*.nc', DIMS) as new_root:
             valid = nc.getvar(old_root, 'globalradiation')
             max_valid = valid[:].max()
             # Allow 1% of the maximum value as the error threshold.
             threshold = max_valid * 0.01
             calculated = nc.getvar(new_root, 'globalradiation')
             gtz = lambda m: m[calculated[:] >= 0]
             diff = gtz(calculated[:] - valid[:])
             print('min: {:} ({:})'.format(gtz(calculated[:]).min(),
                                           gtz(valid[:]).min()))
             print('max: {:} ({:})'.format(gtz(calculated[:]).max(),
                                           gtz(valid[:]).max()))
             self.assertTrue((diff < threshold).all())
Example #7
 def test_multiple_file_var_operations(self):
     # check getting and setting the numpy matrix.
     root = nc.open('unittest0*.nc')[0]
     var = nc.getvar(root, 'data')
     self.assertEquals(var.__class__, nc.DistributedNCVariable)
     self.assertEquals(var[:].__class__, np.ndarray)
     tmp = var[:]
     var[:] = var[:] + 1
     nc.close(root)
     # check if value was saved into the file.
     root = nc.open('unittest0*.nc')[0]
     var = nc.getvar(root, 'data')
     self.assertTrue((var[:] == tmp + 1).all())
     nc.close(root)
 def linke(self):
     if not hasattr(self, '_cached_linke'):
         self._linke = nc.getvar(self.static_cached, 'linke')
         linke_list = map(lambda dt: self._linke[0,dt.month - 1,:][0,:],
                          map(to_datetime, self.filenames))
         self._cached_linke = np.vstack(linke_list)
     return self._cached_linke
Example #10
 def getvar(self, *args, **kwargs):
     name = args[0]
     if name not in self._attrs.keys():
         tmp = list(args)
         tmp.insert(0, self.cache.root)
         self._attrs[name] = nc.getvar(*tmp, **kwargs)
     return self._attrs[name]
Example #11
 def test_multiple_file_new_var_operations(self):
     # check creating a new variable.
     root = nc.open('unittest0*.nc')[0]
     var = nc.getvar(root, 'new_variable',
                     'f4', ('time', 'yc', 'xc'),
                     digits=3, fill_value=1.0)
     self.assertEquals(var.__class__, nc.DistributedNCVariable)
     self.assertEquals(var[:].__class__, np.ndarray)
     tmp = var[:]
     var[:] = var[:] + 1
     nc.close(root)
     # check if value was saved into the files.
     root = nc.open('unittest00.nc')[0]
     var = nc.getvar(root, 'new_variable')
     self.assertTrue((var[:] == tmp + 1).all())
     nc.close(root)
Example #12
 def test_character_variables_in_multiple_file(self):
     # check getting and setting the numpy string matrix across multiple files.
     root = nc.open('unittest0*.nc')[0]
     var = nc.getvar(root, 'auditTrail')
     self.assertEquals(var.shape, (5, 2, 80))
     result = np.vstack([[self.auditTrail] for i in range(5)])
     self.assertEquals(var, result)
     for i in range(5):
         result[i, i % 2].data[0:6] = 'CHANGE'
         var[i, i % 2, 0:6] = np.array(list('CHANGE'))
     self.assertEquals(var, result)
     nc.close(root)
     # check if it was written to each file.
     root = nc.open('unittest0*.nc')[0]
     var = nc.getvar(root, 'auditTrail')
     self.assertEquals(var, result)
     nc.close(root)
def calibrate(filename):
    """
    Append the calibration parameters as variables of the netcdf file.

    Keyword arguments:
    filename -- the name of a netcdf file.
    """
    params = calibration_to(filename)
    with nc.loader(filename) as root:
        for key, value in params.items():
            nc.getdim(root, 'xc_1', 1)
            nc.getdim(root, 'yc_1', 1)
            if isinstance(value, list):
                for i in range(len(value)):
                    nc.getvar(root, '%s_%i' % (key, i), 'f4',
                              ('time', 'yc_1', 'xc_1'))[:] = value[i]
            else:
                nc.getvar(root, key, 'f4', ('time', 'yc_1', 'xc_1'))[:] = value
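For reference, a minimal usage sketch of calibrate; the GOES filename is the same sample file used by the tests further down this page and is assumed to exist locally:

calibrate('goes13.2014.124.123422.BAND_01.nc')
with nc.loader('goes13.2014.124.123422.BAND_01.nc') as root:
    # after calibration the parameters are readable as regular variables
    print(float(nc.getvar(root, 'postlaunch')[0]))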
def radiance(filename):
    prefix = short(filename, 0, 3)
    slot = int(round(decimalhour(to_datetime(filename))*2))
    suffix = short(filename, 4, 6)
    output_filename = create_output_path(prefix, slot, suffix)
    root, is_new = nc.open(filename)
    radiation = nc.getvar(root, 'globalradiation')
    with nc.loader(output_filename) as radiance_root:
        dims_names = list(reversed(radiation.dimensions.keys()))
        dims_values = list(reversed(radiation.dimensions.values()))
        create_dims = (lambda name, dimension:
                       radiance_root.create_dimension(name, len(dimension)))
        (map(lambda name, dimension: create_dims(name, dimension),
             dims_names, dims_values))
        radiance = (nc.getvar(radiance_root, 'radiance', vtype='f4',
                              dimensions=tuple(dims_names)))
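        # 30. * 60. * 1e-6: presumably accumulates the slot-mean W/m^2 over the
        # 30-minute slot (1800 s) and converts the result from J/m^2 to MJ/m^2.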
        radiance[:] = radiation[:]*30.*60.*10**-6
Example #16
 def test_character_variables_in_single_file(self):
     # check getting and setting the numpy string matrix in a single file.
     root = nc.open('unittest00.nc')[0]
     var = nc.getvar(root, 'auditTrail')
     self.assertEquals(var.shape, (1, 2, 80))
     self.assertEquals(var, self.auditTrail)
     self.auditTrail[:].data[0:6] = 'CHANGE'
     var[0, 0:6] = np.array(list('CHANGE'))
     self.assertEquals(var, self.auditTrail)
     nc.close(root)
 def __getattr__(self, name):
     if name not in self._attrs.keys():
         var_name = name[4:] if name[0:4] == "ref_" else name
         if "ref_%s" % var_name not in self._attrs.keys():
             var = nc.getvar(self.root, var_name)
             self._attrs["ref_%s" % var_name] = var
         else:
             var = self._attrs["ref_%s" % var_name]
         self._attrs[var_name] = var[:]
     return self._attrs[name]
 def project_linke(self):
     logging.info("Projecting Linke's turbidity index... ")
     dts = map(lambda m: datetime(2014, m, 15), range(1, 13))
     linkes = map(lambda dt: linke.obtain(dt, compressed=True), dts)
     linkes = map(lambda l: linke.transform_data(l, self.lat[0], self.lon[0]), linkes)
     linkes = np.vstack([[linkes]])
     nc.getdim(self.root, "months", 12)
     linke_var = nc.getvar(self.root, "linke", "f4", ("months", "yc", "xc"))
     # Dividing by 20 uncompresses the Linke coefficients and stores them as floats.
     linke_var[:] = linkes / 20.0
Example #19
def radiance(filename):
    prefix = short(filename, 0, 3)
    slot = int(round(decimalhour(to_datetime(filename)) * 2))
    suffix = short(filename, 4, 6)
    output_filename = create_output_path(prefix, slot, suffix)
    root, is_new = nc.open(filename)
    radiation = nc.getvar(root, 'globalradiation')
    with nc.loader(output_filename) as radiance_root:
        dims_names = list(reversed(radiation.dimensions.keys()))
        dims_values = list(reversed(radiation.dimensions.values()))
        create_dims = (lambda name, dimension: radiance_root.create_dimension(
            name, len(dimension)))
        (map(lambda name, dimension: create_dims(name, dimension), dims_names,
             dims_values))
        radiance = (nc.getvar(radiance_root,
                              'radiance',
                              vtype='f4',
                              dimensions=tuple(dims_names)))
        radiance[:] = radiation[:] * 30. * 60. * 10**-6
Example #20
 def __getattr__(self, name):
     if name not in self._attrs.keys():
         var_name = name[4:] if name[0:4] == 'ref_' else name
         if 'ref_%s' % var_name not in self._attrs.keys():
             var = nc.getvar(self.root, var_name)
             self._attrs['ref_%s' % var_name] = var
         else:
             var = self._attrs['ref_%s' % var_name]
         self._attrs[var_name] = var[:]
     return self._attrs[name]
 def test_calibrate(self):
     instrument.calibrate('goes13.2014.124.123422.BAND_01.nc')
     with nc.loader('goes13.2014.124.123422.BAND_01.nc') as root:
         var = lambda v: float(nc.getvar(root, v)[0])
         self.assertAlmostEquals(var('space_measurement'), 29.0)
         self.assertAlmostEquals(var('postlaunch'), 1.293)
         self.assertAlmostEquals(var('counts_shift'), 32.0)
         # 0: radiance, 1: albedo
         self.assertAlmostEquals(var('prelaunch_0'), 0.6118)
         self.assertAlmostEquals(var('prelaunch_1'), 0.00116)
Example #23
 def test_get_existing_var_multiple_file(self):
     # check getting the variable across multiple files.
     root = nc.open('unittest0*.nc')[0]
     self.assertNotIn('data', root.variables)
     var = nc.getvar(root, 'data')
     self.assertEquals(var.shape, (5, 100, 200))
     self.assertIn('data', root.variables)
     are_equals = (var[:] == self.data)
     self.assertTrue(are_equals.all())
     nc.close(root)
Example #24
 def test_get_existing_var_single_file(self):
     # check getting the variable in a single file.
     root = nc.open('unittest00.nc')[0]
     self.assertNotIn('data', root.variables)
     var = nc.getvar(root, 'data')
     self.assertEquals(var.shape, (1, 100, 200))
     self.assertIn('data', root.variables)
     are_equals = (var[:] == np.zeros(var.shape) + 1.)
     self.assertTrue(are_equals.all())
     nc.close(root)
Example #25
 def test_multiple_file_new_var_operations(self):
     # check creating a new variable.
     root = nc.open('unittest0*.nc')[0]
     var = nc.getvar(root,
                     'new_variable',
                     'f4', ('time', 'yc', 'xc'),
                     digits=3,
                     fill_value=1.0)
     self.assertTrue((var[:] == 1.0).all())
     self.assertEquals(var.__class__, nc.DistributedNCVariable)
     self.assertEquals(var[:].__class__, np.ndarray)
     tmp = var[:]
     var[:] = var[:] + 1
     nc.close(root)
     # check if value was saved into the files.
     root = nc.open('unittest00.nc')[0]
     var = nc.getvar(root, 'new_variable')
     self.assertEquals(var, tmp + 1)
     nc.close(root)
Example #27
 def project_linke(self):
     logging.info("Projecting Linke's turbidity index... ")
     dts = map(lambda m: datetime(2014, m, 15), range(1, 13))
     linkes = map(lambda dt: linke.obtain(dt, compressed=True), dts)
     linkes = map(lambda l: linke.transform_data(l, self.lat[0],
                                                 self.lon[0]), linkes)
     linkes = np.vstack([[linkes]])
     nc.getdim(self.root, 'months', 12)
     linke_var = nc.getvar(self.root, 'linke', 'f4', ('months', 'yc', 'xc'))
     # Dividing by 20 uncompresses the Linke coefficients and stores them as floats.
     linke_var[:] = linkes / 20.
 def project_linke(cls, root, lat, lon):
     logging.info("Projecting Linke's turbidity index... ")
     dts = map(lambda m: datetime(2014, m, 15), range(1, 13))
     linkes = map(lambda dt: linke.obtain(dt, compressed=True), dts)
     linkes = map(lambda l: linke.transform_data(l, lat[0],
                                                 lon[0]), linkes)
     linkes = np.vstack([[linkes]])
     nc.getdim(root, 'months', 12)
     linke_var = nc.getvar(root, 'linke', 'f4', ('months', 'yc', 'xc'))
     # Dividing by 20 uncompresses the Linke coefficients and stores them as
     # floats.
     linke_var[:] = linkes / 20.
def rmse(root, index):
    times = [datetime.utcfromtimestamp(int(t))
             for t in nc.getvar(root, 'time')[:]]
    days = [t.date() for t in times]
    days.sort()
    days_index = [d.day for d in set(days)]
    days_amount = len(days_index)
    nc.getdim(root, 'diarying', days_amount)
    nc.sync(root)
    measurements = nc.getvar(root, 'measurements')
    estimated = nc.getvar(root, 'globalradiation')
    error_diff = nc.getvar(root, 'errordiff', 'f4',
                           ('time', 'yc_cut', 'xc_cut',), 4)
    error = nc.getvar(root, 'error', 'f4',
                      ('time', 'yc_cut', 'xc_cut',), 4)
    diary_error = nc.getvar(root, 'diaryerror', 'f4',
                            ('diarying', 'yc_cut', 'xc_cut',), 4)
    error_diff[:] = np.zeros(estimated.shape)
    error[:] = np.zeros(estimated.shape)
    diary_error[:] = np.zeros((days_amount, estimated.shape[1],
                               estimated.shape[2]))
    nc.sync(root)
    # the_max = measurements[:].max()
    error_diff[:, index, :] = (measurements[:, index, :] -
                               estimated[:, index, :])
    error[:, index, :] = np.abs(error_diff[:, index, :])
    nc.sync(root)
    max_value_in_day = np.zeros([days_amount]) + 1
    for i in range(len(days)):
        d_i = days_index.index(days[i].day)
        if max_value_in_day[d_i] < measurements[i, index, 0]:
            max_value_in_day[d_i] = measurements[i, index, 0]
        diary_error[d_i, index, :] += np.array(
            [error_diff[i, index, 0] ** 2, 1])
    count = diary_error[:, index, 1]
    count[count == 0] = 1
    diary_error[:, index, 0] = np.sqrt(diary_error[:, index, 0] / count)
    diary_error[:, index, 1] = diary_error[:, index, 0] / max_value_in_day * 100
Example #30
def rmse(root, index):
    times = [
        datetime.utcfromtimestamp(int(t)) for t in nc.getvar(root, 'time')[:]
    ]
    days = [t.date() for t in times]
    days.sort()
    days_index = [d.day for d in set(days)]
    days_amount = len(days_index)
    nc.getdim(root, 'diarying', days_amount)
    nc.sync(root)
    measurements = nc.getvar(root, 'measurements')
    estimated = nc.getvar(root, 'globalradiation')
    error_diff = nc.getvar(root, 'errordiff', 'f4', (
        'time',
        'yc_cut',
        'xc_cut',
    ), 4)
    error = nc.getvar(root, 'error', 'f4', (
        'time',
        'yc_cut',
        'xc_cut',
    ), 4)
    diary_error = nc.getvar(root, 'diaryerror', 'f4', (
        'diarying',
        'yc_cut',
        'xc_cut',
    ), 4)
    error_diff[:] = np.zeros(estimated.shape)
    error[:] = np.zeros(estimated.shape)
    diary_error[:] = np.zeros(
        (days_amount, estimated.shape[1], estimated.shape[2]))
    nc.sync(root)
    #the_max = measurements[:].max()
    error_diff[:,
               index, :] = measurements[:, index, :] - estimated[:, index, :]
    error[:, index, :] = np.abs(error_diff[:, index, :])
    nc.sync(root)
    max_value_in_day = np.zeros([days_amount]) + 1
    for i in range(len(days)):
        d_i = days_index.index(days[i].day)
        max_value_in_day[d_i] = measurements[
            i, index, 0] if max_value_in_day[d_i] < measurements[
                i, index, 0] else max_value_in_day[d_i]
        diary_error[d_i, index, :] += np.array([error_diff[i, index, 0]**2, 1])
    count = diary_error[:, index, 1]
    count[count == 0] = 1
    diary_error[:, index, 0] = np.sqrt(diary_error[:, index, 0] / count)
    diary_error[:, index,
                1] = diary_error[:, index, 0] / max_value_in_day * 100
    show("\rDiary RMS error: %.2f" % (diary_error[:, index, 1]).mean())
    for i in range(len(days)):
        d_i = days_index.index(days[i].day)
        error[i, index, 1] = error[i, index, 1] / max_value_in_day[d_i] * 100
    result = np.sum(error[:, index, 1]**2)
    result = np.sqrt(result / error.shape[0])
    show("Half-hour RMS error: %.2f \n" % result)
    #diary_error[:, index,1] = diary_error[:, index,0]
    nc.sync(root)
    nc.close(root)
Example #31
 def test_get_non_existing_var_multiple_file(self):
     # check getting the variable across multiple files.
     root = nc.open('unittest0*.nc')[0]
     self.assertNotIn('new_variable', root.variables)
     var = nc.getvar(root, 'new_variable',
                     'f4', ('time', 'yc', 'xc'),
                     digits=3, fill_value=1.2)
     self.assertEquals(var.shape, (5, 100, 200))
     self.assertIn('new_variable', root.variables)
     ref = np.zeros(var.shape) + 1.2
     # the comparison is true if the error is less than 0.002
     are_equals = (var[:] - ref) < 0.002
     self.assertTrue(are_equals.all())
     nc.close(root)
Example #32
 def test_get_var_copy_from_source(self):
     root = nc.open('unittest0*.nc')[0]
     if os.path.isfile('unittest_destiny.nc'):
         os.remove('unittest_destiny.nc')
     root_d = nc.open('unittest_destiny.nc')[0]
     # check that getvar copies a variable from a complex file to a simple file.
     var_source = nc.getvar(root, 'data')
     var = nc.getvar(root_d, 'data_copy', source=var_source)
     self.assertEquals(var, var_source)
     # check that getvar copies a variable from a simple file to a complex file.
     var_distributed = nc.getvar(root, 'data_copy', source=var)
     self.assertEquals(var, var_distributed)
     # check that getvar copies to a simple file, changing the vtype.
     var_int = nc.getvar(root_d, 'data_int', 'i4', source=var_source)
     self.assertEquals(var_source.vtype, 'f4')
     self.assertEquals(var_int.vtype, 'i4')
     diff = var_source[:] - var_int[:]
     self.assertTrue((diff < 1).all())
     # check that getvar copies to a multiple file, changing the vtype.
     var_distributed_int = nc.getvar(root, 'data_int', 'i4', source=var)
     self.assertEquals(var_distributed.vtype, 'f4')
     self.assertEquals(var_distributed_int.vtype, 'i4')
     diff = var_distributed[:] - var_distributed_int[:]
     self.assertTrue((diff < 1).all())
Example #33
def rows2netcdf(rows, root, measurements, index):
    # root, is_new = nc.open(filename)
    # if not is_new:
    slots = nc.getvar(root, 'slots')
    times = [datetime.utcfromtimestamp(int(t))
             for t in nc.getvar(root, 'time')]
    instant_radiation = rows2slots(rows, 2)
    earth_failures = 0
    i_e = 0
    i_m = 0
    while i_e < len(times) and i_m < len(instant_radiation):
        # When the estimated date is before the measured date.
        if times[i_e].date() < instant_radiation[i_m][1][0].date():
            i_e += 1
        # When the estimated date is after the measured date.
        elif times[i_e].date() > instant_radiation[i_m][1][0].date():
            i_m += 1
        else:
            if slots[i_e] < instant_radiation[i_m][0]:
                # TODO: This should be completed with a 0 error from the
                # estimation.
                measurements[i_e, index, :] = np.array([0, 0])
                earth_failures += 1
                i_e += 1
            elif slots[i_e] > instant_radiation[i_m][0]:
                i_m += 1
            else:
                value = instant_radiation[i_m][1][1]
                # row_in_slot = instant_radiation[i_m][1][2]
                measurements[i_e, index, :] = np.array([value, value])
                i_e += 1
                i_m += 1
    while i_e < len(times):
        # TODO: This should be completed with a 0 error from the estimation.
        measurements[i_e, index, :] = np.array([0, 0])
        earth_failures += 1
        i_e += 1
    print("Detected %i of %i estimated times without an earth measurement.\n"
          % (earth_failures, len(slots)))
Example #35
 def test_get_non_existing_var_multiple_file(self):
     # check getting the variable across multiple files.
     root = nc.open('unittest0*.nc')[0]
     self.assertNotIn('new_variable', root.variables)
     var = nc.getvar(root,
                     'new_variable',
                     'f4', ('time', 'yc', 'xc'),
                     digits=3,
                     fill_value=1.2)
     self.assertEquals(var.shape, (5, 100, 200))
     self.assertIn('new_variable', root.variables)
     ref = np.zeros(var.shape) + 1.2
     # the comparison is true if the error is less than 0.002
     are_equals = (var[:] - ref) < 0.002
     self.assertTrue(are_equals.all())
     nc.close(root)
 def construct(cls, static_file, ref_filename):
     # At first it should have: lat, lon, dem, linke
     logging.info("This is the first execution from the deployment... ")
     with nc.loader(ref_filename) as root_ref:
         with nc.loader(static_file) as root:
             lat = nc.getvar(root_ref, 'lat')
             lon = nc.getvar(root_ref, 'lon')
             nc.getvar(root, 'lat', source=lat)
             nc.getvar(root, 'lon', source=lon)
             cls.project_dem(root, lat, lon)
             cls.project_linke(root, lat, lon)
 def test_load_calibration_using_netcdf_package(self):
     instrument.calibrate('goes13.2014.124.123422.BAND_01.nc')
     array_g = lambda x: [x for i in range(5)]
     with nc.loader(array_g('goes13.2014.124.123422.BAND_01.nc')) as root:
         var = lambda v: map(float, nc.getvar(root, v)[:])
         for i in range(5):
             self.assertAlmostEquals(var('space_measurement')[i],
                                     array_g(29.0)[i])
             self.assertAlmostEquals(var('postlaunch')[i],
                                     array_g(1.293)[i])
             self.assertAlmostEquals(var('counts_shift')[i],
                                     array_g(32.0)[i])
             # 0: radiance, 1: albedo
             self.assertAlmostEquals(var('prelaunch_0')[i],
                                     array_g(0.6118)[i])
             self.assertAlmostEquals(var('prelaunch_1')[i],
                                     array_g(0.00116)[i])
def interpolate_radiance(radiance_files, radiance_filename):
    before = search_closest(radiance_files, radiance_filename, lambda s: s - 1)
    after = search_closest(radiance_files, radiance_filename, lambda s: s + 1)
    extrems = filter(lambda x: x, [before, after])
    if extrems:
        ref_filename = max(extrems)
        files = map(lambda e: rev_key[e], extrems)
        root, is_new = nc.open(files)
        radiation = nc.getvar(root, 'globalradiation')
        if len(extrems) > 1:
            radiation = np.average(radiation[:], axis=0,
                                   weights=calculate_weights(radiance_filename,
                                                             files))
        else:
            radiation = radiation[:].mean()
        initialize(radiance_filename, rev_key[ref_filename],
                   lambda r: radiation * TO_MJRAD)
        nc.close(root)
Example #39
def only_incompleted(url, destfolder):
    dest = calculate_destiny(url, destfolder)
    completed = False
    if os.path.exists(dest):
        try:
            with nc.loader(dest) as root:
                nc.getvar(root, 'data')
                nc.getvar(root, 'lat')
                nc.getvar(root, 'lon')
            completed = True
        except (OSError, IOError, Exception):
            logger.error("The file %s was broken." % dest)
    return not completed
Example #41
 def test_open_close_multiple_files_with_readonly_restriction(self):
     # check the files are NOT read only.
     filenames = map(lambda i: 'unittest0%i.nc' % i, range(5))
     can_write = map(lambda f: os.access(f, os.W_OK), filenames)
     self.assertTrue(all(can_write))
     # check opening the pattern selection using a package instance.
     root, is_new = nc.open('unittest0*.nc', read_only=True)
     self.assertEquals(root.files, ['unittest0%i.nc' % i for i in range(5)])
     self.assertEquals(root.pattern, 'unittest0*.nc')
     self.assertEquals(len(root.roots), 5)
     self.assertFalse(is_new)
     self.assertTrue(root.read_only)
     with self.assertRaisesRegexp(Exception, u'NetCDF: Write to read only'):
         var = nc.getvar(root, 'data')
         var[:] = 0
     # check closing the package together with all its files.
     nc.close(root)
     with self.assertRaisesRegexp(RuntimeError, u'NetCDF: Not a valid ID'):
         nc.close(root)
Example #42
 def test_open_close_file_with_readonly_restriction(self):
     # check the file is NOT read only.
     filename = 'unittest00.nc'
     can_write = os.access(filename, os.W_OK)
     self.assertTrue(can_write)
     # check opening an existing file.
     root, is_new = nc.open('unittest00.nc', read_only=True)
     self.assertEquals(root.files, ['unittest00.nc'])
     self.assertEquals(root.pattern, 'unittest00.nc')
     self.assertEquals(len(root.roots), 1)
     self.assertFalse(is_new)
     self.assertTrue(root.read_only)
     with self.assertRaisesRegexp(Exception, u'NetCDF: Write to read only'):
         var = nc.getvar(root, 'data')
         var[:] = 0
     # check closing an existing file.
     nc.close(root)
     with self.assertRaisesRegexp(RuntimeError, u'NetCDF: Not a valid ID'):
         nc.close(root)
def interpolate_radiance(radiance_files, radiance_filename):
    before = search_closest(radiance_files, radiance_filename, lambda s: s - 1)
    after = search_closest(radiance_files, radiance_filename, lambda s: s + 1)
    extrems = filter(lambda x: x, [before, after])
    if extrems:
        ref_filename = max(extrems)
        files = map(lambda e: rev_key[e], extrems)
        root, is_new = nc.open(files)
        radiation = nc.getvar(root, 'globalradiation')
        if len(extrems) > 1:
            radiation = np.average(radiation[:],
                                   axis=0,
                                   weights=calculate_weights(
                                       radiance_filename, files))
        else:
            radiation = radiation[:].mean()
        initialize(radiance_filename, rev_key[ref_filename],
                   lambda r: radiation * TO_MJRAD)
        nc.close(root)
Example #46
 def __init__(self, filenames):
     # At first it should have: lat, lon, dem, linke
     self.root, is_new = nc.open('static.nc')
     if is_new:
         logging.info("This is the first execution from the deployment... ")
         with nc.loader(filenames[0]) as root_ref:
             self.lat = nc.getvar(root_ref, 'lat')
             self.lon = nc.getvar(root_ref, 'lon')
             nc.getvar(self.root, 'lat', source=self.lat)
             nc.getvar(self.root, 'lon', source=self.lon)
             self.project_dem()
             self.project_linke()
             nc.sync(self.root)
     self.root = nc.tailor(self.root, dimensions=DIMS)
 def __init__(self, filenames, tile_cut={}):
     # At first it should have: lat, lon, dem, linke
     self.root, is_new = nc.open("static.nc")
     if is_new:
         logging.info("This is the first execution from the deployment... ")
         with nc.loader(filenames[0]) as root_ref:
             self.lat = nc.getvar(root_ref, "lat")
             self.lon = nc.getvar(root_ref, "lon")
             nc.getvar(self.root, "lat", source=self.lat)
             nc.getvar(self.root, "lon", source=self.lon)
             self.project_dem()
             self.project_linke()
             nc.sync(self.root)
     self.root = nc.tailor(self.root, dimensions=tile_cut)
def dailyerrors(root, stations):
    times = [datetime.fromtimestamp(int(t))
             for t in nc.getvar(root, 'time')[:]]
    days = [t.date() for t in times]
    days.sort()
    days_index = [d.day for d in set(days)]
    days_index.sort()
    days_amount = len(days_index)
    nc.getdim(root, 'diarying', days_amount)
    nc.sync(root)
    measurements = nc.getvar(root, 'measurements')
    estimated = nc.getvar(root, 'globalradiation')
    error_diff = nc.getvar(root, 'errordiff', 'f4',
                           ('time', 'yc_cut', 'xc_cut',), 4)
    RMS_daily_error = nc.getvar(root, 'RMSdailyerror', 'f4',
                                ('diarying', 'yc_cut', 'xc_cut',), 4)
    BIAS_daily_error = nc.getvar(root, 'BIASdailyerror', 'f4',
                                 ('diarying', 'yc_cut', 'xc_cut',), 4)
    error_diff[:] = np.zeros(estimated.shape)
    RMS_daily_error[:] = np.zeros((days_amount, estimated.shape[1],
                                   estimated.shape[2]))
    BIAS_daily_error[:] = np.zeros((days_amount, estimated.shape[1],
                                    estimated.shape[2]))
    nc.sync(root)
    for s in stations:
        index = stations.index(s)
        show('Station: %s \n' % stations[index])
        error_diff[:, index, :] = (measurements[:, index, :] -
                                   estimated[:, index, :])
        nc.sync(root)
        sum_value_in_day = np.zeros((days_amount))
        for i in range(len(days)):
            d_i = days_index.index(days[i].day)
            if not measurements[i, index, 0] == 0.0:
                sum_value_in_day[d_i] += measurements[i, index, 0]
                RMS_daily_error[d_i, index, :] += np.array(
                    [error_diff[i, index, 0] ** 2, 1])
                BIAS_daily_error[d_i, index, :] += error_diff[i, index, 0]
        count = RMS_daily_error[:, index, 1]
        count[count == 0] = 1
        RMS_daily_error[:, index, 0] = (np.sqrt(RMS_daily_error[:, index, 0] /
                                                count) /
                                        sum_value_in_day * 100)
        BIAS_daily_error[:, index, 0] = ((BIAS_daily_error[:, index, 0] /
                                          count) / sum_value_in_day * 100)
        RMS_daily_error[:, index, 1] = RMS_daily_error[:, index, 0]
        BIAS_daily_error[:, index, 1] = BIAS_daily_error[:, index, 0]
        print('RMS: {:}'.format(RMS_daily_error[:, index, 0]))
        print('BIAS: {:}'.format(BIAS_daily_error[:, index, 0]))
        print('sum value in day: {:}'.format(sum_value_in_day[:]))
        show("\rDiary RMS daily error: %.2f\n" %
             (RMS_daily_error[:, index, 0]).mean())

    nc.sync(root)
    nc.close(root)
Example #49
 def project_dem(self):
     logging.info("Projecting DEM's map... ")
     dem_var = nc.getvar(self.root, 'dem', 'f4', source=self.lon)
     dem_var[:] = dem.obtain(self.lat[0], self.lon[0])
Example #50
def dailyerrors(root, stations):
    times = [
        datetime.fromtimestamp(int(t)) for t in nc.getvar(root, 'time')[:]
    ]
    days = [t.date() for t in times]
    days.sort()
    days_index = [d.day for d in set(days)]
    days_index.sort()
    days_amount = len(days_index)
    nc.getdim(root, 'diarying', days_amount)
    nc.sync(root)
    measurements = nc.getvar(root, 'measurements')
    estimated = nc.getvar(root, 'globalradiation')
    error_diff = nc.getvar(root, 'errordiff', 'f4', (
        'time',
        'yc_cut',
        'xc_cut',
    ), 4)
    RMS_daily_error = nc.getvar(root, 'RMSdailyerror', 'f4', (
        'diarying',
        'yc_cut',
        'xc_cut',
    ), 4)
    BIAS_daily_error = nc.getvar(root, 'BIASdailyerror', 'f4', (
        'diarying',
        'yc_cut',
        'xc_cut',
    ), 4)
    error_diff[:] = np.zeros(estimated.shape)
    RMS_daily_error[:] = np.zeros(
        (days_amount, estimated.shape[1], estimated.shape[2]))
    BIAS_daily_error[:] = np.zeros(
        (days_amount, estimated.shape[1], estimated.shape[2]))
    nc.sync(root)
    for s in stations:
        index = stations.index(s)
        show('Station: %s \n' % stations[index])
        error_diff[:,
                   index, :] = measurements[:, index, :] - estimated[:,
                                                                     index, :]
        nc.sync(root)
        sum_value_in_day = np.zeros((days_amount))
        for i in range(len(days)):
            d_i = days_index.index(days[i].day)
            if not measurements[i, index, 0] == 0.0:
                sum_value_in_day[d_i] += measurements[i, index, 0]
                RMS_daily_error[d_i, index, :] += np.array(
                    [error_diff[i, index, 0]**2, 1])
                BIAS_daily_error[d_i, index, :] += error_diff[i, index, 0]
        count = RMS_daily_error[:, index, 1]
        count[count == 0] = 1
        RMS_daily_error[:, index, 0] = np.sqrt(
            RMS_daily_error[:, index, 0] / count) / sum_value_in_day * 100
        BIAS_daily_error[:, index, 0] = (BIAS_daily_error[:, index, 0] /
                                         count) / sum_value_in_day * 100
        RMS_daily_error[:, index, 1] = RMS_daily_error[:, index, 0]
        BIAS_daily_error[:, index, 1] = BIAS_daily_error[:, index, 0]
        print('RMS: {:}'.format(RMS_daily_error[:, index, 0]))
        print('BIAS: {:}'.format(BIAS_daily_error[:, index, 0]))
        print('sum value in day: {:}'.format(sum_value_in_day[:]))
        show("\rDiary RMS daily error: %.2f\n" %
             (RMS_daily_error[:, index, 0]).mean())

    nc.sync(root)
    nc.close(root)
Example #51
 def dem(self):
     if not hasattr(self, '_cached_dem'):
         self._cached_dem = nc.getvar(self.static_cached, 'dem')[:]
     return self._cached_dem
Example #52
pip install python-dwca-reader
# example usage
from dwca.read import DwCAReader

with DwCAReader('gbif-results.zip') as dwca:
   print("Core data file is: {}".format(dwca.descriptor.core.file_location)) # => 'occurrence.txt'
   # creates a Pandas dataframe
   core_df = dwca.pd_read('occurrence.txt', parse_dates=True)

## NETCDF
# https://pypi.org/project/netcdf/
pip install netcdf
# example usage
from netcdf import netcdf as nc
root, is_new = nc.open('file_*.nc')
data = nc.getvar(root, 'data')
print("Matrix values: ", data[:])

## GEOTIFF (requires GDAL package)
# https://pypi.org/project/georasters/
pip install georasters
# example usage
import georasters as gr
raster = './data/slope.tif'
data = gr.from_file(raster)

## KML, Shapefiles, Esri Geodatabase, Raster
# https://pypi.org/project/geopandas/
pip install geopandas
# https://pypi.org/project/Fiona/
pip install Fiona
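# example usage (a minimal sketch; 'boundaries.shp' is a hypothetical local file)
import geopandas as gpd
gdf = gpd.read_file('boundaries.shp')
print(gdf.head())
import fiona
with fiona.open('boundaries.shp') as source:
    print(len(source), source.crs)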
 def linke(self):
     if not hasattr(self, "_linke"):
         self._linke = nc.getvar(self.static_cached, "linke")[:]
     return self._linke