Example #1
    def test_align(self):
        left = create_test_data()
        right = left.copy(deep=True)
        right['dim3'] = ('dim3', list('cdefghijkl'))
        right['var3'][:-2] = right['var3'][2:]
        right['var3'][-2:] = np.random.randn(*right['var3'][-2:].shape)

        intersection = list('cdefghij')
        union = list('abcdefghijkl')

        left2, right2 = align(left, right, join='inner')
        self.assertArrayEqual(left2['dim3'], intersection)
        self.assertDatasetIdentical(left2, right2)

        left2, right2 = align(left, right, join='outer')
        self.assertVariableEqual(left2['dim3'], right2['dim3'])
        self.assertArrayEqual(left2['dim3'], union)
        self.assertDatasetIdentical(left2.labeled(dim3=intersection),
                                    right2.labeled(dim3=intersection))
        self.assertTrue(np.isnan(left2['var3'][-2:]).all())
        self.assertTrue(np.isnan(right2['var3'][:2]).all())

        left2, right2 = align(left, right, join='left')
        self.assertVariableEqual(left2['dim3'], right2['dim3'])
        self.assertVariableEqual(left2['dim3'], left['dim3'])
        self.assertDatasetIdentical(left2.labeled(dim3=intersection),
                                    right2.labeled(dim3=intersection))
        self.assertTrue(np.isnan(right2['var3'][:2]).all())

        left2, right2 = align(left, right, join='right')
        self.assertVariableEqual(left2['dim3'], right2['dim3'])
        self.assertVariableEqual(left2['dim3'], right['dim3'])
        self.assertDatasetIdentical(left2.labeled(dim3=intersection),
                                    right2.labeled(dim3=intersection))
        self.assertTrue(np.isnan(left2['var3'][-2:]).all())
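The test above exercises all four join modes of align. As a minimal sketch with invented toy arrays (written against the current xarray API; the tests themselves predate the xray-to-xarray rename), the modes differ only in which labels survive:

import numpy as np
import xarray as xr  # assumption: current xarray, successor of the 'xray' package used below

a = xr.DataArray(np.arange(4.0), coords={'x': list('abcd')}, dims='x')
b = xr.DataArray(np.arange(4.0), coords={'x': list('cdef')}, dims='x')

for how in ('inner', 'outer', 'left', 'right'):
    a2, b2 = xr.align(a, b, join=how)
    print(how, a2.x.values)
# inner -> ['c' 'd'] (intersection), outer -> ['a' ... 'f'] (union, gaps become NaN),
# left  -> ['a' 'b' 'c' 'd'], right -> ['c' 'd' 'e' 'f']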
Example #3
 def test_align(self):
     self.ds['x'] = ('x', np.array(list('abcdefghij')))
     with self.assertRaises(ValueError):
         self.dv + self.dv[:5]
     dv1, dv2 = align(self.dv, self.dv[:5], join='inner')
     self.assertDataArrayIdentical(dv1, self.dv[:5])
     self.assertDataArrayIdentical(dv2, self.dv[:5])
Example #5
 def test_align_dtype(self):
     # regression test for #264
     x1 = np.arange(30)
     x2 = np.arange(5, 35)
     a = DataArray(np.random.random((30,)).astype(np.float32), {'x': x1})
     b = DataArray(np.random.random((30,)).astype(np.float32), {'x': x2})
     c, d = align(a, b, join='outer')
     self.assertEqual(c.dtype, np.float32)
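For context on the regression (#264): an outer join introduces positions that are filled with NaN, which forces a dtype change for integer data, whereas float32 can already hold NaN and is what the test above pins down. A small sketch with made-up arrays (current xarray API assumed):

import numpy as np
import xarray as xr  # assumption: current xarray API

a = xr.DataArray(np.arange(5, dtype='int32'), coords={'x': np.arange(5)}, dims='x')
b = xr.DataArray(np.arange(5, dtype='int32'), coords={'x': np.arange(2, 7)}, dims='x')
c, d = xr.align(a, b, join='outer')
print(c.dtype)  # float64: the integer data is upcast so the new positions can hold NaN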
Example #7
def slice_dataset_space(ds_array, ds_to_slice):
    '''Slice the second Dataset to fit the first Dataset (inner join on shared indexes).'''
    # align returns both inputs aligned; only the aligned second Dataset is needed here
    swe_mask_align, array_align = xray.align(ds_array,
                                             ds_to_slice,
                                             join='inner',
                                             copy=False)
    return array_align
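A hypothetical usage sketch of the helper above (the Datasets are invented; it assumes a module-level import of today's xarray under the script's old name):

import numpy as np
import xarray as xray  # assumption: modern xarray imported under the old 'xray' name

# Two Datasets whose 'time' indexes only partly overlap (illustrative data).
reference = xray.Dataset({'mask': ('time', np.ones(10))}, coords={'time': np.arange(10)})
candidate = xray.Dataset({'swe': ('time', np.random.rand(12))}, coords={'time': np.arange(3, 15)})

trimmed = slice_dataset_space(reference, candidate)
print(trimmed.time.values)  # [3 4 5 6 7 8 9] -- only labels shared with `reference` remain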
Example #8
 def test_align(self):
     self.ds['x'] = ('x', np.array(list('abcdefghij')))
     dv1, dv2 = align(self.dv, self.dv[:5], join='inner')
     self.assertDataArrayIdentical(dv1, self.dv[:5])
     self.assertDataArrayIdentical(dv2, self.dv[:5])
Example #9
swe_mask_file = '/raid9/gergel/agg_snowpack/goodleap/SWE/histmeanmask.nc'  ## 1s are swe, 0s are no swe
swe_mask = xray.open_dataset(swe_mask_file)

## rename dimensions
swe_mask.rename({"Latitude": "lat", "Longitude": "lon", "Time": "time"}, inplace=True)

swe_mask = swe_mask.squeeze()

## Dataset join
swe_mask_align, pr_full = xray.align(swe_mask, pr, join='inner', copy=False)

del pr
gc.collect()

direc = '/raid/gergel'
pdur_file = 'pduration_mod.nc'  # this is the one that I regridded to 6 x 6 km using cdo remapcon and /raid9/gergel/agg_snowpack/keepextracopies/grid_info_mine
pdur_full = xray.open_dataset(os.path.join(direc, pdur_file))  ## pdur beta parameter for John's transform from Matt Jolly
'''
for j in np.arange(len(pr_full.lat)):
    for k in np.arange(len(pr_full.lon)):
        if np.isnan(pr_full['precipitation'].values[0,j,k]) == False: 
            lon_ind = np.argmin(np.abs(pdur_full.lon - pr_full.lon[k]))
            lat_ind = np.argmin(np.abs(pdur_full.lat - pr_full.lat[j]))
Example #10
print("finished chunking data")

## cut out conus east of 103 for each variable
swe_mask_file = '/raid9/gergel/agg_snowpack/goodleap/SWE/histmeanmask.nc'  ## 1s are swe, 0s are no swe
swe_mask = xray.open_dataset(swe_mask_file)
## rename dimensions
swe_mask.rename({"Latitude": "lat", "Longitude": "lon", "Time": "time"}, inplace=True)
swe_mask = swe_mask.squeeze()
## Dataset join
swe_mask_align, tmax = xray.align(swe_mask, tmax_f, join='inner', copy=False)
swe_mask_align, tmin = xray.align(swe_mask, tmin_f, join='inner', copy=False)
swe_mask_align, q = xray.align(swe_mask, q_f, join='inner', copy=False)

print("FEEL THE BERN!")

## get julian days
julians = pd.DatetimeIndex(np.asarray(tmin.time)).dayofyear

## delete full arrays of each variable for conus
del tmax_f, tmin_f, q_f, swe_mask, swe_mask_align
gc.collect()

# 100-hr and 1000-hr DFM FUNCTION

# In[27]:
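These mask-alignment scripts pass copy=False throughout: when the indexes already line up, align then avoids reindexing and may hand back the inputs without duplicating their data. A small sketch with invented arrays (current xarray behaviour assumed):

import numpy as np
import xarray as xr  # assumption: current xarray API

a = xr.DataArray(np.arange(4.0), coords={'t': np.arange(4)}, dims='t')
b = xr.DataArray(np.arange(4.0) * 2, coords={'t': np.arange(4)}, dims='t')

# Identical 't' indexes: no reindexing is needed, and with copy=False the
# results can share memory with (or simply be) the original objects.
a2, b2 = xr.align(a, b, join='inner', copy=False)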
Example #11
pr = xray.open_dataset(os.path.join(direc, pr_file))  ## load precip

## adjust lat/lon dimensions since the index names are different
lons_new = pr['lon'].values[pr['lon'].values > 180] - 360 
pr['lon'] = lons_new

swe_mask_file = '/raid9/gergel/agg_snowpack/goodleap/SWE/histmeanmask.nc' ## 1s are swe, 0s are no swe 
swe_mask = xray.open_dataset(swe_mask_file)

## rename dimensions
swe_mask.rename({"Latitude": "lat", "Longitude": "lon", "Time": "time"}, inplace=True)

swe_mask = swe_mask.squeeze()

## Dataset join 
swe_mask_align, pr_full = xray.align(swe_mask, pr, join='inner', copy=False)
#swe_mask_align,tmax_align = xray.align(swe_mask,tmax,join='inner',copy=False)

del pr 
gc.collect() 

direc = '/raid/gergel'
pdur_file = 'pduration.nc'
pdur_full = xray.open_dataset(os.path.join(direc,pdur_file)) ## pdur beta parameter for John's transform from Matt Jolly


# In[13]:

# pr_arr = pr_full['precipitation'].values
for j in np.arange(len(pr_full.lat)):
    for k in np.arange(len(pr_full.lon)):
Example #12
    # Check if any data was found, if so add the DS to the list
    if temp_DS is not None:
        c_DS.append(temp_DS)

# Get last period of data (fence post)
print('Downloading data ' + Year_rng[-1].strftime('%Y%m%d%H%M') + ' through ' + EndDate.strftime('%Y%m%d%H%M'))
c_DS.append(Get_data(m, sta_id, Vars_ext, Year_rng[-1].strftime('%Y%m%d%H%M'), EndDate.strftime('%Y%m%d%H%M')))
print('Finished downloading data')


# In[8]:

# In case any sites/variables were missing from a given year
# Align each Dataset (fill missing stations/variables with NaN)
newDS = xray.align(*c_DS, join='outer', copy=True)


# In[9]:

## Combine Datasets by time
ds_ALL = xray.concat(newDS,dim='time')


# In[10]:

## Output to netcdf
ds_ALL.to_netcdf(ncfilename)


# In[ ]:
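The outer-join step above relies on align accepting any number of objects via * unpacking and returning one aligned object per input, in order. A minimal sketch with invented toy arrays:

import numpy as np
import xarray as xray  # assumption: modern xarray imported under the old 'xray' name

pieces = [
    xray.DataArray(np.arange(3.0), coords={'x': [0, 1, 2]}, dims='x'),
    xray.DataArray(np.arange(3.0), coords={'x': [1, 2, 3]}, dims='x'),
    xray.DataArray(np.arange(3.0), coords={'x': [2, 3, 4]}, dims='x'),
]

aligned = xray.align(*pieces, join='outer', copy=True)
print(len(aligned), aligned[0].x.values)  # 3 [0 1 2 3 4] -- one result per input, on the union of labels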
Example #13
## adjust lat/lon dimensions since the index names are different
tmin_lons_new = tmin_f['lon'].values[tmin_f['lon'].values > 180] - 360 
tmin_f['lon'] = tmin_lons_new
tmax_f['lon'] = tmin_lons_new
q_f['lon'] = tmin_lons_new 

print("finished chunking data")

## cut out conus east of 103 for each variable 
swe_mask_file = '/raid9/gergel/agg_snowpack/goodleap/SWE/histmeanmask.nc' ## 1s are swe, 0s are no swe 
swe_mask = xray.open_dataset(swe_mask_file)
## rename dimensions
swe_mask.rename({"Latitude": "lat", "Longitude": "lon", "Time": "time"}, inplace=True)
swe_mask = swe_mask.squeeze()
## Dataset join 
swe_mask_align, tmax = xray.align(swe_mask, tmax_f, join='inner', copy=False)
swe_mask_align, tmin = xray.align(swe_mask, tmin_f, join='inner', copy=False)
swe_mask_align, q = xray.align(swe_mask, q_f, join='inner', copy=False)

print("FEEL THE BERN!") 

## get julian days 
julians = pd.DatetimeIndex(np.asarray(tmin.time)).dayofyear

## delete full arrays of each variable for conus 
del tmax_f,tmin_f,q_f,swe_mask,swe_mask_align 
gc.collect() 

# 100-hr and 1000-hr DFM FUNCTION 

# In[27]: