import os
import pickle
import numpy as np
import netCDF4 as nc
import gdal_tools

#Note: the helper routines referenced below (Create_and_Curate_Covariates,
#the Compute_HRUs_* functions, Calculate_Flow_Matrix,
#Calculate_HRU_Connections_Matrix, the Assign_Parameters_* functions,
#Create_Soils_File, and the Prepare_Meteorology_* functions) are defined
#elsewhere in this module.

def Create_Clusters_And_Connections(workspace, wbd, output, input_dir, nhru, info, hydrobloks_info):

  print("Creating and curating the covariates")
  (covariates, mask) = Create_and_Curate_Covariates(wbd)

  #Determine the HRUs (clustering if semidistributed; grid cell if fully distributed)
  print("Computing the HRUs")
  if hydrobloks_info['model_type'] == 'semi':
    (cluster_ids, nhru) = Compute_HRUs_Semidistributed_Kmeans(covariates, mask, nhru, hydrobloks_info)
    hydrobloks_info['nhru'] = nhru  #keep the info dictionary in sync with the (possibly updated) HRU count
  elif hydrobloks_info['model_type'] == 'full':
    nhru = np.sum(mask == True)
    hydrobloks_info['nhru'] = nhru
    (cluster_ids,) = Compute_HRUs_Fulldistributed(covariates, mask, nhru)

  #Create the netcdf file
  file_netcdf = hydrobloks_info['input_file']
  hydrobloks_info['input_fp'] = nc.Dataset(file_netcdf, 'w', format='NETCDF4')

  #Retrieve some metadata
  metadata = gdal_tools.retrieve_metadata(wbd['files']['mask'])
  resx = metadata['resx']

  #Create the dimensions (netcdf)
  idate = hydrobloks_info['idate']
  fdate = hydrobloks_info['fdate']
  dt = hydrobloks_info['dt']
  ntime = 24*3600*((fdate - idate).days + 1)//dt  #integer division; number of time steps
  nhsu = hydrobloks_info['nhru']
  hydrobloks_info['input_fp'].createDimension('hsu', nhsu)
  hydrobloks_info['input_fp'].createDimension('time', ntime)

  #Create the groups (netcdf)
  hydrobloks_info['input_fp'].createGroup('meteorology')

  #Prepare the flow matrix (dynamic topmodel)
  print("Calculating the flow matrix")
  (flow_matrix, outlet) = Calculate_Flow_Matrix(covariates, cluster_ids, nhru)

  #Prepare the hru connections matrix (darcy clusters)
  cmatrix = Calculate_HRU_Connections_Matrix(covariates, cluster_ids, nhru, resx)

  #Define the metadata
  metadata = gdal_tools.retrieve_metadata(wbd['files']['ti'])

  #Make the output dictionary for the basin
  OUTPUT = {'hsu': {}, 'metadata': metadata, 'mask': mask,
            'flow_matrix': flow_matrix, 'cmatrix': cmatrix}
  OUTPUT['outlet'] = outlet

  #Remember the map of hrus
  OUTPUT['hsu_map'] = cluster_ids

  #Assign the model parameters
  print("Assigning the model parameters")
  if hydrobloks_info['model_type'] == 'semi':
    OUTPUT = Assign_Parameters_Semidistributed(covariates, metadata, hydrobloks_info, OUTPUT, cluster_ids, mask)
  elif hydrobloks_info['model_type'] == 'full':
    OUTPUT = Assign_Parameters_Fulldistributed(covariates, metadata, hydrobloks_info, OUTPUT, cluster_ids, mask)

  #Add the new number of clusters
  OUTPUT['nhru'] = nhru
  OUTPUT['mask'] = mask

  return OUTPUT
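#Illustrative sketch, not part of the original pipeline: the minimal set of
#hydrobloks_info keys the function above reads on the semidistributed path
#(nhru itself is passed as an argument). The file name, dates, and time step
#below are placeholder assumptions.
def _example_hydrobloks_info():
  import datetime
  return {
    'model_type': 'semi',            #'semi' (k-means HRUs) or 'full' (one HRU per grid cell)
    'input_file': 'input_data.nc',   #netCDF4 file the function creates and leaves open in 'input_fp'
    'idate': datetime.datetime(2004, 1, 1),    #first day of the simulation
    'fdate': datetime.datetime(2004, 12, 31),  #last day of the simulation
    'dt': 3600,                      #time step (s); here ntime = 24*3600*366//3600 = 8784
  }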
def Create_Clusters_And_Connections(workspace, wbd, output, input_dir, nclusters, ncores, info, hydrobloks_info):

  print("Creating and curating the covariates")
  (covariates, mask) = Create_and_Curate_Covariates(wbd)

  #Determine the HRUs (clustering if semidistributed; grid cell if fully distributed)
  print("Computing the HRUs")
  if hydrobloks_info['model_type'] == 'semi':
    if hydrobloks_info['clustering_type'] == 'kmeans':
      (cluster_ids, nclusters) = Compute_HRUs_Semidistributed_Kmeans(covariates, mask, nclusters, hydrobloks_info)
    elif hydrobloks_info['clustering_type'] == 'hillslope':
      (cluster_ids, nclusters) = Compute_HRUs_Semidistributed_Hillslope(covariates, mask, nclusters, hydrobloks_info)
    elif hydrobloks_info['clustering_type'] == 'basin':
      (cluster_ids, nclusters) = Compute_HRUs_Semidistributed_Basin(covariates, mask, nclusters, hydrobloks_info)
    hydrobloks_info['nclusters'] = nclusters
  elif hydrobloks_info['model_type'] == 'full':
    nclusters = np.sum(mask == True)
    hydrobloks_info['nclusters'] = nclusters
    (cluster_ids,) = Compute_HRUs_Fulldistributed(covariates, mask, nclusters)

  #Create the netcdf file
  file_netcdf = hydrobloks_info['input_file']
  hydrobloks_info['input_fp'] = nc.Dataset(file_netcdf, 'w', format='NETCDF4')

  #Create the dimensions (netcdf)
  idate = hydrobloks_info['idate']
  fdate = hydrobloks_info['fdate']
  dt = hydrobloks_info['dt']
  ntime = 24*3600*((fdate - idate).days + 1)//dt  #integer division; number of time steps
  nhsu = hydrobloks_info['nclusters']
  hydrobloks_info['input_fp'].createDimension('hsu', nhsu)
  hydrobloks_info['input_fp'].createDimension('time', ntime)

  #Create the groups (netcdf)
  hydrobloks_info['input_fp'].createGroup('meteorology')

  #Prepare the flow matrix
  print("Calculating the flow matrix")
  (flow_matrix, outlet) = Calculate_Flow_Matrix(covariates, cluster_ids, nclusters)

  #Define the metadata
  metadata = gdal_tools.retrieve_metadata(wbd['files']['ti'])

  #Make the output dictionary for the basin
  OUTPUT = {'hsu': {}, 'metadata': metadata, 'mask': mask, 'flow_matrix': flow_matrix}
  OUTPUT['outlet'] = outlet

  #Determine outlet cell
  #covariates['carea'][mask == False] = np.nan
  #outlet_idx = np.where(covariates['carea'] == np.max(covariates['carea'][np.isnan(covariates['carea']) == 0]))
  #outlet_idx = [int(outlet_idx[0]),int(outlet_idx[1])]
  #OUTPUT['outlet'] = {'idx':outlet_idx,'hsu':cluster_ids[outlet_idx[0],outlet_idx[1]]}

  #Remember the map of hrus
  OUTPUT['hsu_map'] = cluster_ids

  #Assign the model parameters
  print("Assigning the model parameters")
  if hydrobloks_info['model_type'] == 'semi':
    OUTPUT = Assign_Parameters_Semidistributed(covariates, metadata, hydrobloks_info, OUTPUT, cluster_ids, mask)
  elif hydrobloks_info['model_type'] == 'full':
    OUTPUT = Assign_Parameters_Fulldistributed(covariates, metadata, hydrobloks_info, OUTPUT, cluster_ids, mask)

  #Create the soil parameters file
  #print("Creating the soil file")
  Create_Soils_File(hydrobloks_info, OUTPUT, input_dir)
  #soils_lookup = Create_Soils_File(hydrobloks_info,OUTPUT,input_dir)

  #Add the new number of clusters
  OUTPUT['nclusters'] = nclusters
  OUTPUT['mask'] = mask

  return OUTPUT
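#Illustrative sketch, not part of the original pipeline: the clustering_type
#branches above can equivalently be written as a table-driven dispatch over
#the same module functions. _example_compute_hrus is a hypothetical helper;
#the lookup is built at call time so no names are resolved on import.
def _example_compute_hrus(covariates, mask, nclusters, hydrobloks_info):
  methods = {'kmeans': Compute_HRUs_Semidistributed_Kmeans,
             'hillslope': Compute_HRUs_Semidistributed_Hillslope,
             'basin': Compute_HRUs_Semidistributed_Basin}
  method = methods[hydrobloks_info['clustering_type']]
  return method(covariates, mask, nclusters, hydrobloks_info)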
def Prepare_Model_Input_Data(hydrobloks_info):

  #Prepare the info dictionary
  info = {}

  #Define the start/end dates
  info['time_info'] = {}
  info['time_info']['startdate'] = hydrobloks_info['idate']
  info['time_info']['enddate'] = hydrobloks_info['fdate']
  info['time_info']['dt'] = hydrobloks_info['dt']

  #Define the workspace
  workspace = hydrobloks_info['workspace']

  #Define the model input data directory
  input_dir = workspace #'%s/input' % workspace

  #Read in the metadata
  #file = '%s/workspace_info.pck' % workspace
  #wbd = pickle.load(open(file))

  #Create the dictionary to hold all of the data
  output = {}

  #Create the Latin Hypercube (Clustering)
  nhru = hydrobloks_info['nhru']
  #ncores = hydrobloks_info['ncores']
  icatch = hydrobloks_info['icatch']

  #Get metadata
  md = gdal_tools.retrieve_metadata('%s/mask_latlon.tif' % workspace)

  #Prepare the input file
  wbd = {}
  wbd['bbox'] = {'minlat': md['miny'], 'maxlat': md['maxy'],
                 'minlon': md['minx'], 'maxlon': md['maxx'],
                 'res': abs(md['resx'])}
  wbd['files'] = {
    'WLTSMC': '%s/theta1500_ea.tif' % workspace,
    'TEXTURE_CLASS': '%s/texture_class_ea.tif' % workspace,
    'cslope': '%s/cslope_ea.tif' % workspace,
    'MAXSMC': '%s/thetas_ea.tif' % workspace,
    'BB': '%s/bb_ea.tif' % workspace,
    'DRYSMC': '%s/thetar_ea.tif' % workspace,
    'fdir': '%s/fdir_ea.tif' % workspace,
    'QTZ': '%s/qtz_ea.tif' % workspace,
    'SATDW': '%s/dsat_ea.tif' % workspace,
    'REFSMC': '%s/theta33_ea.tif' % workspace,
    'mask': '%s/mask_ea.tif' % workspace,
    'SATPSI': '%s/psisat_ea.tif' % workspace,
    'lc': '%s/lc_ea.tif' % workspace,
    'carea': '%s/carea_ea.tif' % workspace,
    'ti': '%s/ti_ea.tif' % workspace,
    'ndvi': '%s/ndvi_ea.tif' % workspace,
    'F11': '%s/f11_ea.tif' % workspace,
    'SATDK': '%s/ksat_ea.tif' % workspace,
    'dem': '%s/dem_ea.tif' % workspace,
    'demns': '%s/demns_ea.tif' % workspace,
    'sand': '%s/sand_ea.tif' % workspace,
    'clay': '%s/clay_ea.tif' % workspace,
    'silt': '%s/silt_ea.tif' % workspace,
    'om': '%s/om_ea.tif' % workspace,
    'bare30': '%s/bare30_ea.tif' % workspace,
    'water30': '%s/water30_ea.tif' % workspace,
    'tree30': '%s/tree30_ea.tif' % workspace,
  }
  wbd['files_meteorology'] = {
    'lwdown': '%s/lwdown.nc' % workspace,
    'swdown': '%s/swdown.nc' % workspace,
    'tair': '%s/tair.nc' % workspace,
    'precip': '%s/precip.nc' % workspace,
    'psurf': '%s/psurf.nc' % workspace,
    'wind': '%s/wind.nc' % workspace,
    'spfh': '%s/spfh.nc' % workspace,
  }

  #Create the clusters and their connections
  output = Create_Clusters_And_Connections(workspace, wbd, output, input_dir, nhru, info, hydrobloks_info)

  #Extract the meteorological forcing
  print("Preparing the meteorology")
  if hydrobloks_info['model_type'] == 'semi':
    Prepare_Meteorology_Semidistributed(workspace, wbd, output, input_dir, info, hydrobloks_info)
  elif hydrobloks_info['model_type'] == 'full':
    Prepare_Meteorology_Fulldistributed(workspace, wbd, output, input_dir, info, hydrobloks_info)

  #Write out the files to the netcdf file
  fp = hydrobloks_info['input_fp']
  data = output

  #Write out the metadata
  grp = fp.createGroup('metadata')
  grp.latitude = (wbd['bbox']['minlat'] + wbd['bbox']['maxlat'])/2
  lon = (wbd['bbox']['minlon'] + wbd['bbox']['maxlon'])/2
  if lon < 0: lon += 360  #store longitude in the [0, 360) convention
  grp.longitude = lon
  metadata = gdal_tools.retrieve_metadata(wbd['files']['mask'])
  grp.dx = metadata['resx']
  #grp.longitude = (360.0 + (wbd['bbox']['minlon'] + wbd['bbox']['maxlon'])/2)

  #Write the HRU mapping
  #CONUS albers metadata
  metadata['nodata'] = -9999.0

  #Save the conus_albers metadata
  grp = fp.createGroup('conus_albers_mapping')
  grp.createDimension('nx', metadata['nx'])
  grp.createDimension('ny', metadata['ny'])
  hmca = grp.createVariable('hmca', 'f4', ('ny', 'nx'))
  hmca.gt = metadata['gt']
  hmca.projection = metadata['projection']
  hmca.description = 'HSU mapping (conus albers)'
  hmca.nodata = metadata['nodata']

  #Save the conus albers mapping
  hsu_map = np.copy(output['hsu_map'])
  hsu_map[np.isnan(hsu_map) == 1] = metadata['nodata']
  hmca[:] = hsu_map

  #Write out the mapping
  file_ca = '%s/hsu_mapping_ea.tif' % workspace
  gdal_tools.write_raster(file_ca, metadata, hsu_map)

  #Map the mapping to regular lat/lon
  file_ll = '%s/hsu_mapping_latlon.tif' % workspace
  os.system('rm -f %s' % file_ll)
  res = wbd['bbox']['res']
  minlat = wbd['bbox']['minlat']
  minlon = wbd['bbox']['minlon']
  maxlat = wbd['bbox']['maxlat']
  maxlon = wbd['bbox']['maxlon']
  log = '%s/log.txt' % workspace
  os.system('gdalwarp -tr %.16f %.16f -dstnodata %.16f -t_srs \'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs\' -te %.16f %.16f %.16f %.16f %s %s >> %s 2>&1' % (res, res, metadata['nodata'], minlon, minlat, maxlon, maxlat, file_ca, file_ll, log))

  #Write a map for the catchment id
  file_icatch = '%s/icatch_latlon.tif' % workspace
  metadata = gdal_tools.retrieve_metadata(file_ll)
  metadata['nodata'] = -9999.0
  tmp = gdal_tools.read_raster(file_ll)
  tmp[tmp >= 0] = hydrobloks_info['icatch']
  gdal_tools.write_raster(file_icatch, metadata, tmp)

  #Retrieve the lat/lon metadata
  #metadata = gdal_tools.retrieve_metadata(file_ll)
  #metadata['nodata'] = -9999.0
  #Save the lat/lon metadata
  #grp = fp.createGroup('latlon_mapping')
  #grp.createDimension('nlon',metadata['nx'])
  #grp.createDimension('nlat',metadata['ny'])
  #hmll = grp.createVariable('hmll','f4',('nlat','nlon'))
  #hmll.gt = metadata['gt']
  #hmll.projection = metadata['projection']
  #hmll.description = 'HSU mapping (regular lat/lon)'
  #hmll.nodata = metadata['nodata']
  #Save the lat/lon mapping
  #hsu_map = np.copy(gdal_tools.read_raster(file_ll))
  #hsu_map[np.isnan(hsu_map) == 1] = metadata['nodata']
  #hmll[:] = hsu_map

  #Write the flow matrix (scipy CSR components: data, indices, indptr)
  flow_matrix = output['flow_matrix']
  nconnections = flow_matrix.data.size
  grp = fp.createGroup('flow_matrix')
  grp.createDimension('connections_columns', flow_matrix.indices.size)
  grp.createDimension('connections_rows', flow_matrix.indptr.size)
  grp.createVariable('data', 'f4', ('connections_columns',))
  grp.createVariable('indices', 'f4', ('connections_columns',))
  grp.createVariable('indptr', 'f4', ('connections_rows',))
  grp.variables['data'][:] = flow_matrix.data
  grp.variables['indices'][:] = flow_matrix.indices
  grp.variables['indptr'][:] = flow_matrix.indptr

  #Write the connection matrices
  #width
  wmatrix = output['cmatrix']['width']
  nconnections = wmatrix.data.size
  grp = fp.createGroup('wmatrix')
  grp.createDimension('connections_columns', wmatrix.indices.size)
  grp.createDimension('connections_rows', wmatrix.indptr.size)
  grp.createVariable('data', 'f4', ('connections_columns',))
  grp.createVariable('indices', 'f4', ('connections_columns',))
  grp.createVariable('indptr', 'f4', ('connections_rows',))
  grp.variables['data'][:] = wmatrix.data
  grp.variables['indices'][:] = wmatrix.indices
  grp.variables['indptr'][:] = wmatrix.indptr

  #Write the outlet information
  outlet = output['outlet']
  grp = fp.createGroup('outlet')
  full = grp.createGroup('full')
  full.createDimension('cell', outlet['full']['hru_org'].size)
  full.createVariable('i', 'i4', ('cell',))
  full.createVariable('j', 'i4', ('cell',))
  full.createVariable('hru_org', 'i4', ('cell',))
  full.createVariable('hru_dst', 'i4', ('cell',))
  full.createVariable('d8', 'i4', ('cell',))
  full.variables['i'][:] = outlet['full']['i']
  full.variables['j'][:] = outlet['full']['j']
  full.variables['hru_org'][:] = outlet['full']['hru_org']
  full.variables['hru_dst'][:] = outlet['full']['hru_dst']
  full.variables['d8'][:] = outlet['full']['d8']
  summary = grp.createGroup('summary')
  summary.createDimension('hru', outlet['summary']['hru_org'].size)
  summary.createVariable('hru_org', 'i4', ('hru',))
  summary.createVariable('hru_dst', 'i4', ('hru',))
  summary.createVariable('counts', 'i4', ('hru',))
  summary.variables['hru_org'][:] = outlet['summary']['hru_org']
  summary.variables['hru_dst'][:] = outlet['summary']['hru_dst']
  summary.variables['counts'][:] = outlet['summary']['counts']
  #outlet = {'full':{'i':outlet_icoord,'j':outlet_jcoord,'hru_org':outlet_hru_org,'hru_dst':outlet_hru_dst,'d8':outlet_d8},
  #          'summary':{'hru_org':outlet_hru_org_summary,'hru_dst':outlet_hru_dst_summary,'counts':counts}}

  #Write the model parameters
  grp = fp.createGroup('parameters')
  vars = ['slope', 'area_pct', 'land_cover', 'channel',
          'dem', 'soil_texture_class', 'ti', 'carea', 'area',
          'BB', 'F11', 'SATPSI', 'SATDW', 'QTZ',
          'WLTSMC', 'MAXSMC', 'DRYSMC', 'REFSMC', 'SATDK',
          'mannings', 'm', 'psoil', 'pksat', 'sdmax']
  for var in vars:
    grp.createVariable(var, 'f4', ('hsu',))
    grp.variables[var][:] = data['hsu'][var]

  #Write other metadata
  #grp = fp.createGroup('metadata')
  #grp.outlet_hsu = data['outlet']['hsu']

  #Remove info from output
  del output['hsu']

  #Add in the catchment info
  output['wbd'] = wbd

  #Close the file
  fp.close()

  return output
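#Illustrative sketch, not part of the original pipeline: the flow matrix is
#stored above as its CSR components, so it can be reassembled with scipy on
#the read side. The file name is a placeholder; note that 'indices' and
#'indptr' are written as 'f4' and must be cast back to integers.
def _example_read_flow_matrix(file_netcdf='input_data.nc'):
  import scipy.sparse
  with nc.Dataset(file_netcdf) as fp:
    grp = fp.groups['flow_matrix']
    data = grp.variables['data'][:]
    indices = grp.variables['indices'][:].astype(np.int64)
    indptr = grp.variables['indptr'][:].astype(np.int64)
  nhsu = indptr.size - 1  #one row per HSU
  return scipy.sparse.csr_matrix((data, indices, indptr), shape=(nhsu, nhsu))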
def Prepare_Model_Input_Data(hydrobloks_info):

  #Prepare the info dictionary
  info = {}

  #Define the start/end dates
  info['time_info'] = {}
  info['time_info']['startdate'] = hydrobloks_info['idate']
  info['time_info']['enddate'] = hydrobloks_info['fdate']

  #Define the workspace
  workspace = hydrobloks_info['workspace']

  #Define the model input data directory
  input_dir = workspace #'%s/input' % workspace

  #Read in the metadata
  file = '%s/workspace_info.pck' % workspace
  wbd = pickle.load(open(file, 'rb'))  #pickle files must be opened in binary mode

  #Create the dictionary to hold all of the data
  output = {}

  #Create the Latin Hypercube (Clustering)
  nclusters = hydrobloks_info['nclusters']
  ncores = hydrobloks_info['ncores']
  icatch = hydrobloks_info['icatch']

  #Prepare the input file
  wbd['files'] = {
    'WLTSMC': '%s/WLTSMC.tif' % workspace,
    'TEXTURE_CLASS': '%s/TEXTURE_CLASS.tif' % workspace,
    'cslope': '%s/cslope.tif' % workspace,
    'MAXSMC': '%s/MAXSMC.tif' % workspace,
    'BB': '%s/BB.tif' % workspace,
    'DRYSMC': '%s/DRYSMC.tif' % workspace,
    'fdir': '%s/fdir.tif' % workspace,
    'QTZ': '%s/QTZ.tif' % workspace,
    'SATDW': '%s/SATDW.tif' % workspace,
    'REFSMC': '%s/REFSMC.tif' % workspace,
    'mask': '%s/mask.tif' % workspace,
    'channels': '%s/channels.tif' % workspace,
    'SATPSI': '%s/SATPSI.tif' % workspace,
    'nlcd': '%s/nlcd.tif' % workspace,
    'carea': '%s/carea.tif' % workspace,
    'ti': '%s/ti.tif' % workspace,
    'ndvi': '%s/ndvi.tif' % workspace,
    'F11': '%s/F11.tif' % workspace,
    'SATDK': '%s/SATDK.tif' % workspace,
    'dem': '%s/dem.tif' % workspace,
    'demns': '%s/demns.tif' % workspace,
    'strahler': '%s/strahler.tif' % workspace,
    #'qbase': '%s/qbase.tif' % workspace,
  }
  wbd['files_meteorology'] = {
    'dlwrf': '%s/nldas/dlwrf/dlwrf.nc' % workspace,
    'dswrf': '%s/nldas/dswrf/dswrf.nc' % workspace,
    'tair': '%s/nldas/tair/tair.nc' % workspace,
    'prec': '%s/nldas/prec/prec.nc' % workspace,
    'pres': '%s/nldas/pres/pres.nc' % workspace,
    'wind': '%s/nldas/wind/wind.nc' % workspace,
    'rh': '%s/nldas/rh/rh.nc' % workspace,
    'apcpsfc': '%s/stageiv/apcpsfc/apcpsfc.nc' % workspace,
  }

  #Create the clusters and their connections
  output = Create_Clusters_And_Connections(workspace, wbd, output, input_dir, nclusters, ncores, info, hydrobloks_info)

  #Extract the meteorological forcing
  print("Preparing the meteorology")
  if hydrobloks_info['model_type'] == 'semi':
    Prepare_Meteorology_Semidistributed(workspace, wbd, output, input_dir, info, hydrobloks_info)
  elif hydrobloks_info['model_type'] == 'full':
    Prepare_Meteorology_Fulldistributed(workspace, wbd, output, input_dir, info, hydrobloks_info)

  #Write out the files to the netcdf file
  fp = hydrobloks_info['input_fp']
  data = output

  #Write out the metadata
  grp = fp.createGroup('metadata')
  grp.latitude = (wbd['bbox']['minlat'] + wbd['bbox']['maxlat'])/2
  grp.longitude = (360.0 + (wbd['bbox']['minlon'] + wbd['bbox']['maxlon'])/2)  #[0, 360) convention

  #Write the HRU mapping
  #CONUS albers metadata
  metadata = gdal_tools.retrieve_metadata(wbd['files']['mask'])
  metadata['nodata'] = -9999.0

  #Save the conus_albers metadata
  grp = fp.createGroup('conus_albers_mapping')
  grp.createDimension('nx', metadata['nx'])
  grp.createDimension('ny', metadata['ny'])
  hmca = grp.createVariable('hmca', 'f4', ('ny', 'nx'))
  hmca.gt = metadata['gt']
  hmca.projection = metadata['projection']
  hmca.description = 'HSU mapping (conus albers)'
  hmca.nodata = metadata['nodata']

  #Save the conus albers mapping
  hsu_map = np.copy(output['hsu_map'])
  hsu_map[np.isnan(hsu_map) == 1] = metadata['nodata']
  hmca[:] = hsu_map

  if hydrobloks_info['create_mask_flag'] == True:

    #Write out the mapping
    file_ca = '%s/hsu_mapping_conus_albers.tif' % workspace
    gdal_tools.write_raster(file_ca, metadata, hsu_map)

    #Map the mapping to regular lat/lon
    file_ll = '%s/hsu_mapping_latlon.tif' % workspace
    os.system('rm -f %s' % file_ll)
    res = wbd['bbox']['res']
    minlat = wbd['bbox']['minlat']
    minlon = wbd['bbox']['minlon']
    maxlat = wbd['bbox']['maxlat']
    maxlon = wbd['bbox']['maxlon']
    log = '%s/log.txt' % workspace
    os.system('gdalwarp -tr %.16f %.16f -dstnodata %.16f -t_srs EPSG:4326 -s_srs EPSG:102039 -te %.16f %.16f %.16f %.16f %s %s >> %s 2>&1' % (res, res, metadata['nodata'], minlon, minlat, maxlon, maxlat, file_ca, file_ll, log))

    #Write a map for the catchment id
    file_icatch = '%s/icatch_latlon.tif' % workspace
    metadata = gdal_tools.retrieve_metadata(file_ll)
    metadata['nodata'] = -9999.0
    tmp = gdal_tools.read_raster(file_ll)
    tmp[tmp >= 0] = hydrobloks_info['icatch']
    gdal_tools.write_raster(file_icatch, metadata, tmp)

    #Add the lat/lon mapping
    #Retrieve the lat/lon metadata
    metadata = gdal_tools.retrieve_metadata(file_ll)
    metadata['nodata'] = -9999.0

    #Save the lat/lon metadata
    grp = fp.createGroup('latlon_mapping')
    grp.createDimension('nlon', metadata['nx'])
    grp.createDimension('nlat', metadata['ny'])
    hmll = grp.createVariable('hmll', 'f4', ('nlat', 'nlon'))
    hmll.gt = metadata['gt']
    hmll.projection = metadata['projection']
    hmll.description = 'HSU mapping (regular lat/lon)'
    hmll.nodata = metadata['nodata']

    #Save the lat/lon mapping
    hsu_map = np.copy(gdal_tools.read_raster(file_ll))
    hsu_map[np.isnan(hsu_map) == 1] = metadata['nodata']
    hmll[:] = hsu_map

  #Write the flow matrix (scipy CSR components: data, indices, indptr)
  flow_matrix = output['flow_matrix']
  nconnections = flow_matrix.data.size
  grp = fp.createGroup('flow_matrix')
  grp.createDimension('connections_columns', flow_matrix.indices.size)
  grp.createDimension('connections_rows', flow_matrix.indptr.size)
  grp.createVariable('data', 'f4', ('connections_columns',))
  grp.createVariable('indices', 'f4', ('connections_columns',))
  grp.createVariable('indptr', 'f4', ('connections_rows',))
  grp.variables['data'][:] = flow_matrix.data
  grp.variables['indices'][:] = flow_matrix.indices
  grp.variables['indptr'][:] = flow_matrix.indptr

  #Write the outlet information
  outlet = output['outlet']
  grp = fp.createGroup('outlet')
  full = grp.createGroup('full')
  full.createDimension('cell', outlet['full']['hru_org'].size)
  full.createVariable('i', 'i4', ('cell',))
  full.createVariable('j', 'i4', ('cell',))
  full.createVariable('hru_org', 'i4', ('cell',))
  full.createVariable('hru_dst', 'i4', ('cell',))
  full.createVariable('d8', 'i4', ('cell',))
  full.variables['i'][:] = outlet['full']['i']
  full.variables['j'][:] = outlet['full']['j']
  full.variables['hru_org'][:] = outlet['full']['hru_org']
  full.variables['hru_dst'][:] = outlet['full']['hru_dst']
  full.variables['d8'][:] = outlet['full']['d8']
  summary = grp.createGroup('summary')
  summary.createDimension('hru', outlet['summary']['hru_org'].size)
  summary.createVariable('hru_org', 'i4', ('hru',))
  summary.createVariable('hru_dst', 'i4', ('hru',))
  summary.createVariable('counts', 'i4', ('hru',))
  summary.variables['hru_org'][:] = outlet['summary']['hru_org']
  summary.variables['hru_dst'][:] = outlet['summary']['hru_dst']
  summary.variables['counts'][:] = outlet['summary']['counts']
  #outlet = {'full':{'i':outlet_icoord,'j':outlet_jcoord,'hru_org':outlet_hru_org,'hru_dst':outlet_hru_dst,'d8':outlet_d8},
  #          'summary':{'hru_org':outlet_hru_org_summary,'hru_dst':outlet_hru_dst_summary,'counts':counts}}

  #Write the model parameters
  grp = fp.createGroup('parameters')
  vars = ['slope', 'area_pct', 'land_cover', 'channel',
          'dem', 'soil_texture_class', 'ti', 'carea', 'area',
          'BB', 'F11', 'SATPSI', 'SATDW', 'QTZ',
          'WLTSMC', 'MAXSMC', 'DRYSMC', 'REFSMC', 'SATDK',
          'mannings', 'm', 'psoil', 'pksat', 'sdmax']
  for var in vars:
    grp.createVariable(var, 'f4', ('hsu',))
    grp.variables[var][:] = data['hsu'][var]

  #Write other metadata
  #grp = fp.createGroup('metadata')
  #grp.outlet_hsu = data['outlet']['hsu']

  #Remove info from output
  del output['hsu']

  #Add in the catchment info
  output['wbd'] = wbd

  #Close the file
  fp.close()

  return output
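#Illustrative sketch, not part of the original pipeline: pulling the per-HSU
#parameter arrays written by either version of Prepare_Model_Input_Data back
#out of the input file. The file name is a placeholder assumption.
def _example_read_parameters(file_netcdf='input_data.nc'):
  with nc.Dataset(file_netcdf) as fp:
    return {name: var[:] for name, var in fp.groups['parameters'].variables.items()}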