Example #1
 def read_ts(self, substance=0, layer=0, row=0, column=0):
     try:
         filename = self.get_concentration_file_from_substance(substance)
         ucn_obj = bf.UcnFile(filename=filename, precision='single')
         return ucn_obj.get_ts(idx=(layer, row, column)).tolist()
     except:
         return []
Example #2
 def read_number_of_layers(self):
     try:
         ucn_obj = bf.UcnFile(filename=self._filename, precision='single')
         number_of_layers = ucn_obj.get_data().shape[0]
         return number_of_layers
     except:
         return 0
Example #3
 def read_kstpkper(self, substance=0):
     try:
         filename = self.get_concentration_file_from_substance(substance)
         ucn_obj = bf.UcnFile(filename=filename, precision='single')
         kstpkper = ucn_obj.get_kstpkper()
         if kstpkper is not None:
             return kstpkper
         return []
     except:
         return []
Example #4
 def read_times(self, substance=0):
     try:
         filename = self.get_concentration_file_from_substance(substance)
         ucn_obj = bf.UcnFile(filename=filename, precision='single')
         times = ucn_obj.get_times()
         if times is not None:
             return times
         return []
     except:
         return []
Example #5
 def read_layer(self, totim, layer):
     try:
         ucn_obj = bf.UcnFile(filename=self._filename, precision='single')
         data = ucn_obj.get_data(totim=totim, mflay=layer).tolist()
         for i in range(len(data)):
             for j in range(len(data[i])):
                 data[i][j] = round(data[i][j], 2)
                 if data[i][j] < -999:
                     data[i][j] = None
         return data
     except:
         return []
Example #6
 def read_layer_by_kstpkper(self, substance=0, kstpkper=(0, 0), layer=0):
     try:
         filename = self.get_concentration_file_from_substance(substance)
         ucn_obj = bf.UcnFile(filename=filename, precision='single')
         data = ucn_obj.get_data(kstpkper=kstpkper, mflay=layer).tolist()
         for i in range(len(data)):
             for j in range(len(data[i])):
                 data[i][j] = round(data[i][j], 2)
                 if data[i][j] > 1e29:
                     data[i][j] = None
         return data
     except:
         return []
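The reader methods in Examples #1 through #6 all wrap the same handful of flopy calls. A minimal standalone sketch of those calls, assuming a single-precision MT3DMS output file named MT3D001.UCN in the working directory:

import flopy.utils.binaryfile as bf

ucn_obj = bf.UcnFile(filename='MT3D001.UCN', precision='single')
times = ucn_obj.get_times()            # list of output times (totim values)
kstpkper = ucn_obj.get_kstpkper()      # (time step, stress period) tuples
layer0 = ucn_obj.get_data(totim=times[-1], mflay=0)   # 2D array for the top layer
series = ucn_obj.get_ts(idx=(0, 10, 10))              # time series at (layer, row, column)
ucn_obj.close()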
Example #7
def Get_Conc(Times, units):
    ucnobj = bf.UcnFile('MT3D001.UCN')
    times = ucnobj.get_times()
    concList = []
    for i in Times:
        concList.append('conc_%dd' % (i))

    conc_dict = {}
    for index, item in enumerate(concList):
        conc_dict[item] = ucnobj.get_data(totim=Times[index])

    density = 1500

    if units == 'g':
        # conversion factor to convert from kg/m**3 to µg/L; prsity (porosity)
        # is assumed to be defined at module level
        cF = (1000 * prsity) / density
    else:
        cF = 10**6

    for k in conc_dict.keys():
        conc_dict[k] = [cF * x for x in conc_dict[k]]

    return conc_dict
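A hypothetical call sketch, assuming prsity is defined at module level and that the values passed in Times correspond to output times stored in the UCN file:

conc = Get_Conc(Times=[100, 200, 365], units='g')
print(list(conc.keys()))   # ['conc_100d', 'conc_200d', 'conc_365d']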
Example #8
def henry_domain(figsize=(15,10)):

	delx = 0.1
	delz = 0.1
	ncol,nlay = 120, 20

	x = np.linspace(0, ncol*delx, num=ncol+1, endpoint=True)
	z = np.linspace(0, -nlay*delz, num=nlay+1,endpoint=True)

	ucn = bf.UcnFile(os.path.join("henry","misc","MT3D001.UCN"))
	times = ucn.get_times()
	conc1,conc2 = ucn.get_data(totim=times[0])[:,0,:].copy(), ucn.get_data(totim=times[1])[:,0,:]
	xc = np.arange(0,ncol*delx,delx)
	zc = np.arange(-delz,-(nlay+1)*delz,-delz)
	X,Z = np.meshgrid(xc,zc)

	f = open(os.path.join("henry","misc", "bore_coords_henry_coarse_conc.dat"), 'r')
	cnames, cx, cz = [], [], []
	for line in f:
		raw = line.strip().split()
		xx = float(raw[1])
		l = int(raw[3])
		zz = z[l - 1] - (delz / 2.0)
		cnames.append(raw[0])
		cx.append(xx)
		cz.append(zz)
	f.close()

	f = open(os.path.join("henry","misc", "bore_coords_henry_coarse_head.dat"), 'r')
	hnames, hx, hz = [], [], []
	for line in f:
		raw = line.strip().split()
		xx = float(raw[1])
		l = int(raw[3])
		zz = z[l - 1] - (delz / 2.0)
		hnames.append(raw[0])
		hx.append(xx)
		hz.append(zz)
	f.close()


	pp_locs = np.loadtxt(os.path.join("henry","misc", "pp_locs.dat"), usecols=[1, 2])


	fig = plt.figure(figsize=figsize)
	ax = plt.axes((0.085, 0.575, 0.875, 0.485),aspect="equal")
	ax2 = plt.axes((0.085, 0.2, 0.875, 0.485),aspect="equal")
	zlim = [z.min(), z.max()]
	xlim = [x.min(), x.max()]
	fp = FontProperties()
	fp.set_size("large")

	ax.contour(X,Z,conc1,levels=[0.1],colors='0.5',linewidths=[2])
	cont = ax2.contour(X,Z,conc2,levels=[0.1],colors='0.5',linewidths=[2])

	ax.scatter(hx, hz, marker='o', edgecolor='k', facecolor="none", s=50, label="head obs")
	ax.scatter(cx, cz, marker='x', color='k', s=25, label="concentration obs")
	ax2.scatter(cx[9], cz[9], marker='x', color='k', s=25)


	for xx, zz, nn in zip(hx, hz, hnames):
		ax.text(xx,zz+0.125,nn[-2:],ha="center",va="bottom",fontsize="large",
		    bbox={"fc":"1.0","boxstyle":"round,pad=0.01","ec":"none"})
		if "10" in nn:
			ax2.text(xx,zz+0.125,nn[-2:],ha="center",va="bottom",fontsize="large",
    				bbox={"fc":"1.0","boxstyle":"round,pad=0.01","ec":"none"})

	ax.scatter(pp_locs[:, 0], pp_locs[:, 1], marker='.', s=3,color='b', label="pilot point")
	ax2.scatter(pp_locs[:, 0], pp_locs[:, 1], marker='.', s=3,color='b', label="pilot point")

	fresh = Rectangle((x.min(), z.min()), delx, nlay * delz, color='b')
	ax.add_patch(fresh)
	fresh2 = Rectangle((x.min(), z.min()), delx, nlay * delz, color='b')
	ax2.add_patch(fresh2)

	salt = Rectangle((x.max() - delx, z.min()), delx,nlay*delz, color='r')
	ax.add_patch(salt)
	salt2 = Rectangle((x.max() - delx, z.min()), delx,nlay*delz, color='r')
	ax2.add_patch(salt2)

	#ax.plot([0,0],[0,0],lw=3,color='0.5',label="50% saltwater")
	ax.plot([0,0],[0,0],lw=2,color='0.5',label="10% saltwater")
	#ax.plot([0,0],[0,0],lw=1,color='0.5',label="1% saltwater")

	handles, labels = ax.get_legend_handles_labels()
	handles.append(fresh)
	handles.append(salt)
	labels.append("freshwater boundary")
	labels.append("saltwater boundary")

	ax2.legend(handles,labels, loc="lower center", 
	        ncol=3,frameon=False,prop=fp,
	        bbox_to_anchor=(0.5,-1.2),scatterpoints=1)

	ax.set_xlim(xlim)
	ax.set_ylim(zlim)
	ax.set_xticklabels([])
	ax2.set_xlim(xlim)
	ax2.set_ylim(zlim)
	ax.set_yticklabels(ax.get_yticks(),fontsize="large")
	ax2.set_yticklabels(ax.get_yticks(),fontsize="large")
	ax2.set_xticklabels(ax.get_xticks(),fontsize="large")
	ax.set_ylabel("depth (m)",fontsize="large")
	ax2.set_ylabel("depth (m)",fontsize="large")
	ax2.set_xlabel("length (m)",fontsize="large",labelpad=0.5)
	ax.text(0.0,0.0,"A.) History-matching stress period",ha="left",va="bottom",fontsize="large")
	ax2.text(0.0,0.0,"B.) Forecast stress period",ha="left",va="bottom",fontsize="large")
	# ax2.annotate(
	#     '', xy=(0.095, -1.95), xycoords='data',
	#     xytext=(9.4, -1.95), textcoords='data',
	#     arrowprops={'arrowstyle': '<->',"linewidth":2.0})
	ax2.annotate(
	    '', xy=(0.095, -1.95), xycoords='data',
	    xytext=(9.0, -1.95), textcoords='data',
	    arrowprops={'arrowstyle': '<->',"linewidth":1.5})
	# ax2.annotate(
	#     '', xy=(0.095, -1.8), xycoords='data',
	#     xytext=(8.85, -1.8), textcoords='data',
	#     arrowprops={'arrowstyle': '<->',"linewidth":1.5})
	#ax2.text(4.25,-1.75,'?',fontsize=50)
	return fig
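A hypothetical usage sketch, assuming the module-level imports (numpy, matplotlib, flopy.utils.binaryfile) and the henry/misc data files referenced inside the function are available; the output filename is a placeholder:

fig = henry_domain(figsize=(15, 10))
fig.savefig('henry_domain.png', dpi=300)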
Example #9
    def run_experiment(self, experiment):
        '''
        Method for running an instantiated model structure. 
        
        This method should always be implemented.
        
        :param experiment: keyword arguments for running the model. The
                           experiment is a dict with the names of the
                           uncertainties as keys, and the values to which
                           these uncertainties should be set.
        '''

        #NetLogo agent attributes to be passed to Python well objects
        #when new wells are created in NetLogo
        nl_read_sys_attribs = ['who', 'xcor', 'ycor']
        nl_read_well_attribs = [
            'who', 'xcor', 'ycor', 'IsCold', 'z0', 'FilterLength', 'T_inj', 'Q'
        ]

        #NetLogo agent attributes to be updated by the Python objects after each period
        nl_update_well_attribs = ['T_modflow', 'H_modflow']
        nl_update_globals = ['ztop', 'Laquifer']

        self.netlogo.command('setup')

        for key, value in experiment.items():
            if key in self.NetLogo_uncertainties:
                try:
                    self.netlogo.command(self.command_format.format(
                        key, value))
                except jpype.JavaException as e:
                    warning('Variable {0} throws exception: {1}'.format(
                        key, str(e)))
                logging.debug(self.netlogo.report(str(key)))
            if key in self.SEAWAT_uncertainties:
                setattr(self, key, value)

        #Set policy parameters if present
        if self.policy:
            for key, value in self.policy.items():
                if (key in self.NetLogo_uncertainties and key != 'name'):
                    self.netlogo.command(self.command_format.format(
                        key, value))
                elif key in self.SEAWAT_uncertainties:
                    setattr(self, key, value)
            logging.info('Policy parameters set successfully')

        #Update NetLogo globals from input parameters
        for var in nl_update_globals:
            self.netlogo.command(
                self.command_format.format(var, getattr(self, var)))

        #Run the NetLogo setup routine, creating the agents
        #Create lists of Python objects based on the NetLogo agents
        self.netlogo.command('init-agents')
        sys_obj_list = update_runtime_objectlist(self.netlogo, [],
                                                 nl_read_sys_attribs,
                                                 breed='system',
                                                 objclass=PySystem)
        well_obj_list, newgrid_flag = update_runtime_objectlist(
            self.netlogo, [],
            nl_read_well_attribs,
            breed='well',
            objclass=PyWell)

        #Assign values for uncertain NetLogo parameters
        logging.info('NetLogo parameters set successfully')

        #self.netlogo.command('init-agents')

        #Calculate geohydrological parameters linked to variable inputs
        rho_b = self.rho_solid * (1 - self.PEFF)
        kT_b = self.kT_s * (1 - self.PEFF) + self.kT_f * self.PEFF
        dmcoef = kT_b / (self.PEFF * self.rho_f * self.Cp_f) * 24 * 3600
        trpt = self.al * self.trp_mult
        trpv = trpt

        #Initialize PyGrid object
        itype = mt3.Mt3dSsm.itype_dict()
        grid_obj = PyGrid()
        grid_obj.make_grid(well_obj_list,
                           dmin=self.dmin,
                           dmax=self.dmax,
                           dz=self.dz,
                           ztop=self.ztop,
                           zbot=self.zbot,
                           nstep=self.nstep,
                           grid_extents=self.grid_extents)

        #Initial arrays for grid values (temperature, head) - for this case, assumes no groundwater flow
        #and uniform temperature
        grid_obj.ncol = len(grid_obj.XGR) - 1
        grid_obj.delr = np.diff(grid_obj.XGR)
        grid_obj.nrow = len(grid_obj.YGR) - 1
        grid_obj.delc = -np.diff(grid_obj.YGR)

        grid_obj.top = self.ztop * np.ones([grid_obj.nrow, grid_obj.ncol])
        botm_range = np.arange(self.zbot, self.ztop, self.dz)[::-1]
        botm_2d = np.ones([grid_obj.nrow, grid_obj.ncol])
        grid_obj.botm = botm_2d * botm_range[:, None, None]
        grid_obj.nlay = len(botm_range)

        grid_obj.IBOUND, grid_obj.ICBUND = boundaries(
            grid_obj)  #Create grid boundaries

        #Initial arrays for grid values (temperature, head)
        init_grid = np.ones((grid_obj.nlay, grid_obj.nrow, grid_obj.ncol))
        grid_obj.temp = 10. * init_grid

        grid_obj.HK = self.HK * init_grid
        grid_obj.VK = self.VK * init_grid

        #Set initial heads according to groundwater flow (based on mfLab Utrecht model)
        y_array = np.array([(grid_obj.YGR[:-1] - np.mean(grid_obj.YGR[:-1])) *
                            self.PEFF * -self.gwflow_y / 365 / self.HK])
        y_tile = np.array([np.tile(y_array.T, (1, grid_obj.ncol))])
        x_array = (grid_obj.XGR[:-1] - np.mean(
            grid_obj.XGR[:-1])) * self.PEFF * -self.gwflow_x / 365 / self.HK
        y_tile += x_array
        grid_obj.head = np.tile(y_tile, (grid_obj.nlay, 1, 1))

        #Set times at which to read SEAWAT output for each simulation period
        timprs = np.array([self.perlen])
        nprs = len(timprs)
        logging.info('SEAWAT parameters set successfully')

        #Iterate the coupled model
        for period in range(self.run_length):

            #Set up the text output from NetLogo
            commands = []
            self.fns = {}
            for outcome in self.outcomes:
                #if outcome.time:
                name = outcome.name
                fn = r'{0}{3}{1}{2}'.format(self._working_directory, name,
                                            '.txt', os.sep)
                self.fns[name] = fn
                fn = '"{}"'.format(fn)
                fn = fn.replace(os.sep, '/')

                if self.netlogo.report('is-agentset? {}'.format(name)):
                    #If name is name of an agentset, we
                    #assume that we should count the total number of agents
                    nc = r'{2} {0} {3} {4} {1}'.format(fn, name, 'file-open',
                                                       'file-write', 'count')
                else:
                    #It is not an agentset, so assume that it is
                    #a reporter / global variable
                    nc = r'{2} {0} {3} {1}'.format(fn, name, 'file-open',
                                                   'file-write')
                commands.append(nc)

            c_out = ' '.join(commands)
            self.netlogo.command(c_out)

            logging.info(' -- Simulating period {0} of {1}'.format(
                period, self.run_length))
            #Run the NetLogo model for one tick
            self.netlogo.command('go')
            logging.debug('NetLogo step completed')

            #Create placeholder well list - required for MODFLOW WEL package if no wells active in NetLogo
            well_LRCQ_list = {}
            well_LRCQ_list[0] = [[0, 0, 0, 0]]
            ssm_data = {}
            ssm_data[0] = [[0, 0, 0, 0, itype['WEL']]]

            #Check the well agents which are active in NetLogo, and update the Python objects if required
            #The newgrid_flag indicates whether or not the grid should be recalculated to account for changes
            #in the list of active wells
            if well_obj_list:
                well_obj_list, newgrid_flag = update_runtime_objectlist(
                    self.netlogo, well_obj_list, nl_read_well_attribs)

            if well_obj_list and newgrid_flag:
                #If the list of active wells has changed and if there are active wells, create a new grid object
                newgrid_obj = PyGrid()
                newgrid_obj.make_grid(well_obj_list,
                                      dmin=self.dmin,
                                      dmax=self.dmax,
                                      dz=self.dz,
                                      ztop=self.ztop,
                                      zbot=self.zbot,
                                      nstep=self.nstep,
                                      grid_extents=self.grid_extents)
                #Interpolate the temperature and head arrays to match the new grid
                newgrid_obj.temp = grid_interpolate(grid_obj.temp[0, :, :],
                                                    grid_obj, newgrid_obj)
                newgrid_obj.head = grid_interpolate(grid_obj.head[0, :, :],
                                                    grid_obj, newgrid_obj)
                #Use the new simulation grid
                grid_obj = newgrid_obj

            logging.debug('Python update completed')

            if well_obj_list:
                for i in well_obj_list:
                    #Read well flows from NetLogo and locate each well in the simulation grid
                    i.Q = read_NetLogo_attrib(self.netlogo, 'Q', i.who)
                    i.calc_LRC(grid_obj)
                #Create well and temperature lists following MODFLOW/MT3DMS format
                well_LRCQ_list = create_LRCQ_list(well_obj_list, grid_obj)
                ssm_data = create_conc_list(well_obj_list)

            #Initialize MODFLOW packages using FloPy
            #ml = mf.Modflow(self.name, version='mf2005', exe_name=self.swtexe_name, model_ws=self.dirs[0])
            swtm = swt.Seawat(self.name,
                              exe_name=self.swtexe_name,
                              model_ws=self.dirs[0])
            discret = mf.ModflowDis(swtm,
                                    nrow=grid_obj.nrow,
                                    ncol=grid_obj.ncol,
                                    nlay=grid_obj.nlay,
                                    delr=grid_obj.delr,
                                    delc=grid_obj.delc,
                                    laycbd=0,
                                    top=self.ztop,
                                    botm=self.zbot,
                                    nper=self.nper,
                                    perlen=self.perlen,
                                    nstp=self.nstp,
                                    steady=self.steady)

            bas = mf.ModflowBas(swtm,
                                ibound=grid_obj.IBOUND,
                                strt=grid_obj.head)
            lpf = mf.ModflowLpf(swtm,
                                hk=self.HK,
                                vka=self.VK,
                                ss=0.0,
                                sy=0.0,
                                laytyp=0,
                                layavg=0)

            wel = mf.ModflowWel(swtm, stress_period_data=well_LRCQ_list)

            words = ['head', 'drawdown', 'budget', 'phead', 'pbudget']
            save_head_every = 1
            oc = mf.ModflowOc(swtm)
            pcg = mf.ModflowPcg(swtm,
                                mxiter=200,
                                iter1=200,
                                npcond=1,
                                hclose=0.001,
                                rclose=0.001,
                                relax=1.0,
                                nbpol=0)
            #ml.write_input()

            #Initialize MT3DMS packages
            #mt = mt3.Mt3dms(self.name, 'nam_mt3dms', modflowmodel=ml, model_ws=self.dirs[0])
            adv = mt3.Mt3dAdv(
                swtm,
                mixelm=0,  #-1 is TVD
                percel=1,
                nadvfd=1,
                #Particle based methods
                nplane=0,
                mxpart=250000,
                itrack=3,
                dceps=1e-4,
                npl=5,
                nph=8,
                npmin=1,
                npmax=16)
            btn = mt3.Mt3dBtn(swtm,
                              cinact=-100.,
                              icbund=grid_obj.ICBUND,
                              prsity=self.PEFF,
                              sconc=grid_obj.temp,
                              ifmtcn=-1,
                              chkmas=False,
                              nprobs=0,
                              nprmas=1,
                              dt0=0.0,
                              ttsmult=1.5,
                              ttsmax=20000.,
                              ncomp=1,
                              nprs=nprs,
                              timprs=timprs,
                              mxstrn=9999)
            dsp = mt3.Mt3dDsp(swtm,
                              al=self.al,
                              trpt=trpt,
                              trpv=trpv,
                              dmcoef=dmcoef)
            rct = mt3.Mt3dRct(swtm, isothm=0, ireact=0, igetsc=0, rhob=rho_b)
            gcg = mt3.Mt3dGcg(swtm,
                              mxiter=50,
                              iter1=50,
                              isolve=1,
                              cclose=1e-3,
                              iprgcg=0)
            ssm = mt3.Mt3dSsm(swtm, stress_period_data=ssm_data)
            #mt.write_input()

            #Initialize SEAWAT packages
            # mswtf = swt.Seawat(self.name, 'nam_swt', modflowmodel=ml, mt3dmsmodel=mt,
            #                    model_ws=self.dirs[0])
            swtm.write_input()

            #Run SEAWAT
            #m = mswtf.run_model(silent=True)
            m = swtm.run_model(silent=True)
            logging.debug('SEAWAT step completed')

            #Copy Modflow/MT3DMS output to new files
            shutil.copyfile(
                os.path.join(self.dirs[0], self.name + '.hds'),
                os.path.join(self.dirs[0], self.name + str(period) + '.hds'))
            shutil.copyfile(
                os.path.join(self.dirs[0], 'MT3D001.UCN'),
                os.path.join(self.dirs[0], self.name + str(period) + '.UCN'))

            #Create head file object and read head array for next simulation period
            h_obj = bf.HeadFile(
                os.path.join(self.dirs[0], self.name + str(period) + '.hds'))
            grid_obj.head = h_obj.get_data(totim=self.perlen)

            #Create concentration file object and read temperature array for next simulation period
            t_obj = bf.UcnFile(
                os.path.join(self.dirs[0], self.name + str(period) + '.UCN'))
            grid_obj.temp = t_obj.get_data(totim=self.perlen)

            logging.debug('Output processed')

            if well_obj_list:
                for i in well_obj_list:
                    #Update each active Python well object with the temperature and head at its grid location
                    i.T_modflow = grid_obj.temp[i.L[0], i.R, i.C]
                    i.H_modflow = grid_obj.head[i.L[0], i.R, i.C]
                #Update the NetLogo agents from the corresponding Python objects
                write_NetLogo_attriblist(self.netlogo, well_obj_list,
                                         nl_update_well_attribs)

            #As an example of data exchange, we can calculate the fraction of the simulated grid in which
            #the temperature change is significant, and send this value to a NetLogo global variable
            use = subsurface_use(grid_obj, grid_obj.temp)

            write_NetLogo_global(self.netlogo, 'SubsurfaceUse', use)

            logging.debug('NetLogo update completed')

            h_obj.file.close()
            t_obj.file.close()

        self.netlogo.command('file-close-all')
        self._handle_outcomes()
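A hypothetical direct call, with placeholder uncertainty names and values; in practice the keys must appear in self.NetLogo_uncertainties or self.SEAWAT_uncertainties, and the instance ('model' below) must already have its NetLogo link and file paths configured:

experiment = {'HK': 15.0, 'PEFF': 0.3}   # placeholder uncertainty values
model.run_experiment(experiment)         # 'model' is an instance of this class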
Example #10
mswt.btn.prsity[0].fmtin = "(100E15.6)"
mswt.lpf.hk[0].fmtin = "(BINARY)"
mswt.btn.prsity[1].fmtin = '(BINARY)'

# In[7]:
mswt.write_input()
v = mswt.run_model(silent=False, report=True)

# In[8]:

# Post-process the results
import numpy as np
import flopy.utils.binaryfile as bf

# Load data
ucnobj = bf.UcnFile(os.path.join(workspace, 'MT3D001.UCN'), model=mswt)
times = ucnobj.get_times()
concentration = ucnobj.get_data(totim=times[-1])
cbbobj = bf.CellBudgetFile(os.path.join(workspace, 'henry.cbc'))
times = cbbobj.get_times()
qx = cbbobj.get_data(text='flow right face', totim=times[-1])[0]
qz = cbbobj.get_data(text='flow lower face', totim=times[-1])[0]

# Average flows to cell centers
qx_avg = np.empty(qx.shape, dtype=qx.dtype)
qx_avg[:, :, 1:] = 0.5 * (qx[:, :, 0:ncol - 1] + qx[:, :, 1:ncol])
qx_avg[:, :, 0] = 0.5 * qx[:, :, 0]
qz_avg = np.empty(qz.shape, dtype=qz.dtype)
qz_avg[1:, :, :] = 0.5 * (qz[0:nlay - 1, :, :] + qz[1:nlay, :, :])
qz_avg[0, :, :] = 0.5 * qz[0, :, :]
Example #11
if not os.path.exists(fig_dir):
    os.mkdir(fig_dir)
tmp_dir = opj(model_dir, '_tmp')
if not os.path.exists(tmp_dir):
    os.mkdir(tmp_dir)

swt = fp.seawat.Seawat(model_nam, exe_name=exe_name, model_ws=model_dir)
dis = fp.modflow.ModflowDis.load(opj(in_dir, model_nam + '.dis'), swt)

# create head object
head_obj = bf.HeadFile(opj(out_pth, 'SBModel.bhd'))
times = head_obj.get_times()
kskp = head_obj.get_kstpkper()

cl_obj = bf.UcnFile(opj(out_pth, 'SBModel_Cl.ucn'),
                    text='CONCENTRATION',
                    model=swt)
# tstp = cl_obj.get_kstpkper()
# times = cl_obj.get_times()

cbc_obj = bf.CellBudgetFile(opj(out_pth, 'SBModel.cbc'))
ts = cbc_obj.get_times()
sp = cbc_obj.get_kstpkper()
cbc_obj.get_unique_record_names()
nrec = cbc_obj.get_nrecords()

recharge = cbc_obj.get_data(kstpkper=sp[-1], text='RECHARGE', full3D=True)[0]
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(1, 1, 1, aspect='equal')
ax.imshow(recharge[0, :, :], interpolation='nearest')
plt.savefig(os.path.join(fig_dir, 's{}_{}_recharge.png'.format(scen, sched)))
Example #12
def headsReader(filepath_to_modelfiles,
                filepath_to_inputs,
                filepath_to_outputs,
                x_offset,
                y_offset,
                model_start,
                plot_bool,
                extension='.hds',
                layers_desired=None):

    """

    Parameters
    ----------
    filepath_to_modelfiles : str
        string indicating location of modelfiles

    filepath_to_inputs : str
        string indicating location of inputs (.shp, .csv, or .xlsx)

    filepath_to_outputs : str
        string indicating where outputs should be written

    x_offset : int, flt
        x-coordinate of the lower left corner of the model grid

    y_offset : int, flt
        y-coordinate of the lower left corner of the model grid

    model_start : int, flt
        the date of the start of the model in decimal year format

    plot_bool : boolean
        indicates whether to produce plots or not

    extension : str
        indicates the file type to be read in and data extracted from

    layers_desired : int or list of int, optional
        indicates specific layer(s) of interest; if None, data for all layers will be returned

    Returns
    -------
    df : pandas dataframe
        A pandas dataframe with two time columns (decimal_year and model_days) and relevant data at each point for each
        layer. Data is stored in columns formatted as "name_lay#"

    """

    ### IMPORTS ================================================================
    import flopy
    import flopy.utils.binaryfile as bf
    import geopandas as geopd
    import matplotlib.pyplot as plt
    import numpy as np
    import os
    import pandas as pd
    import shutil

    ### HOMEBREW IMPORTS
    from modules.EStLMt3dModule import load_model_run

    ### MAIN SCRIPT ============================================================

    ### read in and adjust model properties (vistas uses relative coords)

    # load in the model using bespoke function originally for EStL
    mf = load_model_run(filepath_to_modelfiles)

    # read all of current mf.dis properties into dis and rewrite dis object with new xul, yul from user inputs
    dis = mf.dis
    dis = flopy.modflow.ModflowDis(mf,
                                   nlay=dis.nlay,
                                   nrow=dis.nrow,
                                   ncol=dis.ncol,
                                   nper=dis.nper,
                                   delr=dis.delr.array,
                                   delc=dis.delc.array,
                                   top=dis.top.array,
                                   botm=dis.botm.array,
                                   perlen=dis.perlen.array,
                                   nstp=dis.nstp.array,
                                   steady=dis.steady.array,
                                   xul=x_offset,
                                   yul=y_offset+np.sum(dis.delc.array)
                                   )

    # read these dis updates into the mf object
    mf.update_modelgrid()


    if 'hds' in extension:
        # read in the head file
        print('Obtaining requested head results.\n')
        binary = bf.HeadFile(os.path.join(mf.model_ws, mf.name + '.hds'))
    elif 'ucn' in extension:
        # find the correct file name
        print('Obtaining requested concentration results.\n')
        possible_files = []
        path = mf.model_ws; found = False;
        while not found:
            for item in os.listdir(path):
                if '.ucn' in item:
                    possible_files.append(item)
            if len(possible_files) > 1:
                print('Selected possible filenames include:')
                for enum, item in enumerate(possible_files):
                    print(enum, item)
                idx = int(input('Please input the number of desired filename.\r'))
                fname = possible_files[idx]; found = True;
            elif len(possible_files) == 1:
                fname = possible_files[0]; found = True;
            elif len(possible_files) < 1:
                print('No file found in: ' + path + '\n')
                a = path.split('/')
                try:
                    _ = a.pop(-1)
                    # re-run the search one folder up in the directory tree
                    path = '/'.join(a)
                except IndexError:
                    print("***ISWS***: Problems finding filenames in directory: " + mf.model_ws)
                    path = input('Please input location of {} file.\r'.format(extension))
        # read in the data
        binary = bf.UcnFile(os.path.join(path, fname))

    # load data
    raw_data = binary.get_alldata()
    binary.close()

    # ignore what I assume are inactive cells with negative heads
    raw_data = np.where(raw_data < 0, np.nan, raw_data)

    ### create geopandas array of the model domain to test for points outside domain (won't have .prj)
    dis.export(filepath_to_outputs+'grid.shp')

    # steal .prj from input and copy into location w/ grid.shp

    ### look for observations source in inputs folder and copy .prj to outputs for grid.shp
    loc_list = [] # for multiple files
    for item in os.listdir(filepath_to_inputs):

        fn, fext = os.path.splitext(filepath_to_inputs + item)

        if fext == '.shp':
            obs_pts = geopd.read_file(fn+fext)

        if fext == '.prj':
            shutil.copy(fn+fext, filepath_to_outputs+'grid.prj')

        if fext == '.xlsx':
            # import all sheets as a dict of dataframes, dict keys will be sheet names
            xl_dict = pd.read_excel(fn+fext, sheet_name=None)
            for key in xl_dict.keys():
                if 'lamx' in xl_dict[key].keys():
                    loc_list.append([ key, xl_dict[key]['lamx'][0], xl_dict[key]['lamy'][0] ])

            obs_pts = pd.DataFrame(loc_list)
            obs_pts.columns = ['name', 'lamx', 'lamy']

            # convert pd to geopd
            obs_pts = geopd.GeoDataFrame(obs_pts,
                                         geometry=geopd.points_from_xy(obs_pts.lamx,
                                                                       obs_pts.lamy))

    # import grid shapefile now that we have the .prj there
    grd_shp = geopd.read_file(filepath_to_outputs + 'grid.shp')

    # clip points shapefile with grid shapefile and report how many have been excluded
    num_pts_og = len(obs_pts)
    obs_pts = geopd.clip(obs_pts, grd_shp, keep_geom_type=True)
    obs_pts = obs_pts.reset_index(drop=True)
    print("ISWS: " + str(num_pts_og - len(obs_pts)) + " points were outside of the model area and are not considered.")

    ### loop through observation points and create hydrographs

    # first, get date/time columns sorted, superlist and colnames will eventually become end dataframe
    superlist = []
    colnames = []
    decimal_year = []
    for iii, perlen in enumerate(mf.dis.perlen.array):
        decimal_year.append(model_start + (np.sum(mf.dis.perlen.array[:iii])/365.25))

    model_days = list(np.cumsum(mf.dis.perlen.array))
    superlist.append(decimal_year)
    colnames.append('decimal_year')
    superlist.append(model_days)
    colnames.append('model_days')

    # now loop through points, layers, stress periods to get requested data at each point for each layer through time
    if layers_desired is None:
        layers_desired = range(raw_data.shape[1])
    else:
        if not isinstance(layers_desired, list):
            layers_desired = [layers_desired]

    for iii in obs_pts.index:

        if plot_bool:
            fig, ax = plt.subplots(1, 1, figsize=(8, 6))

        pt_r, pt_c = dis.get_rc_from_node_coordinates(obs_pts.loc[iii, 'geometry'].x,
                                                      obs_pts.loc[iii, 'geometry'].y,
                                                      local=False)

        for lay in layers_desired:

            pt_dat = []
            for sp in range(mf.dis.nper):

                pt_dat.append(raw_data[sp, lay, pt_r, pt_c])

            if plot_bool:
                ax.plot(pt_dat, label='layer '+str(lay))
                ax.legend()

            superlist.append(pt_dat)
            colnames.append(obs_pts.name[iii]+'_lay'+str(lay))

    df = pd.DataFrame(superlist).transpose()
    df.columns = colnames
    return df
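A hypothetical call, with placeholder paths and offsets; the returned dataframe can then be written out for further processing:

df = headsReader(filepath_to_modelfiles='model/',
                 filepath_to_inputs='inputs/',
                 filepath_to_outputs='outputs/',
                 x_offset=0.0,
                 y_offset=0.0,
                 model_start=2000.0,
                 plot_bool=False,
                 extension='.hds',
                 layers_desired=[0, 1])
df.to_csv('outputs/hydrographs.csv', index=False)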
Example #13
 def read_ts(self, layer, row, column):
     try:
         ucn_obj = bf.UcnFile(filename=self._filename, precision='single')
         return ucn_obj.get_ts(idx=(layer, row, column)).tolist()
     except:
         return []
Example #14
 def read_times(self):
     try:
         ucn_obj = bf.UcnFile(filename=self._filename, precision='single')
         return ucn_obj.get_times()
     except:
         return []
Example #15
        cmuref=0.0,  # solute influence on viscosity
        invisc=-1,
        visc=-1,
        extension='vsc')
    mswtf.write_input()
    oc = mf.ModflowOc(mswtf)
    m = mswtf.run_model(silent=True)  # Run SEAWAT
    '''Copy Modflow/MT3DMS output to new files so they won't be overwritten in the next timestep.'''
    shutil.copyfile(os.path.join(dirs[0], name + '.hds'),
                    os.path.join(dirs[0], name + str(period) + '.hds'))
    shutil.copyfile(os.path.join(dirs[0], 'MT3D001.UCN'),
                    os.path.join(dirs[0], name + str(period) + 'S1' + '.UCN'))
    '''Create head & concentrations file object and read head & concentrations arrays for next simulation period'''
    h_obj = bf.HeadFile(os.path.join(dirs[0], name + str(period) + '.hds'))
    grid_obj.head = h_obj.get_data(totim=perlen)
    t_obj = bf.UcnFile(
        os.path.join(dirs[0], name + str(period) + 'S1' + '.UCN'))
    grid_obj.temp = t_obj.get_data(totim=perlen)

    if well_obj_list:
        for i in well_obj_list:  # Update each active Python well object with the temperature and head at its grid location
            i.H_modflow = grid_obj.head[i.L[-1], i.R, i.C]
            i.T_modflow = np.average(
                grid_obj.temp[i.start_idx:i.stop_idx, i.R, i.C]
            )  # average over all cells of the injection well (start_idx to stop_idx)
    '''Save temperature monitoring point data to the results array'''
    for m in range(len(mon_LRC_list)):
        RES[period, m] = grid_obj.temp[int(mon_LRC_list[m, 0]),
                                       int(mon_LRC_list[m, 1]),
                                       int(mon_LRC_list[m, 2])]
    '''Save the info to the Run_output file'''
    for j in range(len(well_obj_list)):
Example #16
swt.write_input()

# ## Run the model

success, buff = swt.run_model(silent=True, report=True)
if not success:
    raise Exception("SEAWAT did not terminate normally.")

# ## Post-process the results

import numpy as np
import flopy.utils.binaryfile as bf

# ### Load the concentration data

ucnobj = bf.UcnFile("MT3D001.UCN", model=swt)
times = ucnobj.get_times()
concentration = ucnobj.get_data(totim=times[-1])

# ### Load the cell-by-cell flow data

cbbobj = bf.CellBudgetFile("henry.cbc")
times = cbbobj.get_times()
qx = cbbobj.get_data(text="flow right face", totim=times[-1])[0]
qy = np.zeros((nlay, nrow, ncol), dtype=float)
qz = cbbobj.get_data(text="flow lower face", totim=times[-1])[0]

# ### Create a plot with concentrations and flow vectors

import matplotlib.pyplot as plt
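The plotting cell itself is not reproduced above; a minimal sketch of one way to draw the concentration field with the flow vectors using flopy's PlotCrossSection (figure layout and arrow spacing are placeholders, not the original figure):

import flopy

fig = plt.figure(figsize=(10, 5))
ax = fig.add_subplot(1, 1, 1, aspect='equal')
xsect = flopy.plot.PlotCrossSection(model=swt, ax=ax, line={'row': 0})
arr = xsect.plot_array(concentration)
xsect.plot_vector(qx, qy, -qz, color='white', kstep=3, hstep=3)  # -qz so downward flow points down
plt.colorbar(arr, shrink=0.5, ax=ax)
ax.set_title('Simulated concentrations')
plt.savefig('henry_concentration.png')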
Example #17
ax3.grid()
ax3.set_xlim(-10, 375)
#ax3.set_ylim(1.45,2)
#ax3.set_ylim(0.00001,1000)
#ax3.set_yscale('log')
ax3.set_xlabel('Elapsed time [days]')
ax3.set_ylabel('Hydraulic head [m]')
titleText = 'Hydraulic heads: Layer %i' % lyr
ax3.set_title(titleText, loc='left')
ax3.legend(loc='upper right')

# ===== Basic concentration conditions =====================================
fig = plt.figure(figsize=(14, 10))

# Getting concentration data
ucnobj = bf.UcnFile('MT3D001.UCN')

#print(ucnobj.list_records()) # get values
times = ucnobj.get_times()  # simulation time
times_30d = times[29]
times_60d = times[59]
times_100d = times[99]
times_180d = times[179]
times_365d = times[364]

conc_30d = ucnobj.get_data(totim=times_30d)
conc_60d = ucnobj.get_data(totim=times_60d)
conc_100d = ucnobj.get_data(totim=times_100d)
conc_180d = ucnobj.get_data(totim=times_180d)
conc_365d = ucnobj.get_data(totim=times_365d)
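A minimal sketch of plotting one of the extracted arrays on the figure created above (layer index and subplot layout are placeholders):

ax = fig.add_subplot(2, 3, 1, aspect='equal')
im = ax.imshow(conc_30d[0, :, :], interpolation='nearest')
ax.set_title('Concentration after 30 days')
plt.colorbar(im, ax=ax, shrink=0.7)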
Example #18
    
         #   18) Run the model and measure the time
         t0 = time()
         #   run the model
         v = mswt.run_model(silent = False, report = True)
         for idx in range(-3, 0):
             print(v[1][idx])
         #   stop measuring time and calculate total run time
         t1 = time()
         run_time = t1 - t0
 
         #  19) Read model output; heads, concentrations and cell budget flow
         ml_results = flopy.modflow.Modflow.load(modelname + ".nam", model_ws = out_dir, verbose = False, check = False, exe_name = "mfnwt")
         hdsobj = bf.HeadFile(os.path.join(out_dir, modelname + '.hds'), model = ml_results)
         head = hdsobj.get_alldata()
         ucnobj = bf.UcnFile(os.path.join(out_dir, 'MT3D001.UCN'), model = ml_results)
         time_steps = ucnobj.get_times()
         conc = ucnobj.get_alldata()
         cbbobj = bf.CellBudgetFile(os.path.join(out_dir, modelname + '.cbc'))
         times_heads = cbbobj.get_times()       
         
         utm_lst = []

         # transform lat/lon coordinates to UTM
         for i in range(0, pt_x_lst.size):
             utm_lst.append(utm.from_latlon(float(pt_y_lst[i]), float(pt_x_lst[i])))

         utm_array = np.array(utm_lst)
         utm_array.shape