Example #1
0
	def maskSpace(self, maskf, variable, value=1):
		""" Set a grid mask

		Keyword arguments:

		maskf:*string*
		   NetCDF file containing the masking field

		variable:*string*
		   Variable in the NetCDF file that is the mask

		value:*int*
		   The masking value of the mask
		"""

		c = bc()
		if maskf is not None:
			try:
				f=DataFile(maskf, mode='r', open=True)
				#print "\n%sOpened spacial mask file %s, variable=%s, val=%s%s\n"%(c.yellow, maskf, variable, value, c.clear)
				mask = f.variables[variable][0][0]
				#print "type(mask)=%s, mask.shape=%s"%(type(mask), mask.shape)
				self.space = mask==value
				if type(self.space) != type(mask):
					print "%sError! self.space's type changed to %s%s"%(c.red, type(self.space), c.clear)
				f.close()
			except Exception as ex:
				print "%sSomething went wrong masking space with mask file %s (exists?: %r) %s%s"%(c.red, maskf, os.path.isfile(maskf), ex, c.clear)
				raise
Example #2
0
	def loadDims(filename):
		""" Load the dimensions from a netcdf file, then close the file.
		    Validator initializer also does this.

		Keyword Arguments:

		filename:*string*
		   Netcdf file name

		Returns:
		   dict of ni,nj,nk,nt
		"""

		conc = DataFile(filename, mode='r', open=True)
		try:
			dims={'ni': conc.dimensions['COL'], \
			'nj': conc.dimensions['ROW'], \
			'nk': conc.dimensions['LAY']}

			# TSTEP is unlimited, so python has problems reading it
			# So instead we'll examine the shape of a variable
			# Let's assume TFLAG exists
			shape = conc.variables['TFLAG'].shape
			# This first element is TSTEP
			dims['nt'] = shape[0]
		finally:
			# Close the file even when a dimension or TFLAG is missing
			# (previously it leaked on any exception above)
			conc.close()

		return dims
Example #3
0
	def initForceFile(self, conc, fpath, species = None):
		""" Initialize a forcing file.
			This method opens the NetCDF file in read/write mode, copies the
			dimensions, copies I/O Api attributes over, and any other common
		    initialization that should be applied to all files

		Keyword Arguments:

		conc:*DataFile*
		   Concentration file to use as a template

		fpath:*string*
		   Path (dir and name) of the forcing file to initialize

		species:*string[]*
		   List of species to create

		Returns:

		NetCDFFile
		   Writable NetCDF File
		"""

		# isinstance(None, DataFile) is already False, so this also rejects None
		if not isinstance(conc, DataFile):
			raise ValueError("Concentration file must be a DataFile")

		# Make sure the concentration file is open
		conc.open()

		# Debug colours
		c=bc()

		# fpath should not exist
		if os.path.exists(fpath):
			# TEMP, remove
			os.remove(fpath)
			Forcing.debug("Deleted %s!"%(fpath))
			#raise IOError("%s already exists."%fpath)

		force = DataFile(fpath, mode='w', open=True)

		# Exceptions, so we don't needlessly create huge forcing files
		exceptions={'LAY': self.nk_f}
		Forcing.copyDims(conc, force, exceptions=exceptions)
		Forcing.copyIoapiProps(conc, force)

		if species is None:
			species = self.species

		# First, check the sample concentration file vs the concentration file
		try:
			var = conc.variables['TFLAG']
		except IOError:
			# Pointless try loop for now, but I'll add to it later if needed.
			raise

		if var.shape[0] != self.nt:
			# Fixed typo in message ("not not match" -> "do not match")
			raise BadSampleConcException("Input file's dimensions (time steps) do not match those of the sample concentration file!  Cannot continue.")

		# Create the variables we'll be writing to, all zero-initialized.
		# The zero field is hoisted out of the loop: it is identical for every
		# species, and previously `z` was undefined in the except handler when
		# createVariable itself raised.
		z=np.zeros((self.nt,self.nk_f,self.nj,self.ni), dtype=np.float32)
		for s in species:
			try:
				var = force.createVariable(s, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'))
				var[:,:,:,:] = z
			except (IOError, ValueError) as ex:
				print "%sWriting error %s%s when trying to create variable %s (%sTSTEP=%d, LAY=%d, ROW=%d, COL=%d%s)=%s%s%s in today's file.\n"%(c.red, type(ex), c.clear, s, c.blue, self.nt, self.nk_f, self.nj, self.ni, c.clear, c.orange, str(z.shape), c.clear), ex
				print "Current variable names: %s\n"%(" ".join(map(str, force.variables.keys())))

		# Copy over TFLAG
		vsrc = conc.variables['TFLAG'][:]

		force.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME'))
		vdest = force.variables['TFLAG']
		try:
			vdest[:]=vsrc[:]
		except (IOError, ValueError) as ex:
			print "%sWriting error %s%s when trying to write TFLAG variable"%(c.red, type(ex), c.clear)
			print "%sshape(vsrc)=%s, shape(vdest)=%s%s"%(c.cyan, str(vsrc.shape), str(vdest.shape), c.clear)
			raise

		## Fix geocode data
		## http://svn.asilika.com/svn/school/GEOG%205804%20-%20Introduction%20to%20GIS/Project/webservice/fixIoapiProjection.py
		## fixIoapiSpatialInfo

		# Sync the file before sending it off
		force.sync()

		# Close the files
		conc.close()
		force.close()

		# From now on, force will be read and written to, so change the mode
		force.mode='a'

		return force
Example #4
0
class ForcingValidator:
	""" Validates user selections (species, layers, times) against a sample
	    I/O Api NetCDF concentration file, and exposes its dimensions. """

	# Label used for the ground layer in getLayers()
	LAY_SURFACE_NAME='Surface'

	# Grid dimensions, populated from the sample file in __init__
	ni=None  # columns (COL)
	nj=None  # rows (ROW)
	nk=None  # layers (LAY)
	ns=None  # number of variables/species
	nt=None  # time steps (from TFLAG's shape, since TSTEP is unlimited)

	# Open DataFile handle to the sample concentration file
	conc = None

	def __init__(self,filename):
		""" Open the sample concentration file and cache its dimensions.

		Keyword Arguments:

		filename:*string*
		   Netcdf concentration file name
		"""
		self.conc=DataFile(filename, mode='r', open=True)

		self.ni = self.conc.dimensions['COL']
		self.nj = self.conc.dimensions['ROW']
		self.nk = self.conc.dimensions['LAY']

		# TSTEP is unlimited, so python has problems reading it
		# So instead we'll examine the shape of a variable
		# Let's assume TFLAG exists
		shape = self.conc.variables['TFLAG'].shape
		# This first element is TSTEP
		self.nt = shape[0]

		# Bug fix: this previously assigned to a local "ns", leaving the
		# class attribute self.ns forever None
		self.ns = len(self.conc.variables.keys())


	def close(self):
		try:
			self.conc.close()
		except IOError:
			# nothing.. it's closed.
			self.conc = None

	def __del__(self):
		# Release the underlying NetCDF file when the validator is
		# garbage collected.
		self.close()

	def changeFile(self, newfile):
		""" Point this validator at a different concentration file.

		Keyword Arguments:

		newfile:*string*
		   Netcdf file name to switch to
		"""
		# Release the current file before opening the replacement
		self.conc.close()
		self.conc = DataFile(newfile, mode='r', open=True)

	def getDate(self):
		""" Again, not a validator just a getter.  Useful to know the date
		    of the concentration file being used.  Since we're using an
		    I/O Api file, we'll look at the SDATE attribute.

		Returns:

		datetime
		"""

		self.conc.loadDate()
		return self.conc.date

#		# Get the sdate, in the format YYYYJJJ
#		if not hasattr(self.conc, 'SDATE'):
#			raise IOError("Sample concentration file does not seem to be a proper I/O Api file.")
#
#		sdate=str(getattr(self.conc, 'SDATE'))
#		# Sometimes sdate has brackets around it
#		if sdate[0] == "[" and sdate[-1] == "]":
#			sdate=sdate[1:-1]
#		year=int(sdate[:4])
#		jday=int(sdate[4:])
#
#		date = datetime.date(year, 1, 1)
#		days = datetime.timedelta(days=jday-1) # -1 because we started at day 1
#		date=date+days
#
#		return date

	def getLayers(self):
		"""Return a list of layers.  This isn't really a validator, but
		it shares a lot of the functionality.  Assumes that there's
		always a ground layer.

		Returns:
		list of layers
		"""

		num_layers = self.conc.dimensions['LAY']
		layers=[self.LAY_SURFACE_NAME]
		for l in range(2, num_layers):
			layers+=str(l)
		return layers

	def getTimes(self):
		"""Return a list of times(hours).  This isn't really a validator, but
		it shares a lot of the functionality.  Assumes that there's
		always a ground layer.

		Returns:
		list of times
		"""

		shape = self.conc.variables['TFLAG'].shape
		nt = shape[0]

		times=list(xrange(nt))
		# Cut off the 25th time
		for t in range(0, nt-1):
			times[t]=str(t)
		return times

	def getSpecies(self):
		"""Return a list of species.  This isn't really a validator, but
		it shares a lot of the functionality

		Returns:
		list of species
		"""
		vars = self.conc.variables.keys()
		for i, var in enumerate(vars):
			vars[i]=var.upper()

		vars=sorted(vars)

		pri_vars = []
		normal_vars = []

		# Put some of the important ones up top
		for var in vars:
			# Select case basically
			if var in ['O3', 'NO', 'NO2']:
				pri_vars.append(var)
			elif var == "TFLAG":
				continue
			else:
				normal_vars.append(var)

		return pri_vars+normal_vars


	# Check to ensure all the chosen species are available 
	# Species is a string vector
	def validateSpecies(self, species):
		"""Validate species against a sample datafile variables

		Keyword Arguments:
		species -- Vector of species names to use

		Raises:
		ValidationError -  if invalid species is input

		Returns:
		TRUE if valid, false otherwise
		"""

		#print "Got species", '[%s]' % ', '.join(map(str, species))

		vars = self.conc.variables.keys()
		for i in range(0, len(vars)):
			vars[i]=vars[i].upper()

		notFound=[]

		for s in species:
			found=False
			for v in vars:
				if v == s:
					found=True
					break
			if found == False:
				notFound.append(s)

		if len(notFound)>0:
			raise ValidationError("Invalid species: ", '[%s]' % ', '.join(map(str, notFound)))
			return False;
		return True


	def validateLayers(self,layers):
		"""Validate layers against a sample datafile file

		Keyword Arguments:
		layers -- Vector of layers to use

		Raises:
		ValidationError -  if invalid layer is input

		Returns:
		TRUE if valid, false otherwise
		"""

		num_layers = self.conc.dimensions['LAY']
		for l in layers:
			if l > num_layers:
				raise ValidationError("Invalid layer: ", l)
				return False
		return True

	def validateTimes(self,times):
		"""Validate times against a sample datafile file

		Keyword Arguments:
		times -- Vector of times to use

		Raises:
		ValidationError -- if invalid times step is input

		Returns:
		TRUE if valid, false otherwise
		"""

		# Not yet implemented
		return True

	def ValidateDataFileSurface(self, filename):
		""" Validates a datafile by checking if it's 2D surface domain
			(ni,nj) matches the sample file

		Keyword Arguments:

		filename:*string*
		   Netcdf file to check

		Returns:
		True when the file's COL/ROW dimensions match this validator's ni/nj
		"""

		datafile=DataFile(filename, mode='r', open=True)

		try:
			return datafile.dimensions['COL'] == self.ni and datafile.dimensions['ROW'] == self.nj
		finally:
			# The handle was previously never closed (leak)
			datafile.close()
	def loadScalarField(self):
		""" Open up the mortality and population files and read
		their values.  Generate a field to multiply forcing by.

		Forcing = F * Pop * Mortality * VSL

		Raises:
		ForcingException -- when beta, the mortality file or the population
		   file are not configured
		ValueError -- when either file's grid does not match the domain
		ForcingFileDimensionException -- when a file lacks the expected
		   (TSTEP, LAY, ROW, COL) shape
		"""

		if self.beta is None:
			raise ForcingException("Must supply concentration response factor")

		if self._mortality_fname is None or self._mortality_var is None:
			raise ForcingException("Must supply mortality file")

		if self._pop_fname is None or self._pop_var is None:
			raise ForcingException("Must supply population file")

		# This is optional
		#if self.vsl is None:
		#	raise ForcingException("Must specify statistical value of life (in millions)")

		# Open the mortality file
		try:
			mortality = DataFile(self._mortality_fname, mode='r', open=True)
		except IOError:
			Forcing.error("Error!  Cannot open mortality file %s.  File exists? %r"%(self._mortality_fname, os.path.isfile(self._mortality_fname)))
			raise

		# Check dimensions
		if not (mortality.dimensions['COL'] == self.ni and mortality.dimensions['ROW'] == self.nj):
			raise ValueError("Error, dimensions in mortality file %s do not match domain."%self._mortality_fname)

		# Read the field
		try:
			# dims are TSTEP, LAY, ROW, COL.. so skip TSTEP and LAY
			# this should be made more general, or the file should be made better.
			mfld = mortality.variables[self._mortality_var][0][0]
		except IndexError:
			raise ForcingFileDimensionException("Mortality NetCDF file seems to have incompatible dimensions.  Currently require shape (TSTEP, LAY, ROW, COL).  This is marked to be improved, as the data does not vary with time or layer.")

		# Bug fix: this previously compared self._pop_fname to ITSELF, which is
		# never true -- so a separate population file was never opened and the
		# population variable was always read from the mortality file.
		if self._pop_fname != self._mortality_fname:
			mortality.close()

			# Open the population file
			try:
				pop = DataFile(self._pop_fname, mode='r', open=True)
			except IOError:
				Forcing.error("Error!  Cannot open population file %s"%(self._pop_fname))
				raise

			# Check dimensions
			if not (pop.dimensions['COL'] == self.ni and pop.dimensions['ROW'] == self.nj):
				raise ValueError("Error, dimensions in population file %s do not match domain."%self._pop_fname)
		else:
			# Same file; reuse the already-open handle
			pop = mortality

		# Read the field
		try:
			# dims are TSTEP, LAY, ROW, COL.. so skip TSTEP and LAY
			pfld = pop.variables[self._pop_var][0][0]
		except IndexError:
			raise ForcingFileDimensionException("Population NetCDF file seems to have incompatible dimensions.  Currently require shape (TSTEP, LAY, ROW, COL).  This is marked to be improved, as the data does not vary with time or layer.")

		pop.close()

		# Debug, remember, when debugging this against plotted data or fortran
		# code: values like (70,70) started at index 1 whereas we started at
		# index 0, so (70,70)=(69,69)

		# (mfld * pfld) is element wise multiplication, not matrix multiplication
		# Take leap years into account?
		Forcing.debug("[TODO]: Leap years are not yet accounted for.")
		self.timeInvariantScalarMultiplcativeFld = mfld * self.mort_scale / 365.0 * pfld * self.beta
		if self.vsl is not None:
			self.timeInvariantScalarMultiplcativeFld = self.timeInvariantScalarMultiplcativeFld * self.vsl