def maskSpace(self, maskf, variable, value=1): """ Set a grid mask Keyword arguments: maskf:*string* NetCDF file containing the masking field variable:*string* Variable in the NetCDF file that is the mask value:*int* The masking value of the mask """ c = bc() if maskf is not None: try: f=DataFile(maskf, mode='r', open=True) #print "\n%sOpened spacial mask file %s, variable=%s, val=%s%s\n"%(c.yellow, maskf, variable, value, c.clear) mask = f.variables[variable][0][0] #print "type(mask)=%s, mask.shape=%s"%(type(mask), mask.shape) self.space = mask==value if type(self.space) != type(mask): print "%sError! self.space's type changed to %s%s"%(c.red, type(self.space), c.clear) f.close() except Exception as ex: print "%sSomething went wrong masking space with mask file %s (exists?: %r) %s%s"%(c.red, maskf, os.path.isfile(maskf), ex, c.clear) raise
def log(self, msg, level): """ Basic logging function, intended to be called by self.debug(), or self.warn(), etc.. """ c = bc() if level == self.LOG_ERROR: prefix='E' mc=c.red rc=(255,0,0) elif level == self.LOG_WARN: prefix='W' mc=c.yellow rc=(218,165,32) elif level == self.LOG_HELP: prefix='H' mc=c.clear rc=(0,0,0) elif level == self.LOG_INFO: prefix='I' mc=c.blue rc=(0,0,200) elif level == self.LOG_DEBUG: prefix = 'D' mc=c.green rc=(0,200,0) else: prefix='U' mc=c.clear rc=(0,0,0) coloured_msg = "[%s] %s%s%s"%(prefix, mc, msg, c.clear) plain_msg = "[%s] %s\n"%(prefix, msg) self.logger.BeginTextColour(rc) # Implement this stuff ( http://stackoverflow.com/questions/153989/how-do-i-get-the-scroll-position-range-from-a-wx-textctrl-control-in-wxpython/155781#155781 ) to make scroll bar always go to the bottom self.logger.WriteText(plain_msg) self.logger.BeginTextColour((0,0,0)) print coloured_msg
#!/usr/bin/env python

from DoForce import *
import numpy as np
import sys, os
from Scientific.IO.NetCDF import NetCDFFile

# This is mostly for debugging.. Just ansi colours
from bcolours import bcolours as bc
c = bc()

def printVec(vec, c, cstr):
	""" Format *vec* as space-separated %4.2f values.

	Positive entries are wrapped in c.red ... cstr; the result carries no
	trailing separator.
	"""
	pieces = []
	for v in vec:
		if v > 0:
			pieces.append("%s%4.2f%s"%(c.red, v, cstr))
		else:
			pieces.append("%4.2f"%(v))
	return " ".join(pieces)

#
# Cell of choice
winLen=8
#debug_i=2
#debug_j=2
def prepareTimeVectorForAvg(vecs, timezone=0, winLen=8, forwards_or_backwards = default_averaging_direction, debug=False): """ Prepare a vector for a sliding window. Given a vector of values for three days (yesterday, today and tomorrow), this considers the direction of your averaging (8 hours forwards, backwards, or maybe central one day) and outputs a vector that can be used for averaging. So, given:: vecs={-1: yesterday, 0: today, 1: tomorrow} # where, yesterday=[0 ... 0] (24 elements) today =[0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 ... 0] (24 elements) tomorrow =[0 ... 0] (24 elements) winLen=8 timezone=0 forwards_or_backwards=forwards This function will then return a 32 element vector consisting of [today tomorrow[0:8]] If however *timezone* = -5, then this function will return:: [yesterday[-5:] today tomorrow[0:3] Keywords: vec*dict[[]:*numpy.ndarray* keys are the offset of the day (so, -1 is yesterday), values are 24 element vectors starting at index 0 timezone:*int* Shift the vector to reflect your timezone winLen:*int* the size of the window forwards_or_backwards:*bool* True means set it up for a forward avg debug:*bool* Temporary variable that outputs the vector colour coded. Green for parts from yesterday, blue for today and orange for tomorrow Returns: *ndarray((24+winLen), dtype=float32)* The values to be averaged adjusted for whether we're calculating forwards or backwards. """ for d,v in vecs.iteritems(): if len(v) != 24: raise ValueError("Vector %d has an invalid length. Vectors must have a length of %d, given len=%d"%(d, Forcing.dayLen, len(yesterday))) data=np.concatenate(vecs.values(), axis=1) #Forcing.debug("Combined Data:\n%s "%', '.join(map(str, data))) if timezone > 0: raise NotImplementedError("Error! 
Positive timezones are not yet implemented.") # Calculate how many days were given before "today" days_before_today = 0 for d in vecs.keys(): if d<0: days_before_today=days_before_today + 1 else: break if forwards_or_backwards: # Moving forward # Index should start at today. So, 24*number of vectors before day 0 idx_start=days_before_today*Forcing.dayLen # Ending index must span at least 1 day + winLen idx_end = idx_start+Forcing.dayLen+winLen-1 else: # Moving backward # We need all of today, plus a window length's into yesterday idx_start=(days_before_today*Forcing.dayLen) - winLen+1 idx_end = (days_before_today+1)*Forcing.dayLen #Forcing.debug("idx_start=%d, idx_end=%d, diff=%d"%(idx_start, idx_end, idx_end-idx_start)) # Apply time zone i.e. Montreal is -5 if math.floor(timezone) != timezone: raise NotImplementedError("Timezone must be an integer. Fractional timezones (e.g. Newfoundland) is not yet supported.") idx_start = int(idx_start - timezone) idx_end = int(idx_end - timezone) if idx_end > len(data): raise ValueError("Averaging window is outside of the three day data that was predicted! Cannot continue.") #print "idx_start=%d , idx_end=%d, diff=%d, timezone=%d"%(idx_start, idx_end, idx_end-idx_start, timezone) vec = data[idx_start:idx_end] ### # Debug stuff ### if debug: # Colours b = bc() # Create coloured ints, and re-create the list cdata=[] for d,ve in vecs.iteritems(): for val in ve: cdata.append(ci(val, getattr(b, "_%s"%DayIterator.clabels[d]))) # Slice the list the same way cvec = cdata[idx_start:idx_end] print "Preped vec(len=%d) = %s"%(len(cvec), " ".join(map(str, cvec))) same=True for idx, val in enumerate(vec): if val != cvec[idx].val: same=False break if not same: print "%sError!!%s The debug vector does not match the actual returned vector"%(b.red, b.clear) print "Act vec(len=%d) = %s\n"%(len(vec), ' '.join('%4.3f' % v for v in vec)) ### # /Debug stuff ### return vec
def initForceFile(self, conc, fpath, species = None): """ Initialize a forcing file. This method opens the NetCDF file in read/write mode, copies the dimensions, copies I/O Api attributes over, and any other common initialization that should be applied to all files Keyword Arguments: conc:*DataFile* Concentration file to use as a template fpath:*string* Path (dir and name) of the forcing file to initialize species:*string[]* List of species to create Returns: NetCDFFile Writable NetCDF File """ if not isinstance(conc, DataFile) or conc is None: raise ValueError("Concentration file must be a DataFile") # Make sure the concentration file is open conc.open() # Debug c=bc() # fpath should not exist if os.path.exists(fpath): # TEMP, remove os.remove(fpath) Forcing.debug("Deleted %s!"%(fpath)) #raise IOError("%s already exists."%fpath) #print "Opening %s for writing"%fpath force = DataFile(fpath, mode='w', open=True) # Exceptions, so we don't needlessly create huge forcing files exceptions={'LAY': self.nk_f} Forcing.copyDims(conc, force, exceptions=exceptions) Forcing.copyIoapiProps(conc, force) if species is None: species = self.species # First, check the sample concentration file vs the concentration file try: #var = conc.variables['TFLAG'].getValue() var = conc.variables['TFLAG'] except IOError as e: # Pointless try loop for now, but I'll add to it later if needed. raise if var.shape[0] != self.nt: #print "conc.shape = %s, sample.shape = %s"%(str(var.shape), str((self.nt, self.nk, self.nj, self.ni))) raise BadSampleConcException("Input file's dimensions (time steps) not not match those of the sample concentration file! 
Cannot continue.") # Create the variables we'll be writing to #print "Initializing %s"%fpath for s in species: try: var = force.createVariable(s, 'f', ('TSTEP', 'LAY', 'ROW', 'COL')) z=np.zeros((self.nt,self.nk_f,self.nj,self.ni), dtype=np.float32) var[:,:,:,:] = z #Forcing.debug("Created zero variable %s in %s"%(s, force.basename)) except (IOError, ValueError) as ex: print "%sWriting error %s%s when trying to create variable %s (%sTSTEP=%d, LAY=%d, ROW=%d, COL=%d%s)=%s%s%s in today's file.\n"%(c.red, type(ex), c.clear, s, c.blue, self.nt, self.nk_f, self.nj, self.ni, c.clear, c.orange, str(z.shape), c.clear), ex print "Current variable names: %s\n"%(" ".join(map(str, force.variables.keys()))) # Copy over TFLAG vsrc = conc.variables['TFLAG'][:] force.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME')) vdest = force.variables['TFLAG'] try: vdest[:]=vsrc[:] except (IOError, ValueError) as ex: print "%sWriting error %s%s when trying to write TFLAG variable"%(c.red, type(ex), c.clear) print "%sshape(vsrc)=%s, shape(vdest)=%s%s"%(c.cyan, str(vsrc.shape), str(vdest.shape), c.clear) raise ## Fix geocode data ## http://svn.asilika.com/svn/school/GEOG%205804%20-%20Introduction%20to%20GIS/Project/webservice/fixIoapiProjection.py ## fixIoapiSpatialInfo # Sync the file before sending it off force.sync() # Close the files conc.close() force.close() # From now on, force will be read and written to, so change the mode force.mode='a' return force
def produceForcingField(self, progress_callback = None, dryrun = False, debug=False):
	""" Iterate through concentration files, create forcing output netcdf
	files, prepare them, and call the writing function.

	Using the NetCDF 4 library, we could open all the files at once, but I
	don't trust that I/O Api's odd proprietary format would allow the
	library to properly sort it.  This is something to investigate in the
	future.

	Keyword Arguments:

	progress_callback:*function*
	   Used to send back progress information.  When supplied, it is called
	   once per concentration file as::

	      progress_callback(percent_progress:float, current_file:Datafile)

	dryrun:*bool*
	   When True, files are initialized but no fields are computed/written.

	debug:*bool*
	   Emit verbose per-cell debugging output.
	"""

	c = bc()

	Forcing.debug("Processing...  Domain=(ns=%d, nt=%d, nk=%d, ni=%d, nj=%d)"%(len(self.species), self.nt, self.nk, self.ni, self.nj))

	#
	# Iterate through concentration files.
	# In order to keep the code below clean, all the files are
	# initialized in this first loop
	inputs = []
	outputs = []
	for conc in self.conc_files:
		inputs.append(conc)
		force_path=self.generateForceFileName(conc)

		# Initialize the forcing file
		try:
			force = self.initForceFile(conc, force_path)
		except IOError as ex:
			Forcing.warn("Error!  %s already exists.  Please remove the forcing file and try again."%force_path)
			# HACK TEMP, remove
			os.remove(force_path)
			# NOTE(review): after this handler, 'force' is stale (or unbound
			# on the first iteration) yet is still appended below — confirm
			# intended behaviour before removing the HACK above.
		except BadSampleConcException as ex:
			Forcing.error("Error!")
			raise

		outputs.append(force)

		# Clean up and close the files
	# End of initiation loop

	# Create file iterator
	iterator = DayIterator(inputs, outputs)
	rdays = iterator.recommendDaysToCarry(timeZoneFld=self.griddedTimeZoneFld, averaging_window=self.averaging_window)
	Forcing.log("For current setup, using days: %s"%(", ".join("'%s'"%iterator.labels[l].strip() for l in rdays)))

	# Index of concentration file
	for conc_idx in range(0, len(iterator)):
		#print "conc_idx = %d"%conc_idx
		# Should re-think where this dry-run goes and what it's for
		if not dryrun:
			# Grab the files for the carried days (dicts keyed by day offset)
			inputs, outputs = iterator[rdays]

			# What days are we working with?
			if debug:
				Forcing.log("\n")
				for key,f in outputs.iteritems():
					if f is not None:
						bn=f.basename
					else:
						bn="None"
					Forcing.debug("%s: %s"%(iterator.labels[key], bn))
				Forcing.log("\n")

			# Generate a dict of day vectors, e.g. {-1: yesterday, 0: today, 1: tomorrow}
			# where every "day" is a list with species indices (from self.species) for
			# keys, and values of the domain (ni*nj*nk) for that species
			flds = self.generateForcingFields(conc_idx=conc_idx, inputs=inputs, outputs=outputs)
			# Flds[day] is now a ndarray[species][nt][nk][nj][ni]

			for idx_s, species in enumerate(self.species):
				#Forcing.debug("Using species index idx_s: %d = species %s"%(idx_s, species))

				# Get the netcdf variables, get the values, add the values,
				# and write them back to the file
				if debug:
					Forcing.log("\n%si=%d, j=%d, k=0, t=:24"%(c.HEADER, self.debug_i, self.debug_j))
					Forcing.log("GMT:  %s\n"%(' '.join('%4.0d' % v for v in range(1,25))))

				for d,f in outputs.iteritems():
					# Our forcing variable (read from the file)
					#print "f.variables [%r] (%s): "%(f._isOpen, f.basename), f.variables._vars
					fvar = f.variables[species]

					# Our base field (what's already in the file)
					base_fld = fvar[:]

					# Overlay the new field to it (additive)
					sum_fld = fvar[:] + flds[d][idx_s]

					# Assign the field to the DataFile
					fvar[:] = sum_fld

					f.sync()
					f.close()
					inputs[d].close()

					if debug:
						l = iterator.labels[d]
						cl = c.light(iterator.clabels[d])
						cn = getattr(c, iterator.clabels[d])
						cd = c.dark(iterator.clabels[d])
						if d == -1:
							col=c.light('yesterday')
						Forcing.debug("%s%sb: %s"%(cl, l, Forcing.printVec(base_fld[:24,0,self.debug_j,self.debug_i], c, cl)))
						Forcing.debug("%s%s: %s"%(cn, l, Forcing.printVec(flds[d][idx_s][:24,0,self.debug_j,self.debug_i], c, cn)))
						Forcing.debug("%s%ss: %s"%(cd, l, Forcing.printVec(sum_fld[:24,0,self.debug_j,self.debug_i], c, cd)))
						Forcing.debug("\n")
				# endfor outputs
			#endfor species

			# Move the iterator ahead one
			iterator.next()
		# endif dryrun

		# Perform a call back to update the progress.
		# FIX: callback is optional (defaults to None), so guard the call;
		# and divide by the total file count — 'inputs' has been rebound to
		# the per-day dict above, so len(inputs) was the wrong denominator.
		if progress_callback is not None:
			progress_callback(float(conc_idx)/len(self.conc_files), self.conc_files[conc_idx])
def output(colour, msg): c=bc() if colour == 0: colour = c.colours['clear'] print "%s%s%s"%(c.ansiColour(colour), msg.strip(), c.clear)
# Cell of interest (grid indices into the 2-D domain)
i=72
j=19
print "Cell i=%d j=%d"%(i,j)

# Timezone?
# Read the gridded time-zone mask and report this cell's local-time offset
tzf=os.environ['HOME'] + "/cmaq_forcing/forcing/src/GriddedTimeZoneMask.nc"
src=NetCDFFile(tzf, 'r')

# Get the variable
var = src.variables['LTIME']
# First two indices are presumably (TSTEP, LAY) — TODO confirm
data=var.getValue()[0][0]
print "Timezone: %d"%(data[j][i])

# Colours
b = bc()

# Copy over the value
src.close()

# Build the list of concentration (input) file paths
allfiles=[]
#cbase=os.environ['HOME'] + "/cmaq_forcing/forcing/src/morteza/"
cbase='/mnt/mediasonic/opt/output/base/'
cfiles=["CCTM_fwdACONC.20070501", "CCTM_fwdACONC.20070502", "CCTM_fwdACONC.20070503", "CCTM_fwdACONC.20070504"]
for f in cfiles:
	allfiles.append(cbase+f)

# Forcing (output) file names; loop body continues past this chunk
ffiles=["CCTM_fwdFRC.20070501", "CCTM_fwdFRC.20070502", "CCTM_fwdFRC.20070503", "CCTM_fwdFRC.20070504"]
fbase='/mnt/mediasonic/opt/output/morteza/frc-8h-US/'
for f in ffiles: