Example #1
    def get_stack(self, image_data, outfile=None, mask_path=None):

        stack = None
        if mask_path:
            self.mask = self._get_mask_from_raster(mask_path)

        if isinstance(image_data, str):
            self.saved_array = image_data
            stack = load(image_data)

        elif isinstance(image_data, ImageStack):
            self.data = image_data
            stack = self._get_stack_channels()

        if outfile:
            print('saving image stack {}'.format(outfile))
            save(outfile, stack)

        self.final_shape = 1, stack.shape[1], stack.shape[2]
        stack = stack.reshape((stack.shape[0], stack.shape[1] * stack.shape[2]))
        stack[stack == 0.] = np.nan

        if mask_path:
            ms = self.mask.shape
            msk = np.repeat(self.mask.reshape((ms[0], ms[1] * ms[2])), stack.shape[0], axis=0)
            stack = marray(stack, mask=msk)

        self.masked_data_stack = marray(stack, mask=np.isnan(stack))
        self.n = self.masked_data_stack.shape[0]
        del stack

        self.new_array = zeros((1, self.masked_data_stack.shape[1]), dtype=float16)
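
The snippet below is a self-contained sketch of the flatten-and-mask step used in get_stack() above (reshape the band stack to two dimensions, treat zeros as nodata, wrap the result in a masked array). It assumes that load, save and marray in Example #1 come from numpy and numpy.ma; the original module may import them differently, and all shapes and values here are invented for illustration.

# Minimal, runnable sketch of the core reshaping/masking logic in get_stack().
import numpy as np
from numpy.ma import masked_array as marray

stack = np.random.rand(3, 4, 5)          # (bands, rows, cols)
stack[:, 0, 0] = 0.                      # pretend this pixel is nodata
final_shape = 1, stack.shape[1], stack.shape[2]

# flatten each band into one long pixel vector, as get_stack() does
stack = stack.reshape((stack.shape[0], stack.shape[1] * stack.shape[2]))
stack[stack == 0.] = np.nan
masked_data_stack = marray(stack, mask=np.isnan(stack))

print(masked_data_stack.shape)           # (3, 20)
print(masked_data_stack.count(axis=0))   # valid bands per pixel
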
Example #2
    def run(self):
        if not self.vars:
            print 'depthManipNC:\tERROR:\tno variables given to save:', self.vars
            return
        if not exists(self.fni):
            print 'depthManipNC:\tERROR:\tinput file does not exist:', self.fni
            return

        if self.depthFlags == '':
            print 'depthManipNC:\tWARNING:\tNo depth flags given, assuming surface values only.'
            self.depthFlags = zeros(len(self.vars), dtype=int)

        if len(self.vars) != len(self.depthFlags):
            print 'depthManipNC:\tERROR:\tVariables do not match depth flags:', len(
                self.vars), '!=', len(self.depthFlags)
            return
        self.varflag = {}
        for var, flag in zip(self.vars, self.depthFlags):
            self.varflag[var] = flag

        if self.debug:
            print 'depthManipNC:\tINFO:\topening dataset:\t', self.fni
        nci = Dataset(self.fni, 'r')  #Quiet =True)
        #if self.depthFlags and 'zbnd' not in nci.variables.keys():
        #	print 'depthManipNC:\tERROR:\tdepthFlags is ',self.depthFlags,'but inputfile name does contain \'zbnd\''
        #	return

        #check that every requested variable exists in the input file:
        for v in self.vars:
            if v in nci.variables.keys(): continue
            print 'depthManipNC:\tERROR:\tvariable,', v, ', not found in ', self.fni
            return

        #create dataset and header.
        if self.debug:
            print 'depthManipNC:\tINFO:\tCreating a new dataset:\t', self.fno
        nco = Dataset(self.fno, 'w')
        for a in nci.ncattrs():
            if self.debug:
                print 'depthManipNC:\tINFO:\tcopying attribute: \t\"' + a + '\":\t', nci.getncattr(
                    a)
            nco.setncattr(a, nci.getncattr(a))
        appendToDesc = 'Reprocessed on ' + todaystr() + ' by ' + getuser(
        ) + ' using depthManipNC.py'
        try:
            nco.Notes = nci.Notes + '\n\t\t' + appendToDesc
        except:
            nco.Notes = appendToDesc

        # list of variables to save, assuming some conventions
        save = list(
            set(nci.variables.keys()).intersection(set(self.alwaysInclude)))
        save = list(set(sorted(save + self.vars)))

        # create dimensions:
        for d in nci.dimensions.keys():
            if d in [
                    'time',
            ]: nco.createDimension(d, None)
            elif d in [
                    'depth',
                    'z',
            ]: nco.createDimension(d, 1)
            else: nco.createDimension(d, len(nci.dimensions[d]))

        # create Variables:
        for var in save:
            nco.createVariable(var,
                               nci.variables[var].dtype,
                               nci.variables[var].dimensions,
                               zlib=True,
                               complevel=5)

        # Long Names:
        for var in save:
            varln = ''
            long_name = ''
            try:
                long_name = nci.variables[var].long_name
            except:
                if self.debug:
                    print 'depthManipNC:\tWarning:\tNo long_name for ', var
            if var in self.vars:
                long_name += ' ' + self.depthStrings[str(self.varflag[var])]
            if self.timemean: long_name = long_name.replace('Daily', '')
            if self.timemean: long_name = long_name.replace('Monthly', '')
            nco.variables[var].long_name = long_name
            if self.debug:
                print 'depthManipNC:\tInfo:\tAdding long_name:', var, long_name
        # Units:
        for var in save:
            units = ''
            try:
                units = nci.variables[var].units
            except:
                if self.debug:
                    print 'depthManipNC:\tWarning:\tNo units for ', var
            if var in self.vars:
                if self.varflag[var] == 1: units = units.replace('m^3', 'm^2')

            nco.variables[var].units = units
            if self.debug:
                print 'depthManipNC:\tInfo:\tAdding units:', var, units

        if 'zbnd' in nci.variables.keys(): self.zbnd = nci.variables['zbnd'][:]
        if 'bathymetry' in nci.variables.keys():
            self.bathy = nci.variables['bathymetry'][:]

        # Fill Values:
        for var in save:
            if var not in self.vars:  #no change
                arr = nci.variables[var][:]
            else:
                flag = self.varflag[var]
                if self.debug:
                    print 'depthManipNC:\tInfo:\tFilling var:', var, 'flag:', flag
                if flag == 1:
                    arr = (nci.variables[var][:] * abs(
                        (self.zbnd[:, :, :, :, 1] - self.zbnd[:, :, :, :, 0]))
                           ).sum(1)
                    arr = arr[:, None, :, :]
                elif flag in [-2, -1, 0]:
                    arr = nci.variables[var][:, flag, :, :]
                elif flag in [
                        -15,
                ]:
                    arr = self.bottomLayer(nci, var)
                else:
                    arr = nci.variables[var][:, flag, :, :].mean(1)
                    arr = arr[:, None, :, :]

                #while len(arr.shape) < len(nci.variables[var].dimensions): arr = marray(arr[None,:])

            timeav = False
            if self.timemean and len(
                    intersection(['time', 't'],
                                 nci.variables[var].dimensions)):
                timeav = True
            if timeav:
                if self.debug:
                    print 'depthManipNC:\tInfo:\tSaving time averaged var:', var
                arr = marray([
                    arr.mean(0),
                ])
                while len(arr.shape) < len(nci.variables[var].dimensions):
                    arr = marray(arr[None, :])

            if self.debug:
                print 'depthManipNC:\tInfo:\tSaving var:', var, arr.shape, '\tdims:', nci.variables[
                    var].dimensions
            nco.variables[var][:] = arr

        # Close netcdfs:
        nco.close()
        nci.close()
        if self.debug:
            print 'depthManipNC:\tINFO:\tsuccessfully created:\t', self.fno
        return
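
Example #2 collapses the depth dimension of selected NetCDF variables according to per-variable flags and writes the result to a new file. The sketch below reproduces only its attribute/dimension/variable copy pattern, assuming the Dataset used above is netCDF4.Dataset; the toy file names and contents are invented for illustration.

# Minimal, runnable sketch of the NetCDF copy pattern used by depthManipNC.run().
import numpy as np
from netCDF4 import Dataset

with Dataset('tiny_in.nc', 'w') as src:          # build a toy input file
    src.Notes = 'toy file'
    src.createDimension('time', None)
    t = src.createVariable('time', 'f8', ('time',), zlib=True, complevel=5)
    t.units = 'days since 2000-01-01'
    t[:] = np.arange(3)

nci = Dataset('tiny_in.nc', 'r')
nco = Dataset('tiny_out.nc', 'w')
for a in nci.ncattrs():                          # copy global attributes
    nco.setncattr(a, nci.getncattr(a))
for d in nci.dimensions:                         # 'time' stays unlimited
    nco.createDimension(d, None if nci.dimensions[d].isunlimited()
                        else len(nci.dimensions[d]))
for v in ['time']:                               # copy one variable, compressed
    nco.createVariable(v, nci.variables[v].dtype, nci.variables[v].dimensions,
                       zlib=True, complevel=5)
    nco.variables[v].units = nci.variables[v].units
    nco.variables[v][:] = nci.variables[v][:]
nco.close()
nci.close()
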
Example #3
    def run(self):
        if not self.vars:
            print 'pruneNC:\tERROR:\tno variables given to save:', self.vars
            return
        if not exists(self.fni):
            print 'pruneNC:\tERROR:\tinput file does not exist:', self.fni
            return

        nci = Dataset(self.fni, 'r')  #Quiet =True)

        if self.depthInt:
            print 'pruneNC:\tERROR:\tdepth integration is not supported here; use the depthManipNC class instead. pruneNC only removes variables from a netcdf.'
            return

        #check that every requested variable exists in the input file:
        for v in self.vars:
            if v in nci.variables.keys(): continue
            print 'pruneNC:\tERROR:\tvariable,', v, ', not found in ', self.fni
            return

        #create dataset and header.
        if self.debug:
            print 'pruneNC:\tINFO:\tCreating a new dataset:\t', self.fno
        nco = Dataset(self.fno, 'w')
        for a in nci.ncattrs():
            if self.debug:
                print 'pruneNC:\tINFO:\tcopying attribute: \t\"' + a + '\":\t', nci.getncattr(
                    a)
            nco.setncattr(a, nci.getncattr(a))
        appendToDesc = 'Reprocessed on ' + todaystr() + ' by ' + getuser(
        ) + ' using pruneNC.py'
        try:
            nco.Notes = nci.Notes + '\n\t\t' + appendToDesc
        except:
            nco.Notes = appendToDesc

        # list of variables to save, assuming some conventions

        save = list(set(nci.variables.keys()).intersection(set(alwaysInclude)))
        save = list(set(sorted(save + self.vars)))

        # create dimensions:
        for d in nci.dimensions.keys():
            if d in [
                    'time',
            ]: nco.createDimension(d, None)
            else: nco.createDimension(d, len(nci.dimensions[d]))

        # create Variables:
        for var in save:
            nco.createVariable(var,
                               nci.variables[var].dtype,
                               nci.variables[var].dimensions,
                               zlib=True,
                               complevel=5)

        # Long Names:
        for var in save:
            try:
                long_name = nci.variables[var].long_name
            except:
                print 'pruneNC:\tWarning:\tNo long_name for ', var
                long_name = var

            #if self.timemean: long_name.replace('Daily', 'Monthly')
            nco.variables[var].long_name = long_name
            if self.debug:
                print 'pruneNC:\t Adding long_name for ', var, long_name

        # Units:
        for var in save:
            try:
                nco.variables[var].units = nci.variables[var].units
            except:
                print 'pruneNC:\tWarning:\tNo units for ', var

        # Fill Values:
        for var in save:
            if self.debug: print 'pruneNC:\tINFO:\tCopying ', var, ' ...'
            arr = nci.variables[var][:]

            if len(intersection(['time', 't'], nci.variables[var].dimensions)):
                #####
                # Take the time mean of the whole file
                if self.timemean:
                    if self.debug:
                        print 'pruneNC:\tInfo:\tSaving time averaged var:', var
                    arr = marray([
                        arr.mean(0),
                    ])
                    #while len(arr.shape) < len(nci.variables[var].dimensions): arr = marray(arr[None,:])
                if self.timeindex is not None:
                    #####
                    # time index is an integer
                    if isinstance(self.timeindex, (int, float)):
                        arr = marray(arr[self.timeindex])
                        if self.debug:
                            print "pruneNC:\t", self.timeindex, arr.shape
                    #####
                    # time index is a slice
                    if isinstance(self.timeindex, slice):
                        arr = marray(arr[self.timeindex]).mean(0)
                # make sure it's the right shaped array
                while len(arr.shape) < len(nci.variables[var].dimensions):
                    arr = marray(arr[None, ...])

            if self.debug:
                print 'pruneNC:\tInfo:\tSaving var:', var, arr.shape, '\tdims:', nci.variables[
                    var].dimensions
            nco.variables[var][:] = arr
        # Close netcdfs:
        nco.close()
        nci.close()
        if self.debug:
            print 'pruneNC:\tINFO:\tsuccessfully created:\t', self.fno
        return
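
The time handling in Example #3 (the timemean / timeindex branches) can be exercised on its own with plain NumPy. Below is a simplified if/elif version of that logic; marray is assumed to be numpy.ma.masked_array, and the array shape is invented.

# Stand-alone sketch: time-mean (or time-index) an array, then pad axes back
# so the result keeps the variable's original number of dimensions.
import numpy as np
from numpy.ma import masked_array as marray

arr = np.arange(2 * 3 * 4, dtype=float).reshape(2, 3, 4)   # (time, lat, lon)
ndims = arr.ndim

timemean = True
timeindex = None            # could also be an int or a slice, as in Example #3
if timemean:
    arr = marray([arr.mean(0)])           # (1, lat, lon)
elif isinstance(timeindex, int):
    arr = marray(arr[timeindex])          # (lat, lon)
elif isinstance(timeindex, slice):
    arr = marray(arr[timeindex]).mean(0)  # (lat, lon)

while arr.ndim < ndims:                   # restore the leading time axis
    arr = marray(arr[None, ...])
print(arr.shape)                          # (1, 3, 4)
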
Example #4
  def run(self):	
	if not self.vars:
		print 'depthManipNC:\tERROR:\tno variables given to save:', self.vars
		return
	if not exists(self.fni):
		print 'depthManipNC:\tERROR:\tinput file does not exist:', self.fni
		return
	
	if self.depthFlags =='':
		print 'depthManipNC:\tWARNING:\tNo depth flags given, assuming surface values only.'
		self.depthFlags = zeros(len(self.vars),  dtype=int)
		
	if len(self.vars)!= len(self.depthFlags):
		print 'depthManipNC:\tERROR:\tVariables do not match depth flags:', len(self.vars), '!=', len(self.depthFlags)
		return
	self.varflag={}
	for var,flag in zip(self.vars, self.depthFlags):self.varflag[var] = flag
	
	if self.debug: print 'depthManipNC:\tINFO:\topening dataset:\t', self.fni	
	nci = Dataset(self.fni,'r')#Quiet =True)
	#if self.depthFlags and 'zbnd' not in nci.variables.keys():
	#	print 'depthManipNC:\tERROR:\tdepthFlags is ',self.depthFlags,'but inputfile name does contain \'zbnd\''
	#	return
	
	#check that every requested variable exists in the input file:
	for v in self.vars:
		if v in nci.variables.keys():continue
		print 'depthManipNC:\tERROR:\tvariable,' ,v,', not found in ',self.fni
		return
		
	#create dataset and header.
	if self.debug: print 'depthManipNC:\tINFO:\tCreating a new dataset:\t', self.fno
	nco = Dataset(self.fno,'w')
	for a in nci.ncattrs():
		if self.debug: print 'depthManipNC:\tINFO:\tcopying attribute: \t\"'+a+'\":\t', nci.getncattr(a)
		nco.setncattr(a,nci.getncattr(a))	
	appendToDesc= 'Reprocessed on '+todaystr()+' by '+getuser()+' using depthManipNC.py'
	try: nco.Notes = nci.Notes + '\n\t\t'+appendToDesc
	except: nco.Notes = appendToDesc
	
	# list of variables to save, assuming some conventions
	alwaysInclude = ['time', 'lat','lon', 'latbnd', 'lonbnd']
	save = list(set(nci.variables.keys()).intersection(set(alwaysInclude) ) ) 
	save = list(set(sorted(save + self.vars)))
	
	# create dimensions:
	for d in nci.dimensions.keys():
	  if d in ['time',]: nco.createDimension(d, None)
	  elif d in ['depth', 'z',]: nco.createDimension(d, 1)
	  else:		     nco.createDimension(d, len(nci.dimensions[d]))

	# create Variables:
	for var in save:  nco.createVariable(var, nci.variables[var].dtype, nci.variables[var].dimensions,zlib=True,complevel=5)

	
	# Long Names:
	for var in save:
		varln = ''
		long_name = ''
		try:  	long_name=nci.variables[var].long_name
		except:	
			if self.debug: print 'depthManipNC:\tWarning:\tNo long_name for ', var
		if var in self.vars:
			long_name += ' '+self.depthStrings[str(self.varflag[var])]
		if self.timemean: long_name = long_name.replace('Daily', '')
		if self.timemean: long_name = long_name.replace('Monthly', '')
		nco.variables[var].long_name = long_name
		if self.debug: print 'depthManipNC:\tInfo:\tAdding long_name:',var,long_name
	# Units:
	for var in save: 
		units = ''
		try:  	units=nci.variables[var].units
		except: 
			if self.debug: print 'depthManipNC:\tWarning:\tNo units for ', var
		if var in self.vars:
		  if  self.varflag[var] == 1: units = units.replace('m^3', 'm^2')
		  
		nco.variables[var].units=units
		if self.debug: print 'depthManipNC:\tInfo:\tAdding units:',var,units

	if 'zbnd' in nci.variables.keys(): self.zbnd =nci.variables['zbnd'][:]
	if 'bathymetry' in nci.variables.keys(): self.bathy =nci.variables['bathymetry'][:]	
	
	# Fill Values:
	for var in save:
		if var not in self.vars: #no change
			 arr=nci.variables[var][:] 
		else: 
			flag = self.varflag[var]
			if self.debug: print 'depthManipNC:\tInfo:\tFilling var:',var, 'flag:', flag
			if flag == 1:	
				arr = (nci.variables[var][:] * abs((self.zbnd[:,:,:,:,1]-self.zbnd[:,:,:,:,0]))).sum(1)			
				arr= arr[:,None,:,:]
			elif flag in [-2,-1,0]: arr =nci.variables[var][:,flag,:,:]
			elif flag in [-15,]:	arr =self.bottomLayer(nci, var)
			else: 	
				arr =nci.variables[var][:,flag,:,:].mean(1)
				arr= arr[:,None,:,:]
			
			#while len(arr.shape) < len(nci.variables[var].dimensions): arr = marray(arr[None,:])
			
		timeav=False
		if self.timemean and len(intersection(['time','t'], nci.variables[var].dimensions)): timeav=True
		if timeav:
			if self.debug: print 'depthManipNC:\tInfo:\tSaving time averaged var:',var
			arr = marray([arr.mean(0),])
			while len(arr.shape) < len(nci.variables[var].dimensions): arr = marray(arr[None,:])

		if self.debug: print 'depthManipNC:\tInfo:\tSaving var:',var, arr.shape, '\tdims:', nci.variables[var].dimensions
		nco.variables[var][:] = arr
				
	# Close netcdfs:
	nco.close()
	nci.close()
	if self.debug: print 'depthManipNC:\tINFO:\tsuccessfully created:\t', self.fno
	return
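
  # Depth-flag semantics, as inferred from the branches in run() above (the
  # original code does not document them, so treat this summary as a best guess):
  #   flag == 1          depth-integrate: weight by the layer thickness
  #                      abs(zbnd[...,1] - zbnd[...,0]) and sum over the depth axis
  #   flag in (-2,-1,0)  take that depth index directly (0 = surface layer)
  #   flag == -15        take the bottom layer via self.bottomLayer()
  #   anything else      appears to be treated as a depth slice: average over it
  #                      and keep a singleton depth axis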
  def _saveNC_(self):
	print 'GreenSeasXLtoNC:\tINFO:\tCreating a new dataset:\t', self.fno
	nco = Dataset(self.fno,'w')	
	nco.setncattr('CreatedDate','This netcdf was created on the '+ucToStr(date.today()) +' by '+getuser()+' using GreenSeasXLtoNC.py')
	nco.setncattr('Original File',self.fni)	
	for att in self.attributes.keys():
		print 'GreenSeasXLtoNC:\tInfo:\tAdding Attribute:', att, self.attributes[att]
		nco.setncattr(ucToStr(att), ucToStr(self.attributes[att]))
	
	nco.createDimension('i', None)
	
	nco.createVariable('index', 'i4', 'i',zlib=True,complevel=5)
	
	for v in self.saveCols:
		if v in self.dataIsAString:continue
		print 'GreenSeasXLtoNC:\tInfo:\tCreating var:',v,self.ncVarName[v], self.dataTypes[v]
		nco.createVariable(self.ncVarName[v], self.dataTypes[v], 'i',zlib=True,complevel=5)
	
	nco.variables['index'].long_name =  'Excel Row index'
	for v in self.saveCols:
		if v in self.dataIsAString:continue		
		print 'GreenSeasXLtoNC:\tInfo:\tAdding var long_name:',v,self.ncVarName[v], self.lineTitles[v]
		nco.variables[self.ncVarName[v]].long_name =  self.lineTitles[v]

	nco.variables['index'].units =  ''		
	for v in self.saveCols:
		if v in self.dataIsAString:continue		
		print 'GreenSeasXLtoNC:\tInfo:\tAdding var units:',v,self.ncVarName[v], self.unitTitles[v]	
		nco.variables[self.ncVarName[v]].units =  self.unitTitles[v].replace('[','').replace(']','')

	nco.variables['index'].metadata =  ''
	for v in self.saveCols:
		if v in self.dataIsAString:continue		
		print 'GreenSeasXLtoNC:\tInfo:\tAdding meta data:',v#, self.metadata[v]	
		nco.variables[self.ncVarName[v]].metadata =  self.metadata[v]

	nco.variables['index'].xl_column =  '-1'	
	for v in self.saveCols:
		if v in self.dataIsAString:continue	
		print 'GreenSeasXLtoNC:\tInfo:\tAdding excel column name:',v,self.ncVarName[v],':', self.colnames[v]
		nco.variables[self.ncVarName[v]].xl_column =  self.colnames[v]
	
	arr=[]
	for a,val in enumerate(self.index.values()):    
	    if not self.saveRows[a]: continue
	    if self.rowcounts[a] ==0 :continue
	    arr.append(val+1) # account for Python's 0-based rows vs Excel's 1-based rows
	nco.variables['index'][:] = marray(arr)
	
	for v in self.saveCols:
		if v in self.dataIsAString:continue		
		print 'GreenSeasXLtoNC:\tInfo:\tSaving data:',v,self.ncVarName[v],
		arr =  []
		for a,val in enumerate(self.data[v]):
			if not self.saveRows[a]: continue
			if self.rowcounts[a]==0:continue			
			arr.append(val)
		nco.variables[self.ncVarName[v]][:] = marray(arr)
		print len(arr)
				
	
	print 'GreenSeasXLtoNC:\tInfo:\tCreated ',self.fno
	nco.close()
  def _getData_(self):
	#loading file metadata
	header   = [h.value for h in self.datasheet.row(self.headR)]
	units    = [h.value for h in self.datasheet.row(self.unitR)]

	lastMetaColumn = 20
	locator  = [h.value for h in self.datasheet.row(self.locR)[:lastMetaColumn]]
	
	ckey={}
	for n,l in enumerate(locator):
	    if l in [ 'time','t', 'Date& Time (local)']: ckey['time']=n
	    if l.lower() in [ 'lat', 'latitude']: ckey['lat']=n
	    if l.lower() in [ 'lon','long', 'longitude']: ckey['lon']=n
	    if l in [ 'Depth of sample [m]']: ckey['z']=n
	    if l in [ 'Depth of Sea [m]',]: ckey['bathy']=n
	    if l in [ 'UTC offset',]: ckey['tOffset']=n
	    if l in ['Institute',]: ckey['Institute']=n
	
	bad_cells = [xl_cellerror,xl_cellempty,xl_cellblank]
	    
	metadataTitles = {r:h.value for r,h in enumerate(self.datasheet.col(self.metaC)[:self.maxMDR]) if h.ctype not in bad_cells}
	endofHeadRow=max(metadataTitles.keys())



	    
	#create excel coordinates for netcdf.
	colnames = {h: colname(h) for h,head in enumerate(self.datasheet.row(0))} # row number doesn't matter here

	# which columns are we saving?
	saveCols={}
	lineTitles={}
	unitTitles={}
	attributes={}	
	for l,loc in enumerate(locator):	
		if loc in ['', None]:continue
		print 'GreenSeasXLtoNC:\tInfo:\tFOUND:\t',l,'\t',loc, 'in locator'
		saveCols[l] = True
		lineTitles[l]=loc
		unitTitles[l]=''
		if loc.find('[') > 0:
		  unitTitles[l]=loc[loc.find('['):].replace(']','')
	
	if header[5].find('Note')>-1:
		attributes['Note'] = header[5]
		header[5]=''
	
	# flag for saving all columns:
	if 'all' in self.datanames:
		for head in header[lastMetaColumn:]:
			if head == '': continue	
			self.datanames.append(head)
	
	# add data columns titles to output to netcdf.
	for h,head in enumerate(header):
		if head == '':continue	
		if h in saveCols.keys(): continue
		for d in self.datanames:
			if h in saveCols.keys(): continue
			if head.lower().find(d.lower()) > -1:
				print 'GreenSeasXLtoNC:\tInfo:\tFOUND:\t',h,'\t',d, 'in ',head
				saveCols[h] = True
				lineTitles[h] = header[h]
				unitTitles[h] = units[h]				
	saveCols = sorted(saveCols.keys())	
	

	print 'GreenSeasXLtoNC:\tInfo:\tInterrogating columns:',saveCols

	# Meta data for those columns with only one value:
	ncVarName={}
	allNames=[]
	for h in saveCols:
		name = self._getNCvarName_(lineTitles[h])
		#ensure netcdf variable keys are unique:
		if name in allNames:name+='_'+ucToStr(colnames[h])
		allNames.append(name)
		ncVarName[h] = name


	# make an index to link netcdf back to spreadsheet
	index = {}
	for r in xrange(len(self.datasheet.col(saveCols[0])[self.maxMDR:])):
		index[r] = r+self.maxMDR
			
	#create data dictionary
	data={}	
	tunit='seconds since 1900-00-00'
	unitTitles[ckey['time']] = tunit
	for d in saveCols:
		tmpdata= self.datasheet.col(d)[self.maxMDR:]
		arr = []
		if d == ckey['time']: # time
		    for a in tmpdata[:]:
			if a.ctype in bad_cells:
			    arr.append(default_fillvals['i8'])		   
			else:
			    try:  	arr.append(int64(date2num(parse(a.value),units=tunit)))
			    except:	
			    	try: 
			    		arr.append(int(a.value))
			    		print 'GreenSeasXLtoNC:\tWarning: Cannot read time effectively:',int(a.value)
			    	except: arr.append(default_fillvals['i8'])
		    data[d] = marray(arr)			    
		    continue
		isaString = self._isaString_(lineTitles[d])
		if isaString: #strings
		   for a in tmpdata[:]:
			if a.ctype in bad_cells:
			    arr.append(default_fillvals['S1'])
			else: 
			    try:	arr.append(ucToStr(a.value))
			    except:	arr.append(default_fillvals['S1'])
		else: # data
		   for a in tmpdata[:]:		
			if a.ctype in bad_cells:
			    arr.append(default_fillvals['f4'])
			else:
			    try:   	arr.append(float(a.value))
			    except:	arr.append(default_fillvals['f4'])
		data[d] = marray(arr)
		
	fillvals = default_fillvals.values()							

	# count number of data in each column:
	print 'GreenSeasXLtoNC:\tInfo:\tCount number of data in each column...' # can be slow
	datacounts = {d:0 for d in saveCols}
	for d in saveCols:
		for i in data[d][:]:
			if i in ['', None, ]: continue
			if i in fillvals: continue			
			datacounts[d]+=1
	print 'GreenSeasXLtoNC:\tInfo:\tMax number of entries in a column:', max(datacounts.values())
	
			
		
	# list data columns with no data or only one value	
	removeCol=[]
	for h in saveCols:
		if datacounts[h] == 0:
			print 'GreenSeasXLtoNC:\tInfo:\tNo data for column ',h,lineTitles[h],'[',unitTitles[h],']'
			removeCol.append(h)
			continue	
		col = sorted(data[h])
		if col[0] == col[-1]:
			if col[0] in fillvals:
				print 'GreenSeasXLtoNC:\tInfo:\tIgnoring masked column', h, lineTitles[h],'[',unitTitles[h],']'
				removeCol.append(h)				
				continue
			print 'GreenSeasXLtoNC:\tInfo:\tonly one "data": ',lineTitles[h],'[',unitTitles[h],']','value:', col[0]
			removeCol.append(h)
			attributes[makeStringSafe(ucToStr(lineTitles[h]))] = ucToStr(col[0])

	for r in removeCol:saveCols.remove(r)

	print 'GreenSeasXLtoNC:\tInfo:\tnew file attributes:', attributes	
	
	

		

	print 'GreenSeasXLtoNC:\tInfo:\tFigure out which rows should be saved...'
	saveRows  = {a: False for a in index.keys()} #index.keys() are rows in data. #index.values are rows in excel.
	rowcounts = {a: 0     for a in index.keys()}	
	
	for r in sorted(saveRows.keys()):
		if data[ckey['time']][r] in ['', None,]:
			print 'No time value:',r, data[ckey['time']][r]
			continue
		if data[ckey['time']][r] in fillvals:
			print 'No time value:',r, data[ckey['time']][r]		
			continue	
		for d in saveCols:
			if d<lastMetaColumn:continue
			if data[d][r] in ['', None, ]: continue
			if data[d][r] in fillvals: continue	
			rowcounts[r] += 1			
			saveRows[r] = True
	print 'GreenSeasXLtoNC:\tInfo:\tMaximum number of rows to save: ',max(rowcounts.values())  # 
	
	#rowcounts = {d:0 for d in saveRows.keys()}
	#for r in sorted(rowcounts.keys()):
	#	#if saveRows[r] == False: continue
	#	for d in saveCols:
	#		if d<20:continue
	#		if data[d][r] in ['', None, ]:continue
	#		if data[d][r] in fillvals: continue
	#		rowcounts[r] += 1
	
	
		

	# get data type (ie float, int, etc...):
	# netcdf4 requires some strange names for datatypes:
	#	ie f8 instead of numpy.float64
	dataTypes={}
	dataIsAString=[]
	for h in saveCols:
		dataTypes[h] = marray(data[h]).dtype
		print 'GreenSeasXLtoNC:\tInfo:\ttype: ',ncVarName[h], h,'\t',dataTypes[h]
		if dataTypes[h] == float64: dataTypes[h] = 'f8'
		elif dataTypes[h] == int32: dataTypes[h] = 'i4'
		elif dataTypes[h] == int64: dataTypes[h] = 'i8'	
		else:
			dataTypes[h] = 'S1'
			dataIsAString.append(h)

		
	print 'GreenSeasXLtoNC:\tInfo:\tCreate MetaData...'	
	#create metadata.
	metadata = {}
	for h in saveCols:
		if h in dataIsAString:continue
		datacol = self.datasheet.col(h)[:]
		colmeta = {metadataTitles[mdk]: datacol[mdk] for mdk in metadataTitles.keys() if metadataTitles[mdk] not in ['', None]}
		
		md='  '
		if len(colmeta.keys())> 20:
			print 'Too many metadata'
			print 'GreenSeasXLtoNC:\tWarning:\tMetadata reading failed. Please Consult original excel file for more info.'			
			metadata[h] = 'Metadata reading failed. Please Consult original excel file for more info.'
			continue
		for mdt,mdc in zip(colmeta.keys(),colmeta.values() ):
			if mdc in ['', None]:continue
			md +=ucToStr(mdt)+':\t'+ucToStr(mdc)+'\n  '
			#print md
		metadata[h] = md
	
		

	# save all info as public variables, so that it can be accessed if netCDF creation fails:
	self.saveCols = saveCols
	self.saveRows = saveRows
	self.rowcounts=rowcounts
	self.ncVarName = ncVarName
	self.dataTypes = dataTypes
	self.dataIsAString=dataIsAString
	self.metadata=metadata
	self.colnames = colnames
	self.data = data
	self.lineTitles = lineTitles
	self.unitTitles = unitTitles
	self.attributes = attributes
	self.index = index
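
  # _getData_ relies on several names that are imported elsewhere in the module.
  # The most likely sources (an assumption, not confirmed by this snippet) are:
  #   xlrd            - self.datasheet (a worksheet) and the cell-type constants
  #                     xl_cellerror / xl_cellempty / xl_cellblank
  #   netCDF4         - default_fillvals, date2num
  #   dateutil.parser - parse (for the free-form time strings)
  #   numpy, numpy.ma - int32, int64, float64, zeros, marray
  # ucToStr, colname, makeStringSafe and the _getNCvarName_ / _isaString_
  # helpers appear to be project-specific utilities.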
  def run(self):
	if not exists(self.fni):
		print 'convertToOneDNC:\tERROR:\tinput file does not exist:', self.fni
		return
		  
	nci = Dataset(self.fni,'r')
	
	if not self.vars:
		self.vars = nci.variables.keys() # save all


	#check that every requested variable exists in the input file:
	for v in self.vars:
		if v in nci.variables.keys():continue
		print 'convertToOneDNC:\tERROR:\tvariable,' ,v,', not found in ',self.fni
		return
		
	#create dataset and header.
	if self.debug: print 'convertToOneDNC:\tINFO:\tCreating a new dataset:\t', self.fno
	nco = Dataset(self.fno,'w')
	for a in nci.ncattrs():
		if self.debug: print 'convertToOneDNC:\tINFO:\tcopying attribute: \t\"'+a+'\":\t', nci.getncattr(a)
		nco.setncattr(a,nci.getncattr(a))
	
	appendToDesc= 'Reprocessed on '+todaystr()+' by '+getuser()+' using convertToOneDNC.py'
	try: nco.Notes = nci.Notes + '\n\t\t'+appendToDesc
	except: nco.Notes = appendToDesc
	


	save =   list(set(nci.variables.keys()).intersection(set(alwaysInclude) ) ) 
	save = sorted(list(set(sorted(save + self.vars))))
	

	# test to find out which coordinates should be saved.	
	if not self.dictToKeep:
		CoordsToKeep,save=getCoordsToKeep(nci,save,newMask=self.newMask,debug = self.debug)
	else:
		CoordsToKeep = self.dictToKeep


	
		
	# create dimensions:
	#for d in nci.dimensions.keys():
	#  if d in ['time',]: nco.createDimension(d, None)
	#  else:		     nco.createDimension(d, len(nci.dimensions[d]))
	nco.createDimension('index', None)

	# create Variables:

	nco.createVariable('index', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)	
	nco.createVariable('index_t', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)		
	nco.createVariable('index_z', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)		
	nco.createVariable('index_y', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)		
	nco.createVariable('index_x', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)
	for var in save:
		nco.createVariable(var, nci.variables[var].dtype, ['index',],zlib=True,complevel=5)#,chunksizes=10000)
	
	# Long Names:
	nco.variables['index'].long_name='index'
	nco.variables['index_t'].long_name='index - time'	
	nco.variables['index_z'].long_name='index - depth'
	nco.variables['index_y'].long_name='index - latitude'
	nco.variables['index_x'].long_name='index - longitude'
			
	for var in save: 
		try:  	long_name=nci.variables[var].long_name
		except:	
		  if self.debug: print 'convertToOneDNC:\tWarning:\tNo long_name for ', var
		  long_name = var
		  
		nco.variables[var].long_name=long_name
		if self.debug: print 'convertToOneDNC:\t Adding long_name for ', var, long_name
		  
	# Units:
	nco.variables['index'].units=''
	nco.variables['index_t'].units=''
	nco.variables['index_z'].units=''
	nco.variables['index_y'].units=''
	nco.variables['index_x'].units=''
					
	for var in save: 
		try:  	nco.variables[var].units=nci.variables[var].units
		except: print 'convertToOneDNC:\tWarning:\tNo units for ', var	
		
	# Fill Values:
	sorted_Coords = sorted(CoordsToKeep.iteritems(), key=itemgetter(1))

	data={}
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index  ...' , len(sorted_Coords)	
#	nco.variables['index'][:] = [ int(a[1]) for a in sorted_Coords]	
	nco.variables['index'][:] = array([ a[1] for a in sorted_Coords])
	nco.sync()	
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index t ...' 	
	nco.variables['index_t'][:] = array([a[0][0] for a in sorted_Coords])
	nco.sync()
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index z ...' 			
	nco.variables['index_z'][:] = array([a[0][1] for a in sorted_Coords])
	nco.sync()		
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index y ...' 	
	nco.variables['index_y'][:] = array([a[0][2] for a in sorted_Coords])
	nco.sync()		
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index x ...' 	
	nco.variables['index_x'][:] = array([a[0][3] for a in sorted_Coords])
	nco.sync()	
		
	
	for var in save:
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying ', var, ' ...' 
		arr = nci.variables[var][:]
		outarr = []
		if arr.ndim ==1:
			if var.lower() in ['time','time_counter','t']:	d = 0
			if var.lower() in ['depth','deptht',]:		d = 1
			if var.lower() in ['latbnd','lat','latitude']:	d = 2			
			if var.lower() in ['lonbnd','lon','longitude']:d = 3
			#for c in (CoordsToKeep.keys()):	
			for c in sorted_Coords:
				outarr.append(arr[c[0][d]])
			try: print var, d
			except: print var, "not found"
		elif arr.ndim ==2:
			if var.lower() in ['nav_lat','nav_lon']:	d = (2,3)
			else:	d = (2,3)
			print var, 'length: 2', d
			
			for c in sorted_Coords:
			#for c in sorted(CoordsToKeep.keys()):	
				outarr.append(arr[(c[0][2:])])				
		else:
			#for c in sorted(CoordsToKeep.keys()):
			for c in sorted_Coords:
				outarr.append(arr[c[0]])
		outarr= marray(outarr)
		if self.debug: print 'convertToOneDNC:\tINFO:\tSaving var:',var, arr.shape, '->', outarr.shape , 'coords:',len(sorted_Coords)
		nco.variables[var][:] =outarr
		nco.sync()	
	# Close netcdfs:
	nco.close()
	nci.close()
	if self.debug: print 'convertToOneDNC:\tINFO:\tsuccessfully created:\t', self.fno
	return				
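
The final run() in Example #4 flattens selected model fields onto a single 'index' dimension. The sketch below isolates that gather step: given a dict mapping (t, z, y, x) coordinates to an output index (the real one comes from getCoordsToKeep(), which is not shown here), sort by index and pick the matching values. Every name and number in it is invented for illustration.

# Stand-alone sketch of the coordinate gather used by convertToOneDNC.
import numpy as np
from operator import itemgetter

arr = np.arange(2 * 2 * 3 * 3).reshape(2, 2, 3, 3)       # (time, depth, lat, lon)
CoordsToKeep = {(0, 0, 1, 2): 0, (1, 0, 0, 0): 1, (1, 1, 2, 2): 2}

sorted_Coords = sorted(CoordsToKeep.items(), key=itemgetter(1))
index   = np.array([a[1] for a in sorted_Coords])        # -> nco.variables['index']
index_t = np.array([a[0][0] for a in sorted_Coords])     # -> nco.variables['index_t']
values  = np.array([arr[c[0]] for c in sorted_Coords])   # one value per kept point

print(index)      # [0 1 2]
print(index_t)    # [0 1 1]
print(values)     # [ 5 18 35]
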
Example #8
  def run(self):	
	if not self.vars:
		print 'pruneNC:\tERROR:\tno variables given to save:', self.vars
		return
	if not exists(self.fni):
		print 'pruneNC:\tERROR:\tinput file does not exist:', self.fni
		return
				
	nci = Dataset(self.fni,'r')#Quiet =True)
	
	if self.depthInt: 
		print 'pruneNC:\tERROR:\tdepth integration is not supported here; use the depthManipNC class instead. pruneNC only removes variables from a netcdf.'
		return
		
	
	#check that every requested variable exists in the input file:
	for v in self.vars:
		if v in nci.variables.keys():continue
		print 'pruneNC:\tERROR:\tvariable,' ,v,', not found in ',self.fni
		return
		
	#create dataset and header.
	if self.debug: print 'pruneNC:\tINFO:\tCreating a new dataset:\t', self.fno
	nco = Dataset(self.fno,'w')
	for a in nci.ncattrs():
		if self.debug: print 'pruneNC:\tINFO:\tcopying attribute: \t\"'+a+'\":\t', nci.getncattr(a)
		nco.setncattr(a,nci.getncattr(a))	
	appendToDesc= 'Reprocessed on '+todaystr()+' by '+getuser()+' using pruneNC.py'
	try: nco.Notes = nci.Notes + '\n\t\t'+appendToDesc
	except: nco.Notes = appendToDesc
	
	# list of variables to save, assuming some conventions
	alwaysInclude = ['time', 'lat','lon', 'latbnd', 'lonbnd']
	save =   list(set(nci.variables.keys()).intersection(set(alwaysInclude) ) ) 
	save = list(set(sorted(save + self.vars)))
	
	# create dimensions:
	for d in nci.dimensions.keys():
	  if d in ['time',]: nco.createDimension(d, None)
	  else:		     nco.createDimension(d, len(nci.dimensions[d]))

	# create Variables:
	for var in save:  nco.createVariable(var, nci.variables[var].dtype, nci.variables[var].dimensions,zlib=True,complevel=5)

	# Long Names:
	for var in save: 
		try:  	long_name=nci.variables[var].long_name
		except:	
		  print 'pruneNC:\tWarning:\tNo long_name for ', var
		  long_name = var
		  
		if self.timemean: long_name = long_name.replace('Daily', 'Monthly')
		nco.variables[var].long_name=long_name
		if self.debug: print 'pruneNC:\t Adding long_name for ', var, long_name
		  
	# Units:
	for var in save: 
		try:  	nco.variables[var].units=nci.variables[var].units
		except: print 'pruneNC:\tWarning:\tNo units for ', var	
		
	# Fill Values:
	for var in save:
		if self.debug: print 'pruneNC:\tINFO:\tCopying ', var, ' ...' 
		arr = nci.variables[var][:]
		
		if self.timemean and len(intersection(['time','t'], nci.variables[var].dimensions)):
			if self.debug: print 'pruneNC:\tInfo:\tSaving time averaged var:',var
			arr = marray([arr.mean(0),])
			while len(arr.shape) < len(nci.variables[var].dimensions): arr = marray(arr[None,:])
			
		if self.debug: print 'pruneNC:\tInfo:\tSaving var:',var, arr.shape, '\tdims:', nci.variables[var].dimensions
		nco.variables[var][:] =arr

	# Close netcdfs:
	nco.close()
	nci.close()
	if self.debug: print 'pruneNC:\tINFO:\tsuccessfully created:\t', self.fno
	return
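
Example #8 decides what to copy by intersecting the file's variables with an alwaysInclude list and then adding the user-requested ones. Below is a tiny stand-alone version of that set logic, with invented variable names; it sorts after deduplicating, which is probably what the original intended.

# Minimal sketch of pruneNC's variable-selection logic.
alwaysInclude = ['time', 'lat', 'lon', 'latbnd', 'lonbnd']
file_vars     = ['time', 'lat', 'lon', 'CHL', 'SST', 'MLD']   # stands in for nci.variables.keys()
requested     = ['CHL', 'SST']                                # stands in for self.vars

save = list(set(file_vars).intersection(set(alwaysInclude)))
save = sorted(set(save + requested))
print(save)   # ['CHL', 'SST', 'lat', 'lon', 'time']
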
Example #9
  def run(self):
	if not exists(self.fni):
		print 'convertToOneDNC:\tERROR:\tinput file does not exist:', self.fni
		assert False
		return
		  
	nci = Dataset(self.fni,'r')
	
	if not self.vars:
		self.vars = nci.variables.keys() # save all


	#check that every requested variable exists in the input file:
	for v in self.vars:
		if v in nci.variables.keys():continue
		print 'convertToOneDNC:\tERROR:\tvariable,' ,v,', not found in ',self.fni
		return
		
	#create dataset and header.
	if self.debug: print 'convertToOneDNC:\tINFO:\tCreating a new dataset:\t', self.fno
	nco = Dataset(self.fno,'w')
	for a in nci.ncattrs():
		if self.debug: print 'convertToOneDNC:\tINFO:\tcopying attribute: \t\"'+a+'\":\t', nci.getncattr(a)
		nco.setncattr(a,nci.getncattr(a))
	
	appendToDesc= 'Reprocessed on '+todaystr()+' by '+getuser()+' using convertToOneDNC.py'
	try: nco.Notes = nci.Notes + '\n\t\t'+appendToDesc
	except: nco.Notes = appendToDesc
	


	save =   list(set(nci.variables.keys()).intersection(set(alwaysInclude) ) ) 
	save = sorted(list(set(sorted(save + self.vars))))
	

	# test to find out which coordinates should be saved.	
	if not self.dictToKeep:
		CoordsToKeep,save=getCoordsToKeep(nci,save,newMask=self.newMask,debug = self.debug)
	else:
		CoordsToKeep = self.dictToKeep

	print "convertToOneDNC:\tINFO:\tvariables to save:", save#, self.dictToKeep
	
		
	# create dimensions:
	nco.createDimension('index', None)

	# create Variables:
	nco.createVariable('index',   int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)	
	nco.createVariable('index_t', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)		
	nco.createVariable('index_z', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)		
	nco.createVariable('index_y', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)		
	nco.createVariable('index_x', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)
	for var in save:
		if var in ['index','index_t','index_z','index_y','index_x']:continue
		nco.createVariable(var, nci.variables[var].dtype, ['index',],zlib=True,complevel=5)#,chunksizes=10000)
	
	# Long Names:
	nco.variables['index'].long_name='index'
	nco.variables['index_t'].long_name='index - time'	
	nco.variables['index_z'].long_name='index - depth'
	nco.variables['index_y'].long_name='index - latitude'
	nco.variables['index_x'].long_name='index - longitude'
			
	for var in save: 
		try:  	long_name=nci.variables[var].long_name
		except:	
		  if self.debug: print 'convertToOneDNC:\tWARNING:\tNo long_name for ', var
		  long_name = var
		  
		nco.variables[var].long_name=long_name
		if self.debug: print 'convertToOneDNC:\tINFO:\tAdding long_name for ', var, '\t',long_name
		  
	# Units:
	nco.variables['index'].units=''
	nco.variables['index_t'].units=''
	nco.variables['index_z'].units=''
	nco.variables['index_y'].units=''
	nco.variables['index_x'].units=''
					
	for var in save: 
		try:  	
			nco.variables[var].units=nci.variables[var].units
			if self.debug: print 'convertToOneDNC:\tINFO:\tAdding units for ', var, '\t',nci.variables[var].units		
		except: print 'convertToOneDNC:\tWARNING:\tNo units for ', var	
		
	# Fill Values:
	def itemsgetter(a):
		return a[1][0]

	sorted_Coords = sorted(CoordsToKeep.iteritems(), key=itemsgetter)
	print "convertToOneDNC:\tINFO:\tsorted_Coords:",sorted_Coords[0],sorted_Coords[-1]
	data={}
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index  ...' , len(sorted_Coords)	
#	nco.variables['index'][:] = [ int(a[1]) for a in sorted_Coords]	

	nco.variables['index'][:] = array([ a[1][0] for a in sorted_Coords])
	nco.sync()
	
	# 4D coordinates:
	if len(sorted_Coords[0][0]) ==4:
		print "4D:", sorted_Coords[0][0]
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index t ...' 	
		nco.variables['index_t'][:] = array([a[0][0] for a in sorted_Coords])
		nco.sync()
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index z ...' 			
		nco.variables['index_z'][:] = array([a[0][1] for a in sorted_Coords])
		nco.sync()		
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index y ...' 	
		nco.variables['index_y'][:] = array([a[0][2] for a in sorted_Coords])
		nco.sync()		
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index x ...' 	
		nco.variables['index_x'][:] = array([a[0][3] for a in sorted_Coords])
		nco.sync()	
		
	if len(sorted_Coords[0][0]) ==3:
		print "3D:", sorted_Coords[0][0]
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index t ...' 	
		nco.variables['index_t'][:] = array([a[0][0] for a in sorted_Coords])
		nco.sync()
		#if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index z ...' 			
		nco.variables['index_z'][:] = zeros(len(sorted_Coords))
		#nco.sync()		
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index y ...' 	
		nco.variables['index_y'][:] = array([a[0][1] for a in sorted_Coords])
		nco.sync()		
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index x ...'
		#tempArr = []
		#for a in sorted_Coords:
		#	print a[0]
		#	tempArr.append(a[0][2])
			
		nco.variables['index_x'][:] = array([a[0][2] for a in sorted_Coords])
		nco.sync()		
	errorScript = "If it is failing here, then you need to check that your dimensions are named correctly in netcdf_manip/alwaysInclude.py: timeNames, depthNames, lonnames and latnames"
	
	for var in save:
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying ', var, ' ...'
		arr = nci.variables[var][:]
		if len(arr)==0:
			print 'convertToOneDNC:\tWarning:\tIt looks like the netcdf ',self.fni,'does not contain the variable', var
			 
		outarr = []
		if arr.ndim ==1 and len(sorted_Coords[0][0]) == 4:
			if var.lower() in ['time','time_counter','t','month', 'time_centered']:	d = 0
			if var.lower() in timeNames:	d = 0
			if var.lower() in depthNames:	d = 1
			if var.lower() in latnames:	d = 2			
			if var.lower() in lonnames: 	d = 3
			print var, 'convertToOneDNC:\tINFO:\tndim: (1-4)',arr.ndim, var, sorted_Coords[0][0], d, #arr[0]
			for c in sorted_Coords:
				try:	outarr.append(arr[c[0][d]])
				except:	raise AssertionError(errorScript)	 		
				
			try: print var, d
			except: print var, "not found"
			
		elif arr.ndim ==1 and len(sorted_Coords[0][0]) ==1:
			d = 0	
			print var, 'convertToOneDNC:\tINFO:\tndim: (1-1)',arr.ndim,var, sorted_Coords[0][0], d		
			for c in sorted_Coords:
				try:	outarr.append(arr[c[0][d]])
				except:	raise AssertionError(errorScript)	 						
			try: print var, d
			except: print var, "not found"
						
		elif arr.ndim ==1 and len(sorted_Coords[0][0]) ==3:
			if var.lower() in ['time','time_counter','t','month','time_centered',]:	d = 0
			if var.lower() in timeNames:	d = 0
			#if var.lower() in depthNames:		d = 1
			if var.lower() in latnames:	d = 1			
			if var.lower() in lonnames: 	d = 2
			#for c in (CoordsToKeep.keys()):
			print var, 'convertToOneDNC:\tINFO:\tndim: (1-3)',arr.ndim,var, sorted_Coords[0][0], d									
			for c in sorted_Coords:
				try:	outarr.append(arr[c[0][d]])
				except:	raise AssertionError(errorScript)	 		
			try: print var, d
			except: print var, "not found"
			
		elif arr.ndim ==2:
			if var.lower() in ['nav_lat','nav_lon']:			
				d = (2,3)
			elif var.lower() in ['deptht','depthu','depthv','latitude','longitude','depth',]:		
				d = (0,1)
			elif var.lower() in ['mask'] and len(sorted_Coords[0][0]) ==3: 	
				d = (1,2) # because of MLD datasets.
			else:	d = (0,1)
			
			print var, 'convertToOneDNC:\tINFO:\tndim: (2)',arr.ndim,var, sorted_Coords[0][0], d
			for c in sorted_Coords:			#for c in sorted(CoordsToKeep.keys()):	
				try:	outarr.append(arr[(c[0][d[0]],c[0][d[1]])])	
				except:	raise AssertionError(errorScript)	 		
		elif arr.ndim ==3:
			print var, 'convertToOneDNC:\tINFO:\tndim: (3)',arr.ndim,var, sorted_Coords[0][0]
			for c in sorted_Coords:
				if len(c[0]) == 4: outarr.append(arr[(c[0][0],c[0][2],c[0][3])])
				if len(c[0]) == 3: outarr.append(arr[(c[0][0],c[0][1],c[0][2])])
		elif arr.ndim ==4:
			print var, 'convertToOneDNC:\tINFO:\tndim: (4)',arr.ndim,var, sorted_Coords[0][0]
			for c in sorted_Coords:
				#print 'convertToOneD:',var, c, c[0], arr.shape, arr[c[0]]
				outarr.append(arr[ (c[0][0],c[0][1],c[0][2],c[0][3]) ])
		else:
			print "How many dimensions?", arr.ndim, len(sorted_Coords[0][0])
			assert False
		outarrnm = np.array(outarr)			
		print var, [outarrnm.min(),outarrnm.max()],	'(no mask)'		
		outarr = marray(outarr)
		print var, [outarr.min(),outarr.max()], '(masked)'
		if self.debug: print 'convertToOneDNC:\tINFO:\tSaving var:',var, arr.shape, '->', outarr.shape , 'coords:',len(sorted_Coords), [outarr.min(),outarr.max()]
		nco.variables[var][:] = outarr
		nco.sync()	
	# Close netcdfs:
	nco.close()
	nci.close()
	if self.debug: print 'convertToOneDNC:\tINFO:\tsuccessfully created:\t', self.fno
	
	#nc = Dataset(self.fno, 'r')
	#lon = nc.variables['lon'][:]
	#print lon.min(), lon.max()
	#if self.fno.find('Model')>-1:	assert 0
	return
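
Example #9 picks, for every 1-D coordinate variable, the right slot of each kept (t, z, y, x) tuple based on the variable's name. The sketch below is a stripped-down, runnable version of that lookup; the short name lists stand in for timeNames, depthNames, latnames and lonnames from the project's alwaysInclude module, and all values are invented.

# Stand-alone sketch of the per-dimension lookup for 1-D coordinate variables.
import numpy as np

timeNames, depthNames = ['time', 't'], ['depth', 'deptht', 'z']
latnames, lonnames = ['lat', 'latitude'], ['lon', 'longitude']

lat = np.linspace(-90., 90., 5)                          # a 1-D coordinate variable
sorted_Coords = [((0, 0, 1, 2), 0), ((1, 0, 4, 3), 1)]   # ((t, z, y, x), index)

var = 'lat'
if var.lower() in timeNames:    d = 0
elif var.lower() in depthNames: d = 1
elif var.lower() in latnames:   d = 2
elif var.lower() in lonnames:   d = 3

outarr = np.array([lat[c[0][d]] for c in sorted_Coords])
print(outarr)                                            # [-45.  90.]
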