def __call__(self, varName, sDTime, eDTime, BBox=None, res=None, delT=None):
    '''
    varName : name of the variable to read from each granule
    sDTime  : left (start) DTime bound
    eDTime  : right (end) DTime bound
    BBox    : bounding box [[lllat, lllon], [urlat, urlon]]
    res     : spatial resolution of the gridded 2-D output
    delT    : time-bin width; if given, output is binned by time
    '''
    mapCode = '^' + ''.join(str(res).split('.'))   # e.g. res=0.25 -> '^025'

    gpmData = GPM_data()

    srcDir = os.path.join(self.dataDir, self.prdDir)
    assert os.path.exists(srcDir), '{} does not exist.'.format(srcDir)

    Granule = self.search_granules(srcDir, sDTime, eDTime, BBox)
    if len(Granule) == 0:
        print '! Warning ! no data extracted'
        return None

    # output shape: (total scans over all granules, footprints per scan)
    outSize = sum([len(gra[2]) for gra in Granule]), Granule[0][2].shape[1]

    Lat   = empty(outSize, 'float32')
    Lon   = empty(outSize, 'float32')
    aOut  = empty(outSize, 'float32')
    DTime = []

    prvI = 0
    for granule in Granule:
        srcPath, dtime, lat, lon, idx = granule

        gpmData.srcPath.append(srcPath)
        gpmData.recLen.append(len(dtime))    # number of data records for each file

        nxtI = prvI + len(dtime)
        aOut[prvI:nxtI] = self.func_read(srcPath, varName, idx.tolist())
        Lat[prvI:nxtI]  = lat
        Lon[prvI:nxtI]  = lon
        DTime.extend(dtime)

        # grid each granule separately when no time binning is requested
        if res is not None and delT is None:
            gpmData.griddata.append(granule2map(lat, lon, aOut[prvI:nxtI], BBox, res))
            gpmData.grid = GridCoordinates(mapCode, BBox=BBox)

        prvI = nxtI

    if delT is not None:
        # bin the concatenated swath data by time
        dtBnd = dtrange(sDTime, eDTime, delT)
        gpmData.tbound = zip(dtBnd[:-1], dtBnd[1:])
        gpmData.dtime  = bin_bytbound(DTime, dtBnd, DTime)
        gpmData.lat    = bin_bytbound(DTime, dtBnd, Lat)
        gpmData.lon    = bin_bytbound(DTime, dtBnd, Lon)
        gpmData.data   = bin_bytbound(DTime, dtBnd, aOut)

        if res is not None:
            # one 2-D grid per time bin
            gpmData.griddata = [granule2map(lat, lon, a, BBox, res)
                                for lat, lon, a in zip(gpmData.lat, gpmData.lon, gpmData.data)]
            gpmData.grid = GridCoordinates(mapCode, BBox=BBox)
    else:
        gpmData.dtime = DTime
        gpmData.lat   = Lat
        gpmData.lon   = Lon
        gpmData.data  = aOut

    return gpmData
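# ---------------------------------------------------------------------------
# Reference sketch (not part of the original module): dtrange() and
# bin_bytbound() are helpers imported from elsewhere in this package.  The
# two functions below only illustrate the behaviour that __call__ above
# assumes of them; the "_sketch" names are hypothetical.
def _dtrange_sketch(sDTime, eDTime, delT):
    # bin edges starting at sDTime, stepping by delT until eDTime is reached
    edges = [sDTime]
    while edges[-1] < eDTime:
        edges.append(edges[-1] + delT)
    return edges

def _bin_bytbound_sketch(DTime, dtBnd, a):
    # split `a` into one chunk per [dtBnd[i], dtBnd[i+1]) interval, using the
    # timestamps in DTime (same length as `a`) to assign each sample to a bin
    return [[v for t, v in zip(DTime, a) if lo <= t < hi]
            for lo, hi in zip(dtBnd[:-1], dtBnd[1:])]
# ---------------------------------------------------------------------------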
def __call__(self, varName, sDTime, eDTime, BBox=None, res=None, delT=None, verbose=True):
    '''
    varName : name of the variable to read from each granule
    sDTime  : left (start) DTime bound
    eDTime  : right (end) DTime bound
    res     : spatial resolution of the gridded 2-D output  # not in service
    delT    : time-bin width; if given, output is binned by time
    '''
    csData = CloudSat_data()

    srcDir = os.path.join(self.dataDir, '%s-%s.%s' % (self.prdLv, self.prdName, self.prdVer))
    assert os.path.exists(srcDir), '{} does not exist.'.format(srcDir)

    try:
        Granule = self.search_granules(srcDir, sDTime, eDTime, BBox, verbose)
    except IOError:
        print 'No granule by %s' % (__file__.split('/')[-1])
        raise

    # accumulate swath samples from all granules
    Lat   = deque([])
    Lon   = deque([])
    aOut  = deque([])
    DTime = deque([])

    for granule in Granule:
        srcPath, dtime, lat, lon, idx = granule

        # NOTE: BBox is required here; it is used to subset each granule
        # keep footprints inside the latitude band AND inside the longitude range
        mskLat = ma.masked_inside(lat, BBox[0][0], BBox[1][0]).mask
        mskLon = ma.masked_inside(lon, BBox[0][1], BBox[1][1]).mask
        msk = mskLat * mskLon
        if type(msk) == np.bool_:   # mask collapsed to a scalar: nothing selected
            msk = array([False] * len(lat))

        Lat.extend(lat[msk])
        Lon.extend(lon[msk])
        aOut.extend(self.func_read(srcPath, varName, idx.tolist())[msk, :])

        dtime = dtime[msk]
        DTime.extend(dtime)

        csData.srcPath.append(srcPath)
        csData.recLen.append(len(dtime))   # number of data records kept from each file

    # time binning
    if delT is not None:
        dtBnd = dtrange(sDTime, eDTime, delT)
    else:
        dtBnd = [sDTime, eDTime]

    csData.dtime = bin_bytbound(DTime, dtBnd, array(DTime))
    csData.lat   = bin_bytbound(DTime, dtBnd, array(Lat))
    csData.lon   = bin_bytbound(DTime, dtBnd, array(Lon))
    csData.data  = bin_bytbound(DTime, dtBnd, array(aOut))

    return csData
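# ---------------------------------------------------------------------------
# Note on the mask edge case checked above (illustration only): when a
# masked_inside() call masks nothing, numpy.ma can shrink the mask to the
# scalar nomask (numpy.bool_(False)) instead of a boolean array, so the
# combined mask may be a scalar too.  The type(msk) == np.bool_ branch turns
# that scalar into an all-False selection of the right length, e.g.:
def _scalar_mask_sketch(lat):
    import numpy as np
    from numpy import ma, array
    msk = ma.masked_inside(lat, 9999.0, 10000.0).mask    # nothing falls inside
    if type(msk) == np.bool_:                            # scalar, not an array
        msk = array([False] * len(lat))
    return msk                                           # always a boolean array
# ---------------------------------------------------------------------------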
def __call__(self, varName, sDTime, eDTime, BBox=None, res=None, delT=None):
    '''
    varName : name of the variable to read from each granule
    sDTime  : left (start) DTime bound
    eDTime  : right (end) DTime bound
    BBox    : bounding box [[lllat, lllon], [urlat, urlon]]
    res     : spatial resolution of the gridded 2-D output
    delT    : time-bin width; if given, output is binned by time
    '''
    mapCode = '^' + ''.join(str(res).split('.'))   # e.g. res=0.25 -> '^025'

    gpmData = GPM_data()

    srcDir = os.path.join(self.dataDir, self.prdDir)
    assert os.path.exists(srcDir), '{} does not exist.'.format(srcDir)

    Granule = self.search_granules(srcDir, sDTime, eDTime, BBox)
    if len(Granule) == 0:      # guard against an empty search result
        print '! Warning ! no data extracted'
        return None

    # output shape: (total scans over all granules, footprints per scan)
    outSize = sum([len(gra[2]) for gra in Granule]), Granule[0][2].shape[1]

    Lat   = empty(outSize, 'float32')
    Lon   = empty(outSize, 'float32')
    aOut  = empty(outSize, 'float32')
    DTime = []

    prvI = 0
    for granule in Granule:
        srcPath, dtime, lat, lon, idx = granule

        gpmData.srcPath.append(srcPath)
        gpmData.recLen.append(len(dtime))    # number of data records for each file

        nxtI = prvI + len(dtime)
        aOut[prvI:nxtI] = self.func_read(srcPath, varName, idx.tolist())
        Lat[prvI:nxtI]  = lat
        Lon[prvI:nxtI]  = lon
        DTime.extend(dtime)

        # grid each granule separately when no time binning is requested
        if res is not None and delT is None:
            gpmData.griddata.append(granule2map(lat, lon, aOut[prvI:nxtI], BBox, res))
            gpmData.grid = GridCoordinates(mapCode, BBox=BBox)

        prvI = nxtI

    if delT is not None:
        # bin the concatenated swath data by time
        dtBnd = dtrange(sDTime, eDTime, delT)
        gpmData.tbound = zip(dtBnd[:-1], dtBnd[1:])
        gpmData.dtime  = bin_bytbound(DTime, dtBnd, DTime)
        gpmData.lat    = bin_bytbound(DTime, dtBnd, Lat)
        gpmData.lon    = bin_bytbound(DTime, dtBnd, Lon)
        gpmData.data   = bin_bytbound(DTime, dtBnd, aOut)

        if res is not None:
            # one 2-D grid per time bin
            gpmData.griddata = [granule2map(lat, lon, a, BBox, res)
                                for lat, lon, a in zip(gpmData.lat, gpmData.lon, gpmData.data)]
            gpmData.grid = GridCoordinates(mapCode, BBox=BBox)
    else:
        gpmData.dtime = DTime
        gpmData.lat   = Lat
        gpmData.lon   = Lon
        gpmData.data  = aOut

    return gpmData
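# ---------------------------------------------------------------------------
# Usage sketch (illustration only).  The reader object passed in, its setup,
# and the variable name "precipRateESurface" are assumptions for the example;
# only the __call__ signature above comes from this module.  delT is assumed
# to be a datetime.timedelta.
def _example_gpm_call(gpm_reader):
    from datetime import datetime, timedelta
    sDTime = datetime(2014, 6, 1, 0)
    eDTime = datetime(2014, 6, 1, 6)
    BBox   = [[20.0, 120.0], [50.0, 150.0]]     # [[lllat, lllon], [urlat, urlon]]
    gpmData = gpm_reader('precipRateESurface', sDTime, eDTime,
                         BBox=BBox, res=0.25, delT=timedelta(hours=1))
    # with delT given, dtime/lat/lon/data hold one entry per time bin;
    # with res also given, griddata holds one 2-D grid per bin on gpmData.grid
    return gpmData
# ---------------------------------------------------------------------------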
def __call__(self, varName, sDTime, eDTime, BBox=None, res=None, delT=None,
             LonCrdCent='A', verbose=True):
    '''
    varName   : name of the variable to read from each granule
    sDTime    : left (start) DTime bound
    eDTime    : right (end) DTime bound
    res       : spatial resolution of the gridded 2-D output  # not in service
    delT      : time-bin width; if given, output is binned by time
    LonCrdCent: center longitude of the map coordinates
                'A'(tlantic): longitude ranges over -180 ~ 180 (CloudSat default)
                'P'(acific) : longitude ranges over    0 ~ 360
    '''
    csData = CloudSat_data()

    srcDir = os.path.join(self.dataDir, '%s-%s.%s' % (self.prdLv, self.prdName, self.prdVer))
    assert os.path.exists(srcDir), '{} does not exist.'.format(srcDir)

    try:
        Granule = self.search_granules(srcDir, sDTime, eDTime, BBox, verbose)
    except IOError:
        print 'No granule by %s' % (__file__.split('/')[-1])
        raise

    # accumulate swath samples from all granules
    Lat   = deque([])
    Lon   = deque([])
    aOut  = deque([])
    DTime = deque([])

    for granule in Granule:
        srcPath, dtime, lat, lon, idx = granule

        # NOTE: BBox is required here; it is used to subset each granule
        [[lllat, lllon], [urlat, urlon]] = BBox

        # latitude selection: True inside the latitude band
        mskLat = ma.masked_inside(lat, lllat, urlat).mask

        # longitude selection: granule longitudes are in -180 ~ 180, so a BBox
        # given in 0 ~ 360 (possibly crossing the date line) is remapped;
        # mskLonOut marks points *outside* the requested longitude range
        if (lllon <= 180) & (urlon <= 180):
            mskLonOut = ma.masked_outside(lon, lllon, urlon).mask
        elif (lllon <= 180) & (180 < urlon):
            mskLonOut = ma.masked_inside(lon, urlon - 360, lllon).mask
        elif (180 < lllon) & (180 < urlon):
            mskLonOut = ma.masked_outside(lon, lllon - 360, urlon - 360).mask
        else:
            print 'Check BBox', BBox
            print 'by: %s' % (__file__.split('/')[-1])
            sys.exit()

        # keep footprints inside the latitude band AND inside the longitude range
        msk = mskLat & ~mskLonOut
        if type(msk) == np.bool_:   # mask collapsed to a scalar: nothing selected
            msk = array([False] * len(lat))

        Lat.extend(lat[msk])
        Lon.extend(lon[msk])
        aOut.extend(self.func_read(srcPath, varName, idx.tolist())[msk, :])

        dtime = dtime[msk]
        DTime.extend(dtime)

        csData.srcPath.append(srcPath)
        csData.recLen.append(len(dtime))   # number of data records kept from each file

    # optionally recenter longitudes on the Pacific (0 ~ 360)
    if LonCrdCent == 'A':
        pass
    elif LonCrdCent == 'P':
        Lon = array(Lon)
        Lon[Lon < 0] += 360.0    # lon in [-180, 0) -> [180, 360)

    # time binning
    if delT is not None:
        dtBnd = dtrange(sDTime, eDTime, delT)
    else:
        dtBnd = [sDTime, eDTime]

    csData.dtime = bin_bytbound(DTime, dtBnd, array(DTime))
    csData.lat   = bin_bytbound(DTime, dtBnd, array(Lat))
    csData.lon   = bin_bytbound(DTime, dtBnd, array(Lon))
    csData.data  = bin_bytbound(DTime, dtBnd, array(aOut))

    return csData
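# ---------------------------------------------------------------------------
# Usage sketch (illustration only).  The reader object passed in, its setup,
# and the variable name "Radar_Reflectivity" are assumptions; only the
# __call__ signature above comes from this module.  delT is assumed to be a
# datetime.timedelta.
def _example_cloudsat_call(cs_reader):
    from datetime import datetime, timedelta
    sDTime = datetime(2008, 7, 1, 0)
    eDTime = datetime(2008, 7, 1, 12)
    # a BBox crossing the date line, given with 0-360 longitudes
    BBox   = [[-10.0, 150.0], [20.0, 200.0]]    # [[lllat, lllon], [urlat, urlon]]
    csData = cs_reader('Radar_Reflectivity', sDTime, eDTime,
                       BBox=BBox, delT=timedelta(hours=3),
                       LonCrdCent='P', verbose=False)
    # csData.lon is returned in 0-360 coordinates because LonCrdCent='P';
    # dtime/lat/lon/data each hold one array per 3-hour bin
    return csData
# ---------------------------------------------------------------------------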