def SetStartEndDates(self, periodD):
    self.startdate = mj_dt.IntYYYYMMDDDate(periodD['startyear'],
            periodD['startmonth'], periodD['startday'])
    self.enddate = mj_dt.IntYYYYMMDDDate(periodD['endyear'],
            periodD['endmonth'], periodD['endday'])
    self.startdatestr = mj_dt.DateToStrDate(self.startdate)
    self.enddatestr = mj_dt.DateToStrDate(self.enddate)
    if self.enddate < self.startdate:
        exitstr = 'period starts after ending'
        exit(exitstr)
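#The period dictionary is expected to hold integer year/month/day entries, as
#implied by the keys above; a minimal sketch (the values are hypothetical):
#   periodD = {'startyear': 2003, 'startmonth': 1, 'startday': 1,
#              'endyear': 2017, 'endmonth': 12, 'endday': 31}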
def _SearchExtractLayers(self, acqdate):
    '''Search for extracted layers for specific SMAP tile
    '''
    self.nrExploded = 0
    self.explodeD = {}
    for extcomp in self.extractL:
        paramD = dict(zip(self.paramL, extcomp))
        compD = dict(zip(self.compL, extcomp))
        comp = Composition(compD, self.process.system.dstsystem,
                self.process.system.dstdivision)
        #Set the datum
        acqdatestr = mj_dt.DateToStrDate(acqdate)
        datumD = {'acqdatestr': acqdatestr, 'acqdate': acqdate}
        #Construct the locus dictionary
        locusD = {'locus': 'global', 'path': 'global'}
        #Use an empty lambda object as a simple attribute container
        filepath = lambda: None
        filepath.volume = self.process.dstpath.volume
        filepath.hdrfiletype = self.process.dstpath.hdrfiletype
        #Create a standard raster layer
        layer = RasterLayer(comp, locusD, datumD, filepath)
        if not layer._Exists() or self.process.overwrite:
            #The layer is missing (or overwrite is on): queue it for extraction
            self.explodeD[paramD['band']] = {'layer': layer, 'params': paramD}
        else:
            #The layer exists and overwrite is off: register it and count it as done
            self.session._InsertLayer(layer, self.process.overwrite, self.process.delete)
            self.nrExploded += 1
def SetMstep(self):
    self.dstep = self.periodstep = 0
    self.pdTS = PandasTS(self.timestep)
    npTS = self.pdTS.SetMonthsFromPeriod(self)
    for d in range(npTS.shape[0]):
        acqdate = npTS[d].date()
        #The last date of the month is the day before the same date next month
        acqlastdate = mj_dt.AddMonth(acqdate, 1)
        acqlastdate = mj_dt.DeltaTime(acqlastdate, -1)
        if acqlastdate <= self.enddate:
            acqdatestr = mj_dt.DateToStrDate(acqdate)
            if self.timestep not in ['monthlyday']:
                #Truncate to yyyymm for plain monthly timesteps
                acqdatestr = acqdatestr[0:6]
            self.datumL.append(acqdatestr)
            self.datumD[acqdatestr] = {
                'acqdate': acqdate,
                'acqdatestr': acqdatestr,
                'season': acqdate.month
            }
    self.moviedatum = '%s-%s' % (self.datumL[0], self.datumL[-1])
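#As the [0:6] truncation above implies, plain monthly timesteps use yyyymm
#datum identifiers (e.g. 1 January 2003 -> '200301'), whereas 'monthlyday'
#keeps the full yyyymmdd string ('20030101'). The movie datum then spans the
#first to the last datum, e.g. '200301-201712' (illustrative values).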
def SingleDateTimeStep(self, periodD):
    self.startdate = self.enddate = mj_dt.IntYYYYMMDDDate(
        periodD['startyear'], periodD['startmonth'], periodD['startday'])
    self.startdatestr = self.enddatestr = mj_dt.DateToStrDate(self.startdate)
    self.datumD[self.startdatestr] = {
        'acqdate': self.startdate,
        'acqdatestr': self.startdatestr
    }
def SetDstep(self):
    self.pdTS = PandasTS(self.timestep)
    npTS = self.pdTS.SetDatesFromPeriod(self)
    self.pandasCode = self.timestep
    for d in npTS:
        acqdate = d.date()
        acqdatestr = mj_dt.DateToStrDate(acqdate)
        self.datumL.append(acqdatestr)
        self.datumD[acqdatestr] = {
            'acqdate': acqdate,
            'acqdatestr': acqdatestr
        }
def _ReadClimateIndex(self, srcFPN, comp):
    import csv
    print(srcFPN)
    srcCellNull = self.process.proc.srcraw.paramsD[comp]['cellnull']
    dstCellNull = self.dstLayer.comp.cellnull
    if self.process.proc.srcraw.paramsD[comp]['id'] != \
            self.process.proc.srcraw.paramsD[comp]['datafile']:
        exitstr = 'Climateindex import: the id and the datafile should have the same name (%s vs %s)' % (
            self.process.proc.srcraw.paramsD[comp]['id'],
            self.process.proc.srcraw.paramsD[comp]['datafile'])
        exit(exitstr)
    queryL = []
    with open(srcFPN) as f:
        reader = csv.reader(f, delimiter=' ', skipinitialspace=True)
        #The first row holds the start and end years of the record
        startyear, endyear = next(reader)
        for row in reader:
            y = row[0]
            for m in range(1, 13):
                acqdate = mj_dt.yyyy_mm_dd_Date(y, m, 1)
                acqdatestr = mj_dt.DateToStrDate(acqdate)[0:6]
                value = row[m]
                #Skip trailing nodata months in the (incomplete) last year
                if y == endyear and float(value) == srcCellNull:
                    continue
                if float(value) == srcCellNull:
                    value = dstCellNull
                queryL.append({
                    'index': self.process.proc.srcraw.paramsD[comp]['id'],
                    'acqdate': acqdate,
                    'acqdatestr': acqdatestr,
                    'value': value
                })
            if y == endyear:
                break
        #After the last year, the next row is the nodata
        cellNull = next(reader)[0]
        if float(cellNull) != srcCellNull:
            exitstr = 'cellnull for climateindex %s is not correct, should be: %s' % (
                comp, srcCellNull)
            exit(exitstr)
    self.session._InsertClimateIndex(queryL)
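#As the parsing above implies, the climate index file is whitespace delimited:
#a first row with the start and end years, one row per year with the year
#followed by 12 monthly values, and a final row holding the nodata value.
#A minimal sketch (all values hypothetical):
#   1950 2017
#   1950 -0.13 0.21 0.05 -0.02 0.11 0.30 0.18 -0.07 0.09 0.14 -0.22 0.01
#   ...
#   -99.9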
def DumpExportDB(self):
    '''Dump the entire postgres db using pg_dump
    '''
    if self.process.params.dataonly and self.process.params.schemaonly:
        exit('Both dataonly and schemaonly can not be set to TRUE')
    username, account, password = self.session._SelectUserSecrets()
    if self.process.dstpath.volume.lower() == 'none':
        vol = ''
    else:
        vol = '/volumes/%s' % (self.process.dstpath.volume)
    FP = path.join(vol, 'SQLdump')
    today = kt_dt.DateToStrDate(kt_dt.Today())
    #Set filename
    FN = 'sqldump-%s' % (self.process.params.format)
    if self.process.params.dataonly:
        FN += '-dataonly'
    elif self.process.params.schemaonly:
        FN += '-schemaonly'
    FN += '_%s.sql' % (today)
    sqlFPN = path.join(FP, FN)
    if not path.exists(FP):
        makedirs(FP)
    if path.isfile(sqlFPN) and not self.process.overwrite:
        return
    host = 'localhost'
    db = 'postgres'
    if self.process.params.cmdpath == 'None':
        cmd = 'pg_dump'
    else:
        cmd = '%s/pg_dump' % (self.process.params.cmdpath)
    if self.process.params.dataonly:
        cmd += ' -h {0} -p 5432 -U {1} -F {2} -f {3} -a {4}'\
            .format(host, username, self.process.params.format, sqlFPN, db)
    elif self.process.params.schemaonly:
        cmd += ' -h {0} -p 5432 -U {1} -F {2} -f {3} -s {4}'\
            .format(host, username, self.process.params.format, sqlFPN, db)
    else:
        cmd += ' -h {0} -p 5432 -U {1} -F {2} -f {3} {4}'\
            .format(host, username, self.process.params.format, sqlFPN, db)
    system(cmd)
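#The assembled command follows the format strings above; a sketch of a full
#dump with format 'c' (custom archive), username and date are placeholders:
#   pg_dump -h localhost -p 5432 -U <username> -F c \
#       -f /volumes/<volume>/SQLdump/sqldump-c_<yyyymmdd>.sql postgres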
def _ConstructDaacTile(self, tile, sdpath):
    '''Construct a SMAP tile from a DAAC tile record
    '''
    smapfilename, smapid, source, product, version, folder, acqdate = tile
    #Construct the composition
    compD = {'source': source, 'product': product, 'version': version,
             'folder': folder, 'system': 'smap', 'division': 'region'}
    #Invoke the composition
    comp = SmapComposition(compD)
    #Set the datum
    datumD = {'acqdatestr': mj_dt.DateToStrDate(acqdate), 'acqdate': acqdate}
    #Set the filename
    FN = smapfilename
    #Set the locus
    loc = 'global'
    #Set the locuspath
    locusPath = 'global'
    #Construct the locus dictionary
    locusD = {'locus': loc, 'path': locusPath}
    #Invoke and return a SmapTile
    return SmapTile(smapid, comp, locusD, datumD, sdpath, FN)
def ExportTableDataCsvSql(self, schema, table):
    '''Export all records of a db table to csv
    '''
    import csv
    schematab = '%s.%s' % (schema, table)
    if self.process.dstpath.volume.lower() == 'none':
        vol = ''
    else:
        vol = '/volumes/%s' % (self.process.dstpath.volume)
    FP = path.join(vol, 'SQLdump', schema, table)
    today = kt_dt.DateToStrDate(kt_dt.Today())
    FN = '%s_sqldump_%s.csv' % (table, today)
    csvFPN = path.join(FP, FN)
    if not path.exists(FP):
        makedirs(FP)
    if path.isfile(csvFPN) and not self.process.overwrite:
        return
    #Get the columns
    tableColumnDefL = self.session._GetTableColumns(schema, table)
    tableColNameL = [row[0] for row in tableColumnDefL]
    query = {'schematab': schematab, 'items': ",".join(tableColNameL)}
    #Get all the data in the table
    print("SELECT %(items)s FROM %(schematab)s;" % query)
    recs = self.session._SelectAllTableRecs(query)
    if len(recs) == 0:
        warnstr = '    WARNING, empty sql dump: skipping export to %s' % (csvFPN)
        print(warnstr)
        return
    #Open csv file and write the column names followed by all records
    print('    Dumping db records to', csvFPN)
    with open(csvFPN, 'w') as F:
        wr = csv.writer(F, delimiter=";")
        wr.writerow(tableColNameL)
        for row in recs:
            wr.writerow(row)
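#The csv lands under the destination volume, following the path logic above
#(a sketch with placeholder names):
#   /volumes/<volume>/SQLdump/<schema>/<table>/<table>_sqldump_<yyyymmdd>.csv
#with ';' as delimiter and the column names as the first row.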
def _ReadCO2records(self, srcFPN, comp):
    import csv
    srcCellNull = self.process.proc.srcraw.paramsD[comp]['cellnull']
    dstCellNull = self.dstLayer.comp.cellnull
    if self.process.proc.srcraw.paramsD[comp]['id'] != \
            self.process.proc.srcraw.paramsD[comp]['datafile']:
        exitstr = 'CO2record import: the id and the datafile should have the same name (%s vs %s)' % (
            self.process.proc.srcraw.paramsD[comp]['id'],
            self.process.proc.srcraw.paramsD[comp]['datafile'])
        exit(exitstr)
    queryL = []
    with open(srcFPN) as f:
        reader = csv.reader(f, delimiter=',', skipinitialspace=True)
        #Skip the header row
        header = next(reader)
        for row in reader:
            acqdate = mj_dt.yyyy_mm_dd_Str_ToDate(row[0])
            acqdatestr = mj_dt.DateToStrDate(acqdate)[0:6]
            value = row[3]
            if float(value) == srcCellNull:
                value = dstCellNull
            queryL.append({
                'index': self.process.proc.srcraw.paramsD[comp]['id'],
                'acqdate': acqdate,
                'acqdatestr': acqdatestr,
                'value': value
            })
    self.session._InsertClimateIndex(queryL)
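#As parsed above, the CO2 record file is comma delimited with a header row,
#the acquisition date (yyyy-mm-dd) in the first column and the value to
#import in the fourth column; a sketch (column names and values hypothetical):
#   date,decimal_year,average,interpolated
#   1958-03-01,1958.208,315.71,315.71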
def _ConstructLayer(self, extractD, acqdate):
    '''Construct a standard raster layer from an extract dictionary
    '''
    compD = extractD
    comp = Composition(compD, 'smap', 'region')
    datumD = {'acqdatestr': mj_dt.DateToStrDate(acqdate), 'acqdate': acqdate}
    #Set the locus
    loc = extractD['region']
    #Set the locuspath
    locusPath = extractD['region']
    #Construct the locus dictionary
    locusD = {'locus': loc, 'path': locusPath}
    #Use an empty lambda object as a simple attribute container
    filepath = lambda: None
    filepath.volume = self.process.dstpath.volume
    filepath.hdrfiletype = extractD['fileext']
    #Create and return a standard raster layer
    return RasterLayer(comp, locusD, datumD, filepath)
def _ConstructSmapLayer(self, compD, acqdate, compFormatD):
    '''Construct a global SMAP raster layer from a composition dictionary
    '''
    comp = Composition(compD, self.process.system.dstsystem,
            self.process.system.dstdivision)
    comp._Update(compFormatD)
    datumD = {'acqdatestr': mj_dt.DateToStrDate(acqdate), 'acqdate': acqdate}
    #Set the locus
    loc = 'global'
    #Set the locuspath
    locusPath = 'global'
    #Construct the locus dictionary
    locusD = {'locus': loc, 'path': locusPath}
    #Use an empty lambda object as a simple attribute container
    filepath = lambda: None
    filepath.volume = self.process.dstpath.volume
    filepath.hdrfiletype = self.process.dstpath.hdr
    #Create a standard raster layer
    bandR = RasterLayer(comp, locusD, datumD, filepath)
    return bandR
def DumpExportTable(self, schema, table):
    '''Dump a single db table using pg_dump
    '''
    if self.process.params.dataonly and self.process.params.schemaonly:
        exit('Both dataonly and schemaonly can not be set to TRUE')
    schematab = '%s.%s' % (schema, table)
    username, account, password = self.session._SelectUserSecrets()
    if self.process.dstpath.volume.lower() == 'none':
        vol = ''
    else:
        vol = '/volumes/%s' % (self.process.dstpath.volume)
    FP = path.join(vol, 'SQLdump', schema, table)
    today = kt_dt.DateToStrDate(kt_dt.Today())
    #Set filename
    FN = '%s_sqldump-%s' % (table, self.process.params.format)
    if self.process.params.dataonly:
        FN += '-dataonly'
    elif self.process.params.schemaonly:
        FN += '-schemaonly'
    FN += '_%s.sql' % (today)
    sqlFPN = path.join(FP, FN)
    if not path.exists(FP):
        makedirs(FP)
    if path.isfile(sqlFPN) and not self.process.overwrite:
        return
    host = 'localhost'
    db = 'postgres'
    #pg_dump -h localhost -p 5432 -U karttur -t process.subprocesses -f dbtab.sql postgres
    if self.process.params.cmdpath == 'None':
        cmd = 'pg_dump'
    else:
        cmd = '%s/pg_dump' % (self.process.params.cmdpath)
    if self.process.params.dataonly:
        cmd += ' -h {0} -p 5432 -U {1} -t {2} -F {3} -f {4} -a {5}'\
            .format(host, username, schematab, self.process.params.format, sqlFPN, db)
    elif self.process.params.schemaonly:
        cmd += ' -h {0} -p 5432 -U {1} -t {2} -F {3} -f {4} -s {5}'\
            .format(host, username, schematab, self.process.params.format, sqlFPN, db)
    else:
        cmd += ' -h {0} -p 5432 -U {1} -t {2} -F {3} -f {4} {5}'\
            .format(host, username, schematab, self.process.params.format, sqlFPN, db)
    system(cmd)
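#A usage sketch (schema and table taken from the example comment above):
#   self.DumpExportTable('process', 'subprocesses')
#which, with format 'c', assembles and runs something like:
#   pg_dump -h localhost -p 5432 -U <username> -t process.subprocesses -F c \
#       -f /volumes/<volume>/SQLdump/process/subprocesses/subprocesses_sqldump-c_<yyyymmdd>.sql postgres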