def settings(self):
    """
    Entry point.

    This is called when the user double clicks the routine from the main
    PyGMI interface.  Merges the input rasters, fills the band combo box
    and shows the dialog.

    Returns
    -------
    bool
        True if successful, False otherwise.
    """
    if 'Raster' not in self.indata:
        # Explicit False (was an implicit None) so callers can test the
        # result uniformly.
        return False
    self.indata['Raster'] = dataprep.merge(self.indata['Raster'])
    data = self.indata['Raster']

    blist = [i.dataid for i in data]

    self.cbox_band1.clear()
    self.cbox_band1.addItems(blist)
    self.show()
    QtWidgets.QApplication.processEvents()

    return True
def settings(self, test=False):
    """
    Entry point.

    This is called when the user double clicks the routine from the main
    PyGMI interface.

    Parameters
    ----------
    test : bool, optional
        Parameter indicating testing. The default is False.

    Returns
    -------
    bool
        True if successful, False otherwise.
    """
    if 'Raster' not in self.indata:
        return False

    merged = dataprep.merge(self.indata['Raster'])
    self.indata['Raster'] = merged

    # Populate the band selector with one entry per merged band.
    band_ids = [band.dataid for band in merged]
    self.cbox_band1.clear()
    self.cbox_band1.addItems(band_ids)

    # Suppress the GUI while under test.
    if test is False:
        self.show()
        QtWidgets.QApplication.processEvents()

    return True
def settings(self, test=None):
    """
    Settings Dialog.

    This is the main entrypoint into this routine. It also contains the
    main IGRF code: it reads the IGRF12 coefficient file, evaluates the
    field at every grid cell and appends IGRF, inclination, declination
    and IGRF-corrected magnetic bands to ``self.outdata['Raster']``.

    Parameters
    ----------
    test : optional
        When None (the default) the dialog is executed; any other value
        skips the dialog (used for testing).

    Returns
    -------
    bool
        True if successful, False if the dialog was cancelled.
    """
    # Variable declaration
    # Control variables
    self.proj.set_current(self.indata['Raster'][0].wkt)
    data = dp.merge(self.indata['Raster'])
    self.combobox_dtm.clear()
    self.combobox_mag.clear()
    # Every band is offered as both a DTM and a magnetic candidate.
    for i in data:
        self.combobox_dtm.addItem(i.dataid)
        self.combobox_mag.addItem(i.dataid)
    if len(data) > 1:
        # Default to different bands for DTM and magnetics.
        self.combobox_dtm.setCurrentIndex(1)

    if test is None:
        tmp = self.exec_()
        if tmp == 0:
            return False
    self.acceptall()

    # Load the IGRF12 spherical-harmonic coefficient file shipped next to
    # this module.
    with open(os.path.join(os.path.dirname(__file__),
                           'IGRF12.cof')) as mdf:
        modbuff = mdf.readlines()
    fileline = -1                      # First line will be 1
    model = []
    epoch = []
    max1 = []
    max2 = []
    max3 = []
    yrmin = []
    yrmax = []
    altmin = []
    altmax = []
    irec_pos = []                      # First model will be 0
    # Parse the header records (lines starting with three spaces) of each
    # model in the coefficient file.
    for i in modbuff:
        fileline += 1                  # On new line
        if i[:3] == '   ':
            i2 = i.split()
            model.append(i2[0])
            epoch.append(float(i2[1]))
            max1.append(int(i2[2]))
            max2.append(int(i2[3]))
            max3.append(int(i2[4]))
            yrmin.append(float(i2[5]))
            yrmax.append(float(i2[6]))
            altmin.append(float(i2[7]))
            altmax.append(float(i2[8]))
            irec_pos.append(fileline)

    # Selected magnetic band and DTM band.
    i = self.combobox_mag.currentIndex()
    maggrid = data[i]
    i = self.combobox_dtm.currentIndex()
    data = data[i]
    altgrid = data.data.flatten() * 0.001  # in km

    maxyr = max(yrmax)
    # Decimal year from the date edit widget.
    sdate = self.dateedit.date()
    sdate = sdate.year()+sdate.dayOfYear()/sdate.daysInYear()
    alt = self.dsb_alt.value()

    # Cell-centre coordinate grids (extent is assumed [xmin, ..., ymax] —
    # TODO confirm against the Data class).
    drows, dcols = data.data.shape
    dtlx = data.extent[0]
    dtly = data.extent[-1]
    xrange = dtlx+data.xdim/2.+np.arange(dcols)*data.xdim
    yrange = dtly-data.ydim/2.-np.arange(drows)*data.ydim
    xdat, ydat = np.meshgrid(xrange, yrange)
    xdat = xdat.flatten()
    ydat = ydat.flatten()

    # Output accumulators share the DTM's mask via altgrid.
    igrf_F = altgrid * 0
    igrf_I = altgrid * 0
    igrf_D = altgrid * 0

    # Pick model
    yrmax = np.array(yrmax)
    modelI = sum(yrmax < sdate)
    igdgc = 1

    if maxyr < sdate < maxyr+1:
        self.reportback('Warning: The date ' + str(sdate) +
                        ' is out of range,')
        self.reportback('but still within one year of model expiration'
                        ' date.')
        self.reportback('An updated model file is available before 1.1.' +
                        str(maxyr))

    if max2[modelI] == 0:
        # Interpolate between two bracketing models.
        self.getshc(modbuff, 1, irec_pos[modelI], max1[modelI], 0)
        self.getshc(modbuff, 1, irec_pos[modelI+1], max1[modelI+1], 1)
        nmax = self.interpsh(sdate, yrmin[modelI], max1[modelI],
                             yrmin[modelI+1], max1[modelI+1], 2)
        nmax = self.interpsh(sdate+1, yrmin[modelI], max1[modelI],
                             yrmin[modelI+1], max1[modelI+1], 3)
    else:
        # Extrapolate from the last model using secular variation terms.
        self.getshc(modbuff, 1, irec_pos[modelI], max1[modelI], 0)
        self.getshc(modbuff, 0, irec_pos[modelI], max2[modelI], 1)
        nmax = self.extrapsh(sdate, epoch[modelI], max1[modelI],
                             max2[modelI], 2)
        nmax = self.extrapsh(sdate+1, epoch[modelI], max1[modelI],
                             max2[modelI], 3)

    progress = 0
    maxlen = xdat.size

    alli = []
    alld = []
    allf = []
    # Per-cell field evaluation; masked cells are skipped.
    for i in self.piter(range(maxlen)):
        if igrf_F.mask[i]:
            continue
        tmp = int(i*100/maxlen)
        if tmp > progress:
            progress = tmp

        # Project grid coordinates to geographic lon/lat.
        longitude, latitude, _ = self.ctrans.TransformPoint(xdat[i],
                                                            ydat[i])
        alt = altgrid[i]

        # Do the first calculations
        self.shval3(igdgc, latitude, longitude, alt, nmax, 3)
        self.dihf(3)

        # self.f/.i/.d are set as side effects of dihf().
        igrf_F[i] = self.f
        igrf_I[i] = np.rad2deg(self.i)
        igrf_D[i] = np.rad2deg(self.d)
        alli.append(self.i)
        alld.append(self.d)
        allf.append(self.f)

    fmean = np.mean(allf)
    imean = np.rad2deg(np.mean(alli))
    dmean = np.rad2deg(np.mean(alld))

    # Band name records the mean field parameters used in the correction.
    bname = 'Magnetic Data: IGRF Corrected '
    bname = bname + 'F:{0:.2f} I:{1:.2f} D:{2:.2f}'
    bname = bname.format(fmean, imean, dmean)

    self.outdata['Raster'] = copy.deepcopy(self.indata['Raster'])

    # Reshape flat results back to grid shape and re-apply the DTM mask.
    igrf_F = np.ma.array(igrf_F)
    igrf_F.shape = data.data.shape
    igrf_F.mask = np.ma.getmaskarray(data.data)
    igrf_I = np.ma.array(igrf_I)
    igrf_I.shape = data.data.shape
    igrf_I.mask = np.ma.getmaskarray(data.data)
    igrf_D = np.ma.array(igrf_D)
    igrf_D.shape = data.data.shape
    igrf_D.mask = np.ma.getmaskarray(data.data)

    self.outdata['Raster'].append(copy.deepcopy(data))
    self.outdata['Raster'][-1].data = igrf_F
    self.outdata['Raster'][-1].dataid = 'IGRF'
    self.outdata['Raster'].append(copy.deepcopy(data))
    self.outdata['Raster'][-1].data = igrf_I
    self.outdata['Raster'][-1].dataid = 'Inclinations'
    self.outdata['Raster'].append(copy.deepcopy(data))
    self.outdata['Raster'][-1].data = igrf_D
    self.outdata['Raster'][-1].dataid = 'Declinations'
    # The corrected band is the chosen magnetic grid minus the IGRF field.
    self.outdata['Raster'].append(copy.deepcopy(maggrid))
    self.outdata['Raster'][-1].data -= igrf_F
    self.outdata['Raster'][-1].dataid = bname

    self.reportback('')
    self.reportback('Mean Values in Calculation')
    self.reportback('=============================')
    self.reportback('Total Intensity: {0:.2f}'.format(fmean))
    self.reportback('Inclination: {0:.2f}'.format(imean))
    self.reportback('Declination: {0:.2f}'.format(dmean))
    self.reportback('')
    self.reportback('Calculation: Completed', True)

    return True
def export_gdal(self, dat, drv):
    """
    Export to GDAL format.

    Merges the bands, picks a GDAL data type and file extension from the
    driver, creates the dataset (with driver-specific creation options)
    and writes each band with its no-data value.

    Parameters
    ----------
    dat : PyGMI raster Data
        dataset to export
    drv : str
        name of the GDAL driver to use

    Returns
    -------
    None.
    """
    data = merge(dat)
    driver = gdal.GetDriverByName(drv)

    # Map the numpy dtype of the first band to a GDAL type; anything not
    # listed falls back to 32-bit float.
    dtype = data[0].data.dtype
    if dtype == np.uint8:
        fmt = gdal.GDT_Byte
    elif dtype == np.int32:
        fmt = gdal.GDT_Int32
    elif dtype == np.float64:
        fmt = gdal.GDT_Float64
    else:
        fmt = gdal.GDT_Float32

    # Choose the output file name/extension per driver.  EHdr and GSBG
    # only support 32-bit floats, so both fmt and dtype are forced.
    tmp = self.ifile.rpartition('.')
    if drv == 'GTiff':
        tmpfile = tmp[0] + '.tif'
    elif drv == 'EHdr':
        fmt = gdal.GDT_Float32
        dtype = np.float32
        tmpfile = tmp[0] + '.bil'
    elif drv == 'GSBG':
        tmpfile = tmp[0] + '.grd'
        fmt = gdal.GDT_Float32
        dtype = np.float32
    elif drv == 'SAGA':
        tmpfile = tmp[0] + '.sdat'
    elif drv == 'HFA':
        tmpfile = tmp[0] + '.img'
    else:  # ENVI and ER Mapper
        tmpfile = tmp[0]

    drows, dcols = data[0].data.shape

    # Driver-specific creation options.  For ERS, South African TM
    # projections get explicit PROJ/DATUM strings derived from the WKT.
    if drv == 'GTiff' and dtype == np.uint8:
        out = driver.Create(tmpfile, int(dcols), int(drows), len(data),
                            fmt, options=['COMPRESS=NONE', 'TFW=YES'])
    elif drv == 'ERS' and 'Cape / TM' in data[0].wkt:
        tmp = data[0].wkt.split('TM')[1][:2]
        out = driver.Create(
            tmpfile, int(dcols), int(drows), len(data), fmt,
            options=['PROJ=STMLO' + tmp, 'DATUM=CAPE', 'UNITS=METERS'])
    elif drv == 'ERS' and 'Hartebeesthoek94 / TM' in data[0].wkt:
        tmp = data[0].wkt.split('TM')[1][:2]
        out = driver.Create(
            tmpfile, int(dcols), int(drows), len(data), fmt,
            options=['PROJ=STMLO' + tmp, 'DATUM=WGS84', 'UNITS=METERS'])
    else:
        out = driver.Create(tmpfile, int(dcols), int(drows), len(data),
                            fmt)
    out.SetGeoTransform(data[0].get_gtr())
    out.SetProjection(data[0].wkt)

    for i, datai in enumerate(data):
        rtmp = out.GetRasterBand(i + 1)
        rtmp.SetDescription(datai.dataid)

        # Fill masked cells with the band's null value before writing.
        dtmp = np.ma.array(datai.data).astype(dtype)
        dtmp.set_fill_value(datai.nullvalue)
        dtmp = dtmp.filled()

        if dtype == np.uint8:
            # GDAL SetNoDataValue for byte data needs a Python int.
            datai.nullvalue = int(datai.nullvalue)

        rtmp.SetNoDataValue(datai.nullvalue)
        rtmp.WriteArray(dtmp)
        # Force statistics so viewers get correct display ranges.
        rtmp.GetStatistics(False, True)

    out = None  # Close File

    # ENVI headers do not store the no-data value; append it manually.
    if drv == 'ENVI':
        with open(tmpfile + '.hdr', 'a') as myfile:
            myfile.write('data ignore value = ' + str(data[0].nullvalue))
def settings(self):
    """
    Entry point into item.

    Builds the band list, shows the equation-editor dialog, evaluates
    the entered expression with numexpr and stores the result bands.

    Returns
    -------
    bool
        True if successful, False otherwise.
    """
    localdict = {}
    bandsall = []
    self.bands = {}
    self.bands['all data'] = 'iall'
    self.combobox.clear()
    self.combobox.addItem('all data')

    if 'Cluster' in self.indata:
        intype = 'Cluster'
    elif 'Raster' in self.indata:
        intype = 'Raster'
    else:
        self.parent.showprocesslog('No raster data')
        # Explicit False (was an implicit None) on every failure path so
        # callers can test the result uniformly.
        return False

    indata = dataprep.merge(self.indata[intype])

    # Each band is exposed to the equation as i0, i1, ...; 'iall' is the
    # stack of all bands.
    for j, i in enumerate(indata):
        self.combobox.addItem(i.dataid)
        self.bands[i.dataid] = 'i' + str(j)
        bandsall.append(i.data)
        localdict['i' + str(j)] = i.data
    localdict['iall'] = np.ma.array(bandsall)

    temp = self.exec_()
    if temp == 0:
        return False

    equation = self.textbrowser.toPlainText()
    if equation == '':
        return False

    neweq = self.eq_fix(indata, equation)

    try:
        findat = ne.evaluate(neweq, localdict)
    except Exception:
        QtWidgets.QMessageBox.warning(
            self.parent, 'Error',
            ' Nothing processed! Your equation most likely had an error.',
            QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok)
        return False

    outdata = []

    if np.size(findat) == 1:
        QtWidgets.QMessageBox.warning(
            self.parent, 'Warning',
            ' Nothing processed! Your equation outputs a single ' +
            'value instead of a minimum of one band.',
            QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok)
        return False

    # Normalise a single 2-D result to a stack of one band.
    if len(findat.shape) == 2:
        findat.shape = (1, findat.shape[0], findat.shape[1])

    for i, findati in enumerate(findat):
        # Re-impose the source band's mask via its null value.
        mask = np.ma.getmaskarray(indata[i].data)
        findati[mask] = indata[i].nullvalue
        outdata.append(copy.copy(indata[i]))
        outdata[-1].data = np.ma.masked_equal(findati,
                                              indata[i].nullvalue)

    # This is needed to get rid of bad, unmasked values etc.
    for i, outdatai in enumerate(outdata):
        outdatai.data.set_fill_value(indata[i].nullvalue)
        outdatai.data = np.ma.fix_invalid(outdatai.data)

    if len(outdata) == 1:
        outdata[0].dataid = equation

    self.outdata[intype] = outdata

    return True
def settings(self, equation=None):
    """
    Entry point into item.

    Builds the band list, shows the equation-editor dialog (unless an
    equation is supplied for testing), evaluates the expression with
    numexpr and stores the result bands.

    Parameters
    ----------
    equation : str, optional
        Equation submitted (for testing). The default is None.

    Returns
    -------
    bool
        True if successful, False otherwise.
    """
    localdict = {}
    bandsall = []
    self.bands = {}
    self.bands['all data'] = 'iall'
    self.combobox.clear()
    self.combobox.addItem('all data')

    if 'Cluster' in self.indata:
        intype = 'Cluster'
    elif 'Raster' in self.indata:
        intype = 'Raster'
    else:
        print('No raster data')
        return False

    indata = dataprep.merge(self.indata[intype])

    # Each band is exposed to the equation as i0, i1, ...; 'iall' is the
    # stack of all bands (added after localdict_list is captured, so it
    # is deliberately not a 'used band' below).
    for j, i in enumerate(indata):
        self.combobox.addItem(i.dataid)
        self.bands[i.dataid] = 'i' + str(j)
        bandsall.append(i.data)
        localdict['i' + str(j)] = i.data

    localdict_list = list(localdict.keys())
    localdict['iall'] = np.ma.array(bandsall)

    if equation is None:
        temp = self.exec_()
        if temp == 0:
            return False
        equation = self.textbrowser.toPlainText()

    if equation == '':
        return False

    # Combined mask of every band referenced in the equation.
    usedbands = [i for i in localdict_list if i in equation]
    mask = None
    for i in usedbands:
        if mask is None:
            mask = localdict[i].mask
        else:
            mask = np.logical_or(mask, localdict[i].mask)

    neweq = self.eq_fix(indata, equation)

    try:
        findat = ne.evaluate(neweq, localdict)
    except Exception:
        QtWidgets.QMessageBox.warning(
            self.parent, 'Error',
            ' Nothing processed! Your equation most likely had an error.',
            QtWidgets.QMessageBox.Ok)
        return False

    outdata = []

    if np.size(findat) == 1:
        QtWidgets.QMessageBox.warning(
            self.parent, 'Warning',
            ' Nothing processed! Your equation outputs a single ' +
            'value instead of a minimum of one band.',
            QtWidgets.QMessageBox.Ok)
        return False

    # Normalise a single 2-D result to a stack of one band.
    if len(findat.shape) == 2:
        findat.shape = (1, findat.shape[0], findat.shape[1])

    for i, findati in enumerate(findat):
        # BUG FIX: when the equation referenced no individual band (e.g.
        # it only used 'iall'), mask was None and 'findati[None] = ...'
        # broadcast the null value over the WHOLE band, wiping the
        # result.  Fall back to the source band's own mask instead.
        if mask is None:
            bmask = np.ma.getmaskarray(indata[i].data)
        else:
            bmask = mask
        findati[bmask] = indata[i].nullvalue
        outdata.append(copy.copy(indata[i]))
        outdata[-1].data = np.ma.masked_equal(findati,
                                              indata[i].nullvalue)
        outdata[-1].nullvalue = indata[i].nullvalue

    # This is needed to get rid of bad, unmasked values etc.
    for i, outdatai in enumerate(outdata):
        outdatai.data.set_fill_value(indata[i].nullvalue)
        outdatai.data = np.ma.fix_invalid(outdatai.data)

    if len(outdata) == 1:
        outdata[0].dataid = equation

    self.outdata[intype] = outdata

    return True
def settings(self):
    """
    Settings Dialog.

    This is the main entrypoint into this routine. It also contains the
    main IGRF code: it reads the IGRF11 coefficient file, evaluates the
    total field at every grid cell and appends IGRF and IGRF-corrected
    magnetic bands to ``self.outdata['Raster']``.

    Returns
    -------
    bool
        True if successful, False if the dialog was cancelled.
    """
    # Variable declaration
    # Control variables
    self.proj.set_current(self.indata['Raster'][0].wkt)
    data = dp.merge(self.indata['Raster'])
    self.combobox_dtm.clear()
    self.combobox_mag.clear()
    for i in data:
        self.combobox_dtm.addItem(i.dataid)
        self.combobox_mag.addItem(i.dataid)

    tmp = self.exec_()
    if tmp != 1:
        return False
    self.acceptall()

    # BUG FIX: the coefficient file was opened with a hard-coded '\\'
    # separator (Windows-only) and never closed.  Use os.path and a
    # context manager instead.
    with open(os.path.join(os.path.dirname(__file__),
                           'IGRF11.cof')) as mdf:
        modbuff = mdf.readlines()
    fileline = -1                      # First line will be 1
    model = []
    epoch = []
    max1 = []
    max2 = []
    max3 = []
    yrmin = []
    yrmax = []
    altmin = []
    altmax = []
    irec_pos = []                      # First model will be 0
    # Parse the header records (lines starting with three spaces) of
    # each model in the coefficient file.
    for i in modbuff:
        fileline += 1                  # On new line
        if i[:3] == '   ':
            i2 = i.split()
            model.append(i2[0])
            epoch.append(float(i2[1]))
            max1.append(int(i2[2]))
            max2.append(int(i2[3]))
            max3.append(int(i2[4]))
            yrmin.append(float(i2[5]))
            yrmax.append(float(i2[6]))
            altmin.append(float(i2[7]))
            altmax.append(float(i2[8]))
            irec_pos.append(fileline)

    i = self.combobox_mag.currentIndex()
    maggrid = data[i]
    i = self.combobox_dtm.currentIndex()
    data = data[i]
    altgrid = data.data.flatten() * 0.001  # in km

    maxyr = max(yrmax)
    # Decimal year from the date edit widget.
    sdate = self.dateedit.date()
    sdate = sdate.year() + sdate.dayOfYear() / sdate.daysInYear()
    alt = self.dsb_alt.value()

    # Cell-centre coordinate grids.
    xrange = data.tlx + data.xdim / 2. + np.arange(data.cols) * data.xdim
    yrange = data.tly - data.ydim / 2. - np.arange(data.rows) * data.ydim
    xdat, ydat = np.meshgrid(xrange, yrange)
    xdat = xdat.flatten()
    ydat = ydat.flatten()

    # Output accumulator shares the DTM's mask via altgrid.
    igrf_F = altgrid * 0

    # Pick model
    yrmax = np.array(yrmax)
    modelI = sum(yrmax < sdate)
    igdgc = 1

    if (sdate > maxyr) and (sdate < maxyr + 1):
        self.reportback("Warning: The date " + str(sdate) +
                        " is out of range,")
        self.reportback("but still within one year of model expiration"
                        " date.")
        self.reportback("An updated model file is available before 1.1." +
                        str(maxyr))

    if max2[modelI] == 0:
        # Interpolate between two bracketing models.
        self.getshc(modbuff, 1, irec_pos[modelI], max1[modelI], 0)
        self.getshc(modbuff, 1, irec_pos[modelI + 1], max1[modelI + 1], 1)
        nmax = self.interpsh(sdate, yrmin[modelI], max1[modelI],
                             yrmin[modelI + 1], max1[modelI + 1], 2)
        nmax = self.interpsh(sdate + 1, yrmin[modelI], max1[modelI],
                             yrmin[modelI + 1], max1[modelI + 1], 3)
    else:
        # Extrapolate from the last model using secular variation terms.
        self.getshc(modbuff, 1, irec_pos[modelI], max1[modelI], 0)
        self.getshc(modbuff, 0, irec_pos[modelI], max2[modelI], 1)
        nmax = self.extrapsh(sdate, epoch[modelI], max1[modelI],
                             max2[modelI], 2)
        nmax = self.extrapsh(sdate + 1, epoch[modelI], max1[modelI],
                             max2[modelI], 3)

    progress = 0
    maxlen = xdat.size

    # Per-cell field evaluation; masked cells are skipped.
    for i in self.pbar.iter(range(maxlen)):
        if igrf_F.mask[i]:
            continue
        tmp = int(i * 100 / maxlen)
        if tmp > progress:
            progress = tmp

        longitude, latitude, _ = self.ctrans.TransformPoint(
            xdat[i], ydat[i])
        alt = altgrid[i]

        # Do the first calculations
        self.shval3(igdgc, latitude, longitude, alt, nmax, 3)
        self.dihf(3)

        # self.f is set as a side effect of dihf().
        igrf_F[i] = self.f

    self.outdata['Raster'] = copy.deepcopy(self.indata['Raster'])

    # Reshape flat results back to grid shape and re-apply the DTM mask.
    igrf_F = np.ma.array(igrf_F)
    igrf_F.shape = data.data.shape
    igrf_F.mask = np.ma.getmaskarray(data.data)
    self.outdata['Raster'].append(copy.deepcopy(data))
    self.outdata['Raster'][-1].data = igrf_F
    self.outdata['Raster'][-1].dataid = 'IGRF'
    # The corrected band is the chosen magnetic grid minus the IGRF.
    self.outdata['Raster'].append(copy.deepcopy(maggrid))
    self.outdata['Raster'][-1].data -= igrf_F
    self.outdata['Raster'][-1].dataid = 'Magnetic Data: IGRF Corrected'

    self.reportback('')
    self.reportback('Latest Values in Calculation')
    self.reportback('=============================')
    self.reportback('Total Intensity: ' + str(self.f))
    self.reportback('Inclination: ' + str(np.rad2deg(self.i)))
    self.reportback('Declination: ' + str(np.rad2deg(self.d)))
    self.reportback('')
    self.reportback('Calculation: Completed', True)

    return True
def settings(self):
    """
    Settings Dialog.

    This is the main entrypoint into this routine. It also contains the
    main IGRF code: it reads the IGRF12 coefficient file, evaluates the
    total field at every grid cell and appends IGRF and IGRF-corrected
    magnetic bands to ``self.outdata['Raster']``.

    Returns
    -------
    bool
        True if successful, False if the dialog was cancelled.
    """
    # Variable declaration
    # Control variables
    self.proj.set_current(self.indata['Raster'][0].wkt)
    data = dp.merge(self.indata['Raster'])
    self.combobox_dtm.clear()
    self.combobox_mag.clear()
    for i in data:
        self.combobox_dtm.addItem(i.dataid)
        self.combobox_mag.addItem(i.dataid)

    tmp = self.exec_()
    if tmp != 1:
        return False
    self.acceptall()

    # BUG FIX: the coefficient file handle was never closed; use a
    # context manager.
    with open(os.path.join(os.path.dirname(__file__),
                           'IGRF12.cof')) as mdf:
        modbuff = mdf.readlines()
    fileline = -1                      # First line will be 1
    model = []
    epoch = []
    max1 = []
    max2 = []
    max3 = []
    yrmin = []
    yrmax = []
    altmin = []
    altmax = []
    irec_pos = []                      # First model will be 0
    # Parse the header records (lines starting with three spaces) of
    # each model in the coefficient file.
    for i in modbuff:
        fileline += 1                  # On new line
        if i[:3] == '   ':
            i2 = i.split()
            model.append(i2[0])
            epoch.append(float(i2[1]))
            max1.append(int(i2[2]))
            max2.append(int(i2[3]))
            max3.append(int(i2[4]))
            yrmin.append(float(i2[5]))
            yrmax.append(float(i2[6]))
            altmin.append(float(i2[7]))
            altmax.append(float(i2[8]))
            irec_pos.append(fileline)

    i = self.combobox_mag.currentIndex()
    maggrid = data[i]
    i = self.combobox_dtm.currentIndex()
    data = data[i]
    altgrid = data.data.flatten() * 0.001  # in km

    maxyr = max(yrmax)
    # Decimal year from the date edit widget.
    sdate = self.dateedit.date()
    sdate = sdate.year()+sdate.dayOfYear()/sdate.daysInYear()
    alt = self.dsb_alt.value()

    # Cell-centre coordinate grids.
    xrange = data.tlx+data.xdim/2.+np.arange(data.cols)*data.xdim
    yrange = data.tly-data.ydim/2.-np.arange(data.rows)*data.ydim
    xdat, ydat = np.meshgrid(xrange, yrange)
    xdat = xdat.flatten()
    ydat = ydat.flatten()

    # Output accumulator shares the DTM's mask via altgrid.
    igrf_F = altgrid * 0

    # Pick model
    yrmax = np.array(yrmax)
    modelI = sum(yrmax < sdate)
    igdgc = 1

    if (sdate > maxyr) and (sdate < maxyr+1):
        self.reportback("Warning: The date " + str(sdate) +
                        " is out of range,")
        self.reportback("but still within one year of model expiration"
                        " date.")
        self.reportback("An updated model file is available before 1.1." +
                        str(maxyr))

    if max2[modelI] == 0:
        # Interpolate between two bracketing models.
        self.getshc(modbuff, 1, irec_pos[modelI], max1[modelI], 0)
        self.getshc(modbuff, 1, irec_pos[modelI+1], max1[modelI+1], 1)
        nmax = self.interpsh(sdate, yrmin[modelI], max1[modelI],
                             yrmin[modelI+1], max1[modelI+1], 2)
        nmax = self.interpsh(sdate+1, yrmin[modelI], max1[modelI],
                             yrmin[modelI+1], max1[modelI+1], 3)
    else:
        # Extrapolate from the last model using secular variation terms.
        self.getshc(modbuff, 1, irec_pos[modelI], max1[modelI], 0)
        self.getshc(modbuff, 0, irec_pos[modelI], max2[modelI], 1)
        nmax = self.extrapsh(sdate, epoch[modelI], max1[modelI],
                             max2[modelI], 2)
        nmax = self.extrapsh(sdate+1, epoch[modelI], max1[modelI],
                             max2[modelI], 3)

    progress = 0
    maxlen = xdat.size

    # Per-cell field evaluation; masked cells are skipped.
    for i in self.pbar.iter(range(maxlen)):
        if igrf_F.mask[i]:
            continue
        tmp = int(i*100/maxlen)
        if tmp > progress:
            progress = tmp

        longitude, latitude, _ = self.ctrans.TransformPoint(xdat[i],
                                                            ydat[i])
        alt = altgrid[i]

        # Do the first calculations
        self.shval3(igdgc, latitude, longitude, alt, nmax, 3)
        self.dihf(3)

        # self.f is set as a side effect of dihf().
        igrf_F[i] = self.f

    self.outdata['Raster'] = copy.deepcopy(self.indata['Raster'])

    # Reshape flat results back to grid shape and re-apply the DTM mask.
    igrf_F = np.ma.array(igrf_F)
    igrf_F.shape = data.data.shape
    igrf_F.mask = np.ma.getmaskarray(data.data)
    self.outdata['Raster'].append(copy.deepcopy(data))
    self.outdata['Raster'][-1].data = igrf_F
    self.outdata['Raster'][-1].dataid = 'IGRF'
    # The corrected band is the chosen magnetic grid minus the IGRF.
    self.outdata['Raster'].append(copy.deepcopy(maggrid))
    self.outdata['Raster'][-1].data -= igrf_F
    self.outdata['Raster'][-1].dataid = 'Magnetic Data: IGRF Corrected'

    self.reportback('')
    self.reportback('Latest Values in Calculation')
    self.reportback('=============================')
    self.reportback('Total Intensity: '+str(self.f))
    self.reportback('Inclination: '+str(np.rad2deg(self.i)))
    self.reportback('Declination: '+str(np.rad2deg(self.d)))
    self.reportback('')
    self.reportback('Calculation: Completed', True)

    return True
def export_gdal(self, dat, drv):
    """
    Export to GDAL format.

    Merges the bands, picks a GDAL data type and file extension from the
    driver, creates the dataset (with driver-specific creation options)
    and writes each band with its no-data value.

    Parameters
    ----------
    dat : PyGMI raster Data
        dataset to export
    drv : str
        name of the GDAL driver to use

    Returns
    -------
    None.
    """
    data = merge(dat)
    xmin = data[0].tlx
    ymax = data[0].tly

    driver = gdal.GetDriverByName(drv)

    # Map the numpy dtype of the first band to a GDAL type; anything not
    # listed falls back to 32-bit float.
    dtype = data[0].data.dtype
    if dtype == np.uint8:
        fmt = gdal.GDT_Byte
    elif dtype == np.int32:
        fmt = gdal.GDT_Int32
    elif dtype == np.float64:
        fmt = gdal.GDT_Float64
    else:
        fmt = gdal.GDT_Float32

    tmp = self.ifile.rpartition('.')

    if drv == 'GTiff':
        tmpfile = tmp[0] + '.tif'
    elif drv == 'EHdr':
        # EHdr only supports 32-bit floats.
        fmt = gdal.GDT_Float32
        dtype = np.float32
        tmpfile = tmp[0] + '.bil'
    else:
        tmpfile = tmp[0]

    # Driver-specific creation options.  For ERS, South African TM
    # projections get explicit PROJ/DATUM strings derived from the WKT.
    if drv == 'GTiff' and dtype == np.uint8:
        out = driver.Create(tmpfile, int(data[0].cols),
                            int(data[0].rows), len(data), fmt,
                            options=['COMPRESS=NONE', 'TFW=YES'])
    elif drv == 'ERS' and 'Cape / TM' in data[0].wkt:
        tmp = data[0].wkt.split('TM')[1][:2]
        out = driver.Create(tmpfile, int(data[0].cols),
                            int(data[0].rows), len(data), fmt,
                            options=['PROJ=STMLO'+tmp, 'DATUM=CAPE',
                                     'UNITS=METERS'])
    elif drv == 'ERS' and 'Hartebeesthoek94 / TM' in data[0].wkt:
        tmp = data[0].wkt.split('TM')[1][:2]
        out = driver.Create(tmpfile, int(data[0].cols),
                            int(data[0].rows), len(data), fmt,
                            options=['PROJ=STMLO'+tmp, 'DATUM=WGS84',
                                     'UNITS=METERS'])
    else:
        out = driver.Create(tmpfile, int(data[0].cols),
                            int(data[0].rows), len(data), fmt)

    out.SetGeoTransform([xmin, data[0].xdim, 0, ymax, 0, -data[0].ydim])
    out.SetProjection(data[0].wkt)

    for i, datai in enumerate(data):
        rtmp = out.GetRasterBand(i+1)
        rtmp.SetDescription(datai.dataid)

        dtmp = np.ma.array(datai.data).astype(dtype)

        # This section tries to overcome null values with round off error
        # in 32-bit numbers.
        if dtype == np.float32:
            datai.nullvalue = np.float64(np.float32(datai.nullvalue))
            if datai.data.min() > -1e+10:
                datai.nullvalue = np.float64(np.float32(-1e+10))
            elif datai.data.max() < 1e+10:
                datai.nullvalue = np.float64(np.float32(1e+10))
        elif dtype == np.float64:
            # BUG FIX: this branch tested 'np.float', an alias removed in
            # NumPy 1.24 (AttributeError at runtime).  np.float was the
            # builtin float, so 'dtype == np.float64' covers it.
            datai.nullvalue = np.float64(dtmp.fill_value)
        elif dtype == np.uint8:
            datai.nullvalue = 0  # specify 0, since fill value is 999999
        elif dtype == np.int32:
            # NOTE(review): uint32 for an int32 band looks suspicious —
            # confirm negative fill values are intended to wrap here.
            datai.nullvalue = np.uint32(dtmp.fill_value)

        dtmp.set_fill_value(datai.nullvalue)
        dtmp = dtmp.filled()

        if dtype == np.uint8:
            datai.nullvalue = int(datai.nullvalue)

        # GTiff only records a single no-data value per dataset.
        if drv != 'GTiff':
            rtmp.SetNoDataValue(datai.nullvalue)
        elif len(data) == 1:
            rtmp.SetNoDataValue(datai.nullvalue)
        rtmp.WriteArray(dtmp)

    out = None  # Close File

    # ENVI headers do not store the no-data value; append it manually.
    if drv == 'ENVI':
        with open(tmpfile+'.hdr', 'a') as myfile:
            myfile.write('data ignore value = ' + str(data[0].nullvalue))
def settings(self):
    """
    Entry point into item.

    Builds the band list, shows the equation-editor dialog, evaluates
    the entered expression with numexpr and stores the result bands.

    Returns
    -------
    bool
        True if successful, False otherwise.
    """
    localdict = {}
    bandsall = []
    self.bands = {}
    self.bands['all data'] = 'iall'
    self.combobox.clear()
    self.combobox.addItem('all data')

    if 'Cluster' in self.indata:
        intype = 'Cluster'
    elif 'Raster' in self.indata:
        intype = 'Raster'
    else:
        self.parent.showprocesslog('No raster data')
        # Explicit False (was an implicit None) on every failure path so
        # callers can test the result uniformly.
        return False

    indata = dataprep.merge(self.indata[intype])

    # Each band is exposed to the equation as i0, i1, ...; 'iall' is the
    # stack of all bands.
    for j, i in enumerate(indata):
        self.combobox.addItem(i.dataid)
        self.bands[i.dataid] = 'i'+str(j)
        bandsall.append(i.data)
        localdict['i'+str(j)] = i.data
    localdict['iall'] = np.ma.array(bandsall)

    temp = self.exec_()
    if temp == 0:
        return False

    equation = self.textbrowser.toPlainText()
    if equation == '':
        return False

    neweq = self.eq_fix(indata, equation)

    try:
        findat = ne.evaluate(neweq, localdict)
    except Exception:
        QtWidgets.QMessageBox.warning(
            self.parent, 'Error',
            ' Nothing processed! Your equation most likely had an error.',
            QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok)
        return False

    outdata = []

    if np.size(findat) == 1:
        QtWidgets.QMessageBox.warning(
            self.parent, 'Warning',
            ' Nothing processed! Your equation outputs a single ' +
            'value instead of a minimum of one band.',
            QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok)
        return False

    # Normalise a single 2-D result to a stack of one band.
    if len(findat.shape) == 2:
        findat.shape = (1, findat.shape[0], findat.shape[1])

    for i, findati in enumerate(findat):
        # Re-impose the source band's mask via its null value.
        mask = np.ma.getmaskarray(indata[i].data)
        findati[mask] = indata[i].nullvalue
        outdata.append(copy.copy(indata[i]))
        outdata[-1].data = np.ma.masked_equal(findati,
                                              indata[i].nullvalue)

    # This is needed to get rid of bad, unmasked values etc.
    for i, outdatai in enumerate(outdata):
        outdatai.data.set_fill_value(indata[i].nullvalue)
        outdatai.data = np.ma.fix_invalid(outdatai.data)

    if len(outdata) == 1:
        outdata[0].dataid = equation

    self.outdata[intype] = outdata

    return True
def export_gdal(self, dat, drv):
    """
    Export to GDAL format.

    Merges the bands, picks a GDAL data type and file extension from the
    driver, creates the dataset (with driver-specific creation options)
    and writes each band with its no-data value.

    Parameters
    ----------
    dat : PyGMI raster Data
        dataset to export
    drv : str
        name of the GDAL driver to use

    Returns
    -------
    None.
    """
    data = merge(dat)
    xmin = data[0].tlx
    ymax = data[0].tly

    driver = gdal.GetDriverByName(drv)

    # Map the numpy dtype of the first band to a GDAL type; anything not
    # listed falls back to 32-bit float.
    dtype = data[0].data.dtype
    if dtype == np.uint8:
        fmt = gdal.GDT_Byte
    elif dtype == np.int32:
        fmt = gdal.GDT_Int32
    elif dtype == np.float64:
        fmt = gdal.GDT_Float64
    else:
        fmt = gdal.GDT_Float32

    tmp = self.ifile.rpartition('.')

    if drv == 'GTiff':
        tmpfile = tmp[0] + '.tif'
    elif drv == 'EHdr':
        # EHdr only supports 32-bit floats.
        fmt = gdal.GDT_Float32
        dtype = np.float32
        tmpfile = tmp[0] + '.bil'
    else:
        tmpfile = tmp[0]

    # Driver-specific creation options.  For ERS, South African TM
    # projections get explicit PROJ/DATUM strings derived from the WKT.
    if drv == 'GTiff' and dtype == np.uint8:
        out = driver.Create(tmpfile, int(data[0].cols),
                            int(data[0].rows), len(data), fmt,
                            options=['COMPRESS=NONE', 'TFW=YES'])
    elif drv == 'ERS' and 'Cape / TM' in data[0].wkt:
        tmp = data[0].wkt.split('TM')[1][:2]
        out = driver.Create(
            tmpfile, int(data[0].cols), int(data[0].rows), len(data),
            fmt, options=['PROJ=STMLO' + tmp, 'DATUM=CAPE',
                          'UNITS=METERS'])
    elif drv == 'ERS' and 'Hartebeesthoek94 / TM' in data[0].wkt:
        tmp = data[0].wkt.split('TM')[1][:2]
        out = driver.Create(
            tmpfile, int(data[0].cols), int(data[0].rows), len(data),
            fmt, options=['PROJ=STMLO' + tmp, 'DATUM=WGS84',
                          'UNITS=METERS'])
    else:
        out = driver.Create(tmpfile, int(data[0].cols),
                            int(data[0].rows), len(data), fmt)

    out.SetGeoTransform([xmin, data[0].xdim, 0, ymax, 0, -data[0].ydim])
    out.SetProjection(data[0].wkt)

    for i, datai in enumerate(data):
        rtmp = out.GetRasterBand(i + 1)
        rtmp.SetDescription(datai.dataid)

        dtmp = np.ma.array(datai.data).astype(dtype)

        # This section tries to overcome null values with round off error
        # in 32-bit numbers.
        if dtype == np.float32:
            datai.nullvalue = np.float64(np.float32(datai.nullvalue))
            if datai.data.min() > -1e+10:
                datai.nullvalue = np.float64(np.float32(-1e+10))
            elif datai.data.max() < 1e+10:
                datai.nullvalue = np.float64(np.float32(1e+10))
        elif dtype == np.float64:
            # BUG FIX: this branch tested 'np.float', an alias removed in
            # NumPy 1.24 (AttributeError at runtime).  np.float was the
            # builtin float, so 'dtype == np.float64' covers it.
            datai.nullvalue = np.float64(dtmp.fill_value)
        elif dtype == np.uint8:
            datai.nullvalue = 0  # specify 0, since fill value is 999999
        elif dtype == np.int32:
            # NOTE(review): uint32 for an int32 band looks suspicious —
            # confirm negative fill values are intended to wrap here.
            datai.nullvalue = np.uint32(dtmp.fill_value)

        dtmp.set_fill_value(datai.nullvalue)
        dtmp = dtmp.filled()

        if dtype == np.uint8:
            datai.nullvalue = int(datai.nullvalue)

        # GTiff only records a single no-data value per dataset.
        if drv != 'GTiff':
            rtmp.SetNoDataValue(datai.nullvalue)
        elif len(data) == 1:
            rtmp.SetNoDataValue(datai.nullvalue)
        rtmp.WriteArray(dtmp)

    out = None  # Close File

    # ENVI headers do not store the no-data value; append it manually.
    if drv == 'ENVI':
        with open(tmpfile + '.hdr', 'a') as myfile:
            myfile.write('data ignore value = ' + str(data[0].nullvalue))