def zoom(self):
    layer = None
    for lyr in QgsProject.instance().layerStore().mapLayers().values():
        if self.lyr_source in os.path.normpath(lyr.source()):
            layer = lyr
            break
    if not layer:
        raise LookupError(
            'Unable to locate layer for extent for admin code {}'.format(
                self.admin_code))

    # Note that this layer will have the selected admin region filtered out,
    # so that data will not be masked in this area. So need to temporarily
    # remove this filter and then reapply it.
    subset_string = layer.subsetString()
    layer.setSubsetString('')

    feature = None
    for f in layer.getFeatures():
        if f.attribute(self.field) == self.admin_code:
            feature = f
            break
    if not feature:
        raise LookupError(
            'Unable to locate polygon for admin code {}'.format(
                self.admin_code))

    # TODO: Need to reproject the geometry to match the canvas CRS
    self.canvas = iface.mapCanvas()

    # Reapply the original feature filter on this layer
    layer.setSubsetString(subset_string)

    self.bbox = feature.geometry().boundingBox()
    log('Bounding box for zoom is: {}'.format(self.bbox.toString()))
    self.canvas.setExtent(self.bbox)
    self.canvas.refresh()

def get_execution(id=None, date=None):
    log('Fetching executions')
    query = ['include=script']
    if id:
        query.append(u'user_id={}'.format(quote_plus(id)))
    if date:
        query.append(u'updated_at={}'.format(date))
    query = "?" + "&".join(query)

    resp = call_api(u'/api/v1/execution{}'.format(query), method='get',
                    use_token=True)
    if not resp:
        return None

    data = resp['data']
    # Sort responses in descending order using start time by default
    data = sorted(data, key=lambda job: job['start_date'], reverse=True)
    # Convert start/end dates into datetime objects in the local time zone
    for job in data:
        start_date = datetime.strptime(job['start_date'],
                                       '%Y-%m-%dT%H:%M:%S.%f')
        start_date = start_date.replace(tzinfo=tz.tzutc())
        job['start_date'] = start_date.astimezone(tz.tzlocal())
        end_date = datetime.strptime(job['end_date'],
                                     '%Y-%m-%dT%H:%M:%S.%f')
        end_date = end_date.replace(tzinfo=tz.tzutc())
        job['end_date'] = end_date.astimezone(tz.tzlocal())
    return data

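# Hypothetical usage sketch for get_execution (it needs network access and a
# configured login so call_api can obtain a token, so it is not run at import
# time; the helper name is illustrative only): list recent executions with
# their local start and end times.
def _demo_list_executions():
    jobs = get_execution()
    for job in jobs or []:
        print(job['start_date'].strftime('%Y-%m-%d %H:%M'),
              job['end_date'].strftime('%Y-%m-%d %H:%M'))
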
def read_json(file, verify=True):
    filename = os.path.join(os.path.dirname(__file__), 'data', file)
    url = u'https://s3.amazonaws.com/trends.earth/sharing/{}'.format(file)

    if os.path.exists(filename) and verify:
        if not check_hash_against_etag(url, filename):
            os.remove(filename)

    if not os.path.exists(filename):
        log(u'Downloading {}'.format(file))
        # TODO: Dialog box with two options:
        #   1) Download
        #   2) Load from local folder
        worker = Download(url, filename)
        worker.start()
        resp = worker.get_resp()
        if not resp:
            return None
        if not check_hash_against_etag(url, filename):
            return None

    with gzip.GzipFile(filename, 'r') as fin:
        json_bytes = fin.read()
        json_str = json_bytes.decode('utf-8')

    return json.loads(json_str)

def read_class_file(f):
    if not os.access(f, os.R_OK):
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate('DlgCalculateSetAggregation',
                                             "Error"),
            QtWidgets.QApplication.translate(
                'DlgCalculateSetAggregation',
                u"Cannot read {}.".format(f)))
        return None

    with open(f) as class_file:
        classes = json.load(class_file)

    if (not isinstance(classes, list)
            or not len(classes) > 0
            or not isinstance(classes[0], dict)
            or 'Initial_Code' not in classes[0]
            or 'Final_Code' not in classes[0]
            or 'Final_Label' not in classes[0]):
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate('DlgCalculateSetAggregation',
                                             "Error"),
            QtWidgets.QApplication.translate(
                'DlgCalculateSetAggregation',
                u"{} does not appear to contain a valid class definition.".
                format(f)))
        return None
    else:
        log(u'Loaded class definition from {}'.format(f))
        return classes

def get_cutoff(f, band_number, band_info, percentiles):
    if len(percentiles) != 1 and len(percentiles) != 2:
        raise ValueError(
            "Percentiles must have length 1 or 2. Percentiles that were "
            "passed: {}".format(percentiles))
    d = get_sample(f, band_number)
    md = np.ma.masked_where(d == band_info['no_data_value'], d)
    if md.size == 0:
        # If all of the values are no data, return 0
        log('All values are no data')
        return 0
    else:
        cutoffs = np.nanpercentile(md.compressed(), percentiles)
        if cutoffs.size == 2:
            max_cutoff = np.amax(np.absolute(cutoffs))
            if max_cutoff < 0:
                return 0
            else:
                return round_to_n(max_cutoff, 2)
        elif cutoffs.size == 1:
            if cutoffs < 0:
                # Negative cutoffs are not allowed as stretch is either zero
                # centered or starting at zero
                return 0
            else:
                return round_to_n(cutoffs, 2)
        else:
            # We only get here if cutoffs is not size 1 or 2, which should
            # never happen, so raise
            raise ValueError(
                "Stretch calculation returned cutoffs array of size {} "
                "({})".format(cutoffs.size, cutoffs))

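# Numpy-only sketch of the two-sided stretch logic above, run on synthetic
# data (an assumption for illustration: round_to_n rounds to n significant
# figures, so a plain round stands in for it here).
def _demo_symmetric_cutoff(percentiles=(2, 98)):
    import numpy as np
    data = np.random.default_rng(1).normal(0, 25, 10000)
    cutoffs = np.nanpercentile(data, percentiles)
    # A zero-centered stretch uses the largest absolute percentile so the
    # color ramp is symmetric around zero
    return round(float(np.amax(np.absolute(cutoffs))), 1)
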
def get_sample(f, band_number, n=1e6):
    '''Get a gridded sample of a raster dataset'''
    ds = gdal.Open(f)
    b = ds.GetRasterBand(band_number)

    xsize = b.XSize
    ysize = b.YSize

    # Select grid size from the shortest side to ensure we have enough
    # samples
    if xsize > ysize:
        edge = ysize
    else:
        edge = xsize
    grid_size = np.ceil(edge / np.sqrt(n))
    if (n > xsize * ysize) or ((grid_size * grid_size) > (xsize * ysize)):
        # Don't sample if the sample would be larger than the array itself
        return b.ReadAsArray().astype(np.float32)
    else:
        rows = np.arange(0, ysize, grid_size)
        cols = np.arange(0, xsize, grid_size).astype('int64')

        out = np.zeros((rows.shape[0], cols.shape[0]), np.float32)
        log("Sampling from a ({}, {}) array to a {} array "
            "(grid size: {}, samples: {})".format(
                ysize, xsize, out.shape, grid_size,
                out.shape[0] * out.shape[1]))
        for i in range(rows.shape[0]):
            out[i, :] = b.ReadAsArray(0, int(rows[i]), xsize, 1)[:, cols]
        return out

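# Illustration (numpy-only, no GDAL needed) of the grid-size math above:
# stepping every grid_size pixels along each axis yields at least roughly n
# samples, with the overshoot scaled by the raster's aspect ratio.
def _demo_grid_size(xsize=30000, ysize=20000, n=1e6):
    import numpy as np
    edge = min(xsize, ysize)
    grid_size = np.ceil(edge / np.sqrt(n))
    rows = np.arange(0, ysize, grid_size)
    cols = np.arange(0, xsize, grid_size)
    return grid_size, rows.size * cols.size  # -> (20.0, 1500000)
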
def extract_zipfile(file, verify=True):
    filename = os.path.join(os.path.dirname(__file__), 'data', file)
    url = u'https://s3.amazonaws.com/trends.earth/sharing/{}'.format(file)

    if os.path.exists(filename) and verify:
        if not check_hash_against_etag(url, filename):
            os.remove(filename)

    if not os.path.exists(filename):
        log(u'Downloading {}'.format(file))
        # TODO: Dialog box with two options:
        #   1) Download
        #   2) Load from local folder
        worker = Download(url, filename)
        worker.start()
        resp = worker.get_resp()
        if not resp:
            return None
        if not check_hash_against_etag(url, filename):
            return None

    try:
        with zipfile.ZipFile(filename, 'r') as fin:
            fin.extractall(os.path.join(os.path.dirname(__file__), 'data'))
        return True
    except zipfile.BadZipfile:
        os.remove(filename)
        return False

def work(self):
    ds_in = gdal.Open(self.in_vrt)
    soc_band = ds_in.GetRasterBand(1)
    clim_band = ds_in.GetRasterBand(2)

    block_sizes = soc_band.GetBlockSize()
    x_block_size = block_sizes[0]
    y_block_size = block_sizes[1]
    xsize = soc_band.XSize
    ysize = soc_band.YSize

    driver = gdal.GetDriverByName("GTiff")
    # Need a band for SOC degradation, plus bands for annual SOC, and for
    # annual LC
    ds_out = driver.Create(self.out_f, xsize, ysize,
                           1 + len(self.lc_years) * 2, gdal.GDT_Int16,
                           ['COMPRESS=LZW'])
    src_gt = ds_in.GetGeoTransform()
    ds_out.SetGeoTransform(src_gt)
    out_srs = osr.SpatialReference()
    out_srs.ImportFromWkt(ds_in.GetProjectionRef())
    ds_out.SetProjection(out_srs.ExportToWkt())

    blocks = 0
    for y in range(0, ysize, y_block_size):
        if y + y_block_size < ysize:
            rows = y_block_size
        else:
            rows = ysize - y
        for x in range(0, xsize, x_block_size):
            if self.killed:
                log("Processing of {} killed by user after processing {} "
                    "out of {} blocks.".format(self.out_f, y, ysize))
                break
            self.progress.emit(
                100 * (float(y) + (float(x) / xsize) * y_block_size) / ysize)
            if x + x_block_size < xsize:
                cols = x_block_size
            else:
                cols = xsize - x

            # Write initial soc to band 2 of the output file. Read SOC in
            # as float so the soc change calculations won't accumulate
            # error due to repeated truncation of ints
            soc = np.array(
                soc_band.ReadAsArray(x, y, cols, rows)).astype(np.float32)
            ds_out.GetRasterBand(2).WriteArray(soc, x, y)

            blocks += 1

    if self.killed:
        del ds_in
        del ds_out
        os.remove(self.out_f)
        return None
    else:
        return True

def work(self):
    self.toggle_show_progress.emit(True)
    self.toggle_show_cancel.emit(True)

    resp = requests.get(self.url, stream=True)
    # if resp.status_code == 401:
    #     raise DownloadError(u'Task has expired. Kindly rerun task again')
    if resp.status_code != 200:
        log(u'Unexpected HTTP status code ({}) while trying to download {}.'
            .format(resp.status_code, self.url))
        raise DownloadError(u'Unable to start download of {}'.format(
            self.url))

    total_size = int(resp.headers['Content-length'])
    if total_size < 1e5:
        total_size_pretty = '{:.2f} KB'.format(round(total_size / 1024, 2))
    else:
        total_size_pretty = '{:.2f} MB'.format(round(total_size * 1e-6, 2))
    log(u'Downloading {} ({}) to {}'.format(self.url, total_size_pretty,
                                            self.outfile))

    bytes_dl = 0
    # Stream from the response already opened above (the original code
    # issued a second, redundant GET here)
    with open(self.outfile, 'wb') as f:
        for chunk in resp.iter_content(chunk_size=8192):
            if self.killed:
                log(u"Download {} killed by user".format(self.url))
                break
            elif chunk:  # filter out keep-alive new chunks
                f.write(chunk)
                bytes_dl += len(chunk)
                self.progress.emit(100 * float(bytes_dl) / float(total_size))

    if bytes_dl != total_size:
        log(u"Download error. File size of {} didn't match expected "
            "({} versus {})".format(self.url, bytes_dl, total_size))
        os.remove(self.outfile)
        if not self.killed:
            raise DownloadError(
                u'Final file size of {} does not match expected'.format(
                    self.url))
        return None
    else:
        log(u"Download of {} complete".format(self.url))
        return True

def write_excel_summary(forest_loss, carbon_loss, area_missing, area_water,
                        area_non_forest, area_site, area_forest,
                        initial_carbon_total, year_start, year_end, out_file):
    def tr(s):
        return QtWidgets.QApplication.translate("MISLAND", s)

    wb = openpyxl.load_workbook(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data',
                     'summary_table_tc.xlsx'))

    ##########################################################################
    # SDG table
    ws_summary = wb['Total Carbon Summary Table']
    ws_summary.cell(8, 3).value = area_forest
    ws_summary.cell(9, 3).value = area_non_forest
    ws_summary.cell(10, 3).value = area_water
    ws_summary.cell(11, 3).value = area_missing
    ws_summary.cell(15, 4).value = year_start
    ws_summary.cell(16, 4).value = year_end
    #ws_summary.cell(10, 3).value = area_site
    ws_summary.cell(8, 5).value = initial_carbon_total
    write_col_to_sheet(ws_summary, np.arange(year_start + 1, year_end + 1),
                       1, 24)  # Years
    write_col_to_sheet(ws_summary, forest_loss, 2, 24)  # Forest loss
    write_col_to_sheet(ws_summary, carbon_loss, 4, 24)  # Carbon loss

    try:
        ws_summary_logo = Image(
            os.path.join(os.path.dirname(__file__), 'data',
                         'misland_logo_300.png'))
        ws_summary.add_image(ws_summary_logo, 'E1')
    except ImportError:
        # add_image will fail on computers without PIL installed (this will
        # be an issue on some Macs, likely others). It is only used here to
        # add our logo, so no big deal.
        pass

    try:
        wb.save(out_file)
    except IOError:
        log(u'Error saving {}'.format(out_file))
        return False
    log(u'Summary table saved to {}'.format(out_file))
    return True

def btn_calculate(self):
    # Note that the super class has several tests in it - if they fail it
    # returns False, which would mean this function should stop execution
    # as well.
    ret = super(DlgCalculateUrbanData, self).btn_calculate()
    if not ret:
        return

    # Limit area for the urban tool to 10,000 sq km (the original code
    # tested against 25,000 here, contradicting both this comment and the
    # error message below)
    aoi_area = self.aoi.get_area() / (1000 * 1000)
    log(u'AOI area is: {:n}'.format(aoi_area))
    if aoi_area > 10000:
        QtWidgets.QMessageBox.critical(
            None, self.tr("Error"),
            self.tr("The bounding box of the requested area (approximately "
                    "{:.6n} sq km) is too large. The urban area change tool "
                    "can process a maximum area of 10,000 sq km at a time. "
                    "Choose a smaller area to process.".format(aoi_area)))
        return False

    self.calculate_on_GEE()

def get_header(url):
    worker = Request(url, 'head')
    worker.start()
    resp = worker.get_resp()
    if resp is not None:
        log(u'Response from "{}" header request: {}'.format(
            url, resp.status_code))
        if resp.status_code == 200:
            ret = resp.headers
        else:
            desc, status = get_error_status(resp)
            QtWidgets.QMessageBox.critical(
                None, "Error",
                u"Error: {} (status {}).".format(desc, status))
            ret = None
    else:
        log('Header request failed')
        ret = None
    return ret

def trans_matrix_set(self, matrix=None):
    if not matrix:
        matrix = self.trans_matrix_default
    for row in range(0, self.deg_def_matrix.rowCount()):
        for col in range(0, self.deg_def_matrix.columnCount()):
            # Matrix is actually a list of length NUM_CLASSES * NUM_CLASSES
            val = matrix[len(self.classes) * row + col]
            if val == 0:
                val_str = '0'
            elif val == -1:
                val_str = '-'
            elif val == 1:
                val_str = '+'
            else:
                log('unrecognized value "{}" when setting transition matrix'
                    .format(val))
                return False
            self.deg_def_matrix.cellWidget(row, col).setText(val_str)
    return True

def login(email=None, password=None):
    if email is None:
        email = get_user_email()
    if password is None:
        password = QtCore.QSettings().value("MISLAND/password", None)
    if not email or not password:
        log('API unable to login - check username/password')
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate("MISLAND", "Error"),
            QtWidgets.QApplication.translate(
                "MISLAND",
                "Unable to login to MISLAND. Check your username and "
                "password."))
        return None

    resp = call_api('/auth', method='post',
                    payload={"email": email, "password": password})
    if resp is not None:
        QtCore.QSettings().setValue("MISLAND/email", email)
        QtCore.QSettings().setValue("MISLAND/password", password)
    return resp

def trans_matrix_get(self):
    # Extract trans_matrix from the QTableWidget
    trans_matrix = []
    for row in range(0, self.deg_def_matrix.rowCount()):
        for col in range(0, self.deg_def_matrix.columnCount()):
            val = self.deg_def_matrix.cellWidget(row, col).text()
            if val == "" or val == "0":
                val = 0
            elif val == "-":
                val = -1
            elif val == "+":
                val = 1
            else:
                log('unrecognized value "{}" when getting transition matrix'
                    .format(val))
                raise ValueError(
                    'unrecognized value "{}" in transition matrix'.format(
                        val))
            trans_matrix.append(val)
    return trans_matrix

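# Sketch of the cell encoding shared by trans_matrix_get/trans_matrix_set
# (pure Python, no Qt; the mapping names are illustrative and restate the
# branches above): '+' -> 1 (improvement), '-' -> -1 (degradation), '' or
# '0' -> 0 (stable).
_CELL_TO_CODE = {'': 0, '0': 0, '-': -1, '+': 1}
_CODE_TO_CELL = {0: '0', -1: '-', 1: '+'}
assert all(_CELL_TO_CODE[_CODE_TO_CELL[c]] == c for c in (-1, 0, 1))
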
def download_timeseries(job, tr):
    log("processing timeseries results...")
    table = job['results'].get('table', None)
    indices = job['params'].get('indices')
    title = job['params'].get('title')
    if not table:
        return None
    data = [x for x in table if x['name'] == 'mean'][0]
    dlg_plot = DlgPlotTimeries()
    labels = {
        'title': tr(title),
        'bottom': tr('Time'),
        'left': [tr('{} Trend'.format(indices)),
                 tr('{} x 10000'.format(indices))]
    }
    dlg_plot.plot_data(data['time'], data['y'], labels)
    dlg_plot.show()
    dlg_plot.exec_()

def make_summary_table(areas, populations, out_file):
    def tr(s):
        return QtWidgets.QApplication.translate("MISLAND", s)

    wb = openpyxl.load_workbook(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data',
                     'summary_table_urban.xlsx'))

    ##########################################################################
    # SDG table
    ws_summary = wb['SDG 11.3.1 Summary Table']
    write_table_to_sheet(ws_summary, areas, 23, 2)
    write_table_to_sheet(ws_summary, populations, 37, 2)

    try:
        ws_summary_logo = Image(
            os.path.join(os.path.dirname(__file__), 'data',
                         'trends_earth_logo_bl_300width.png'))
        ws_summary.add_image(ws_summary_logo, 'E1')
    except ImportError:
        # add_image will fail on computers without PIL installed (this will
        # be an issue on some Macs, likely others). It is only used here to
        # add our logo, so no big deal.
        log('Adding MISLAND logo to worksheet FAILED')
        pass

    try:
        wb.save(out_file)
        log(u'Summary table saved to {}'.format(out_file))
        QtWidgets.QMessageBox.information(
            None,
            QtWidgets.QApplication.translate("MISLAND", "Success"),
            QtWidgets.QApplication.translate(
                "MISLAND", u'Summary table saved to {}'.format(out_file)))
    except IOError:
        log(u'Error saving {}'.format(out_file))
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate("MISLAND", "Error"),
            QtWidgets.QApplication.translate(
                "MISLAND",
                u"Error saving output table - check that {} is accessible "
                "and not already open.".format(out_file)))

def get_file_metadata(json_file):
    try:
        with open(json_file) as f:
            d = json.load(f)
    except (OSError, IOError, ValueError):
        log(u'Error loading {}'.format(json_file))
        return None

    local_raster_schema = LocalRasterSchema()

    try:
        d = local_raster_schema.load(d)
    except ValidationError:
        log(u'Unable to parse {}'.format(json_file))
        return None

    # Below is a fix for older versions of MISLAND (<0.43) that stored the
    # full path in the metadata
    f = os.path.join(os.path.dirname(json_file),
                     os.path.basename(os.path.normpath(d['file'])))
    if not os.access(f, os.R_OK):
        log(u'Data file {} is missing'.format(f))
        return None
    else:
        return d

def start(self):
    try:
        worker = DownloadWorker(self.url, self.outfile)
        pause = QtCore.QEventLoop()
        worker.finished.connect(pause.quit)
        worker.successfully_finished.connect(self.save_resp)
        worker.error.connect(self.save_exception)
        start_worker(worker, iface,
                     QtWidgets.QApplication.translate(
                         "MISLAND", u'Downloading {}').format(self.outfile))
        pause.exec_()
        if self.get_exception():
            raise self.get_exception()
    except requests.exceptions.ChunkedEncodingError:
        log("Download failed due to ChunkedEncodingError - likely a "
            "connection loss")
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate("MISLAND", "Error"),
            QtWidgets.QApplication.translate(
                "MISLAND",
                "Download failed. Check your internet connection."))
        return False
    except requests.exceptions.ConnectionError:
        log("Download failed due to connection error")
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate("MISLAND", "Error"),
            QtWidgets.QApplication.translate(
                "MISLAND",
                "Unable to access internet. Check your internet "
                "connection."))
        return False
    except requests.exceptions.Timeout:
        log('Download timed out.')
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate("MISLAND", "Error"),
            QtWidgets.QApplication.translate(
                "MISLAND",
                "Download timed out. Check your internet connection."))
        return False
    except DownloadError:
        log("Download failed.")
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate("MISLAND", "Error"),
            QtWidgets.QApplication.translate(
                "MISLAND",
                "Download failed. Task has expired. Kindly rerun task."))
        return False
    return True

def start(self):
    try:
        worker = RequestWorker(self.url, self.method, self.payload,
                               self.headers)
        pause = QtCore.QEventLoop()
        worker.finished.connect(pause.quit)
        worker.successfully_finished.connect(self.save_resp)
        worker.error.connect(self.save_exception)
        start_worker(worker, iface,
                     QtWidgets.QApplication.translate(
                         "MISLAND",
                         u'Contacting {} server...'.format(self.server_name)))
        pause.exec_()
        if self.get_exception():
            raise self.get_exception()
    except requests.exceptions.ConnectionError:
        log('API unable to access server - check internet connection')
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate("MISLAND", "Error"),
            QtWidgets.QApplication.translate(
                "MISLAND",
                u"Unable to login to {} server. Check your internet "
                "connection.".format(self.server_name)))
        resp = None
    except requests.exceptions.Timeout:
        log('API request to server timed out')
        QtWidgets.QMessageBox.critical(
            None,
            QtWidgets.QApplication.translate("MISLAND", "Error"),
            QtWidgets.QApplication.translate(
                "MISLAND",
                u"Unable to connect to {} server.".format(self.server_name)))
        resp = None

def call_api(endpoint, method='get', payload=None, use_token=False):
    if use_token:
        login_resp = login()
        if login_resp:
            log("API loaded token.")
            headers = {
                'Authorization':
                'Bearer {}'.format(login_resp['access_token'])
            }
        else:
            return
    else:
        log("API no token required.")
        headers = {}

    # Only continue if don't need token or if token load was successful
    if (not use_token) or login_resp:
        # Strip password out of payload for printing to QGIS logs
        if payload:
            clean_payload = payload.copy()
            if 'password' in clean_payload:
                clean_payload['password'] = '******'
        else:
            clean_payload = payload
        log(u'API calling {} with method "{}" and payload: {}'.format(
            endpoint, method, clean_payload))
        worker = Request(API_URL + endpoint, method, payload, headers)
        worker.start()
        resp = worker.get_resp()
        log(u'API response from "{}" request: {}'.format(
            method, clean_api_response(resp)))
    else:
        resp = None

    if resp is not None:
        if resp.status_code == 200:
            ret = resp.json()
        else:
            desc, status = get_error_status(resp)
            QtWidgets.QMessageBox.critical(
                None, "Error",
                u"Error: {} (status {}).".format(desc, status))
            ret = None
    else:
        ret = None
    return ret

def tr_style_text(label, band_info=None):
    """If no translation is available, use the original label"""
    val = style_text_dict.get(label, None)
    log('label is: {}'.format(label))
    log('val is: {}'.format(tr(val)))
    if val:
        if band_info:
            return val.format(**band_info['metadata'])
        else:
            return val
    else:
        log(u'"{}" not found in translation dictionary'.format(label))
        if isinstance(label, str):
            return label
        else:
            return str(label)

def delete_layer_by_filename(f):
    f = os.path.abspath(f)
    project = QgsProject.instance()
    for lyr_id in project.mapLayers():
        lyr = project.mapLayer(lyr_id)
        source = os.path.abspath(lyr.source())
        if source == f:
            log('Removing map layer prior to deletion of {}'.format(f))
            project.removeMapLayer(lyr_id)
            break
    # Remove the file itself regardless of whether a matching layer was
    # loaded (the original code only deleted the file when a layer matched)
    try:
        log('Removing file {}'.format(f))
        os.remove(f)
    except OSError:
        log('Error removing file at {}'.format(f))
        return False
    return True

def check_hash_against_etag(url, filename, expected=None):
    if not expected:
        h = get_header(url)
        if not h:
            log(u"Failed to fetch expected hash for {}".format(filename))
            return False
        else:
            expected = h.get('ETag', '').strip('"')

    with open(filename, 'rb') as f:
        md5hash = hashlib.md5(f.read()).hexdigest()

    if md5hash == expected:
        log(u"File hash verified for {}".format(filename))
        return True
    else:
        log(u"Failed verification of file hash for {}. Expected {}, but "
            "got {}".format(filename, expected, md5hash))
        return False

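# Side note (an assumption about S3 semantics, not about plugin behavior):
# an S3 ETag equals the object's MD5 only for single-part, unencrypted
# uploads; multipart ETags contain a '-' and would always fail the check
# above. A chunked digest, sketched below, also avoids reading large
# downloads fully into memory:
def _md5_chunked(path, chunk_size=8192):
    import hashlib
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()
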
def processAlgorithm(self, parameters, context, feedback):
    src_file = self.parameterAsFile(parameters, 'INPUT', context)
    year_start = self.parameterAsInt(parameters, 'YEAR_START', context)
    year_end = self.parameterAsInt(parameters, 'YEAR_END', context)

    src_ds = gdal.Open(src_file)
    band_f_loss = src_ds.GetRasterBand(1)
    band_tc = src_ds.GetRasterBand(2)

    block_sizes = band_f_loss.GetBlockSize()
    xsize = band_f_loss.XSize
    ysize = band_f_loss.YSize
    n_out_bands = 1
    x_block_size = block_sizes[0]
    y_block_size = block_sizes[1]

    src_gt = src_ds.GetGeoTransform()
    # Width of cells in longitude
    long_width = src_gt[1]
    # Set initial lat to the top left corner latitude
    lat = src_gt[3]
    # Width of cells in latitude
    pixel_height = src_gt[5]

    area_missing = 0
    area_non_forest = 0
    area_water = 0
    area_site = 0
    initial_forest_area = 0
    initial_carbon_total = 0
    forest_loss = np.zeros(year_end - year_start)
    carbon_loss = np.zeros(year_end - year_start)

    blocks = 0
    for y in range(0, ysize, y_block_size):
        if y + y_block_size < ysize:
            rows = y_block_size
        else:
            rows = ysize - y
        for x in range(0, xsize, x_block_size):
            if feedback.isCanceled():
                log("Processing of {} killed by user after processing {} "
                    "out of {} blocks.".format(src_file, y, ysize))
                break
            feedback.setProgress(
                100 * (float(y) + (float(x) / xsize) * y_block_size) / ysize)
            if x + x_block_size < xsize:
                cols = x_block_size
            else:
                cols = xsize - x

            f_loss_array = band_f_loss.ReadAsArray(x, y, cols, rows)
            tc_array = band_tc.ReadAsArray(x, y, cols, rows)

            # Calculate cell area for each horizontal line
            cell_areas = np.array(
                [calc_cell_area(lat + pixel_height * n,
                                lat + pixel_height * (n + 1),
                                long_width) for n in range(rows)])
            cell_areas.shape = (cell_areas.size, 1)
            # Make an array of the same size as the input arrays containing
            # the area of each cell (which is identical for all cells in a
            # given row - cell areas only vary among rows)
            cell_areas_array = np.repeat(cell_areas, cols, axis=1)

            initial_forest_pixels = (
                (f_loss_array == 0) |
                (f_loss_array > (year_start - 2000)))
            area_missing = area_missing + np.sum(
                ((f_loss_array == -32768) | (tc_array == -32768)) *
                cell_areas_array)
            area_water = area_water + np.sum(
                (f_loss_array == -2) * cell_areas_array)
            area_non_forest = area_non_forest + np.sum(
                (f_loss_array == -1) * cell_areas_array)
            # The site area includes everything that isn't masked
            area_site = area_site + np.sum(
                (f_loss_array != -32767) * cell_areas_array)
            initial_forest_area = initial_forest_area + np.sum(
                initial_forest_pixels * cell_areas_array)
            initial_carbon_total = initial_carbon_total + np.sum(
                initial_forest_pixels * tc_array * (tc_array >= 0) *
                cell_areas_array)

            for n in range(year_end - year_start):
                # Note the codes are year - 2000
                forest_loss[n] = forest_loss[n] + np.sum(
                    (f_loss_array == year_start - 2000 + n + 1) *
                    cell_areas_array)
                # TODO: Check units here - is tc_array per m or per ha?
                carbon_loss[n] = carbon_loss[n] + np.sum(
                    (f_loss_array == year_start - 2000 + n + 1) *
                    tc_array * (tc_array >= 0) * cell_areas_array)

            blocks += 1
        lat += pixel_height * rows
    feedback.setProgress(100)

    if feedback.isCanceled():
        return {}
    else:
        # Convert all area tables from square meters into hectares
        forest_loss = forest_loss * 1e-4
        # Note that carbon is scaled by 10
        carbon_loss = carbon_loss * 1e-4 / 10
        area_missing = area_missing * 1e-4
        area_water = area_water * 1e-4
        area_non_forest = area_non_forest * 1e-4
        area_site = area_site * 1e-4
        initial_forest_area = initial_forest_area * 1e-4
        # Note that carbon is scaled by 10
        initial_carbon_total = initial_carbon_total * 1e-4 / 10

        return {'FOREST_LOSS': np.array2string(forest_loss),
                'CARBON_LOSS': np.array2string(carbon_loss),
                'CARBON_INITIAL': initial_carbon_total,
                'AREA_FOREST': initial_forest_area,
                'AREA_NON_FOREST': area_non_forest,
                'AREA_WATER': area_water,
                'AREA_MISSING': area_missing,
                'AREA_SITE': area_site}

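# For reference, a minimal spherical-Earth sketch of the calc_cell_area
# helper assumed above (hypothetical - the plugin may well use an
# ellipsoidal formula instead). Area in m^2 of a cell spanning lat0..lat1
# degrees of latitude and `width` degrees of longitude:
def _cell_area_sphere(lat0, lat1, width, r=6371007.181):
    import numpy as np
    phi0, phi1 = np.radians([lat0, lat1])
    return abs(np.sin(phi1) - np.sin(phi0)) * np.radians(width) * r ** 2
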
def btn_download(self):
    rows = list(
        set(index.row() for index in self.jobs_view.selectedIndexes()))

    filenames = []
    for row in rows:
        job = self.jobs[row]
        # Check if we need a download filename - some tasks don't need to
        # save data, but if any of the chosen tasks do, then we need to
        # choose a folder. Right now only TimeSeriesTable doesn't need a
        # filename.
        if job['results'].get('type') != 'TimeSeriesTable':
            f = None
            while not f:
                # Setup a string to use in filename window
                if job['task_name']:
                    job_info = u'{} ({})'.format(job['script_name'],
                                                 job['task_name'])
                else:
                    job_info = job['script_name']
                f, _ = QtWidgets.QFileDialog.getSaveFileName(
                    self,
                    self.tr(u'Choose a filename. Downloading results of: '
                            '{}'.format(job_info)),
                    self.settings.value("MISLAND/output_dir", None),
                    self.tr('Base filename (*.json)'))
                # Strip the extension so that it is a basename
                f = os.path.splitext(f)[0]
                if f:
                    if os.access(os.path.dirname(f), os.W_OK):
                        self.settings.setValue("MISLAND/output_dir",
                                               os.path.dirname(f))
                        log(u"Downloading results to {} with basename {}"
                            .format(os.path.dirname(f), os.path.basename(f)))
                    else:
                        QtWidgets.QMessageBox.critical(
                            None, self.tr("Error"),
                            self.tr(u"Cannot write to {}. Choose a "
                                    "different base filename.".format(f)))
                else:
                    return False
            filenames.append(f)
        else:
            filenames.append(None)

    self.close()

    for row, f in zip(rows, filenames):
        job = self.jobs[row]
        log(u"Processing job {}".format(job))
        result_type = job['results'].get('type')
        if result_type == 'CloudResults':
            download_cloud_results(job, f, self.tr)
        elif result_type == 'TimeSeriesTable':
            download_timeseries(job, self.tr)
        else:
            raise ValueError(
                "Unrecognized result type in download results: {}".format(
                    result_type))

def worker_error(e):
    log(u'Exception in worker thread: {}'.format(e))

def work(self):
    ds_in = gdal.Open(self.in_vrt)
    soc_band = ds_in.GetRasterBand(1)
    clim_band = ds_in.GetRasterBand(2)

    block_sizes = soc_band.GetBlockSize()
    x_block_size = block_sizes[0]
    y_block_size = block_sizes[1]
    xsize = soc_band.XSize
    ysize = soc_band.YSize

    driver = gdal.GetDriverByName("GTiff")
    # Need a band for SOC degradation, plus bands for annual SOC, and for
    # annual LC
    ds_out = driver.Create(self.out_f, xsize, ysize,
                           1 + len(self.lc_years) * 2, gdal.GDT_Int16,
                           ['COMPRESS=LZW'])
    src_gt = ds_in.GetGeoTransform()
    ds_out.SetGeoTransform(src_gt)
    out_srs = osr.SpatialReference()
    out_srs.ImportFromWkt(ds_in.GetProjectionRef())
    ds_out.SetProjection(out_srs.ExportToWkt())

    # Setup a raster of climate regimes to use for coding Fl automatically
    clim_fl_map = np.array(
        [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
         [0, .69, .8, .69, .8, .69, .8, .69, .8, .64, .48, .48, .58]])

    # Transition codes (first digit is the initial class, second digit is
    # the final class)
    lc_tr_codes = [11, 12, 13, 14, 15, 16, 17,
                   21, 22, 23, 24, 25, 26, 27,
                   31, 32, 33, 34, 35, 36, 37,
                   41, 42, 43, 44, 45, 46, 47,
                   51, 52, 53, 54, 55, 56, 57,
                   61, 62, 63, 64, 65, 66, 67,
                   71, 72, 73, 74, 75, 76, 77]

    # Stock change factor for land use - note the 99 and -99 will be
    # recoded using the chosen Fl option
    lc_tr_fl_0_map = np.array(
        [lc_tr_codes,
         [1, 1, 99, 1, 0.1, 0.1, 1,
          1, 1, 99, 1, 0.1, 0.1, 1,
          -99, -99, 1, 1 / 0.71, 0.1, 0.1, 1,
          1, 1, 0.71, 1, 0.1, 0.1, 1,
          2, 2, 2, 2, 1, 1, 1,
          2, 2, 2, 2, 1, 1, 1,
          1, 1, 1, 1, 1, 1, 1]])

    # Stock change factor for management regime (all ones at present)
    lc_tr_fm_map = [lc_tr_codes, [1] * len(lc_tr_codes)]

    # Stock change factor for input of organic matter (all ones at present)
    lc_tr_fo_map = [lc_tr_codes, [1] * len(lc_tr_codes)]

    blocks = 0
    for y in range(0, ysize, y_block_size):
        if self.killed:
            log("Processing killed by user after processing {} out of {} "
                "blocks.".format(y, ysize))
            break
        self.progress.emit(100 * float(y) / ysize)
        if y + y_block_size < ysize:
            rows = y_block_size
        else:
            rows = ysize - y
        for x in range(0, xsize, x_block_size):
            if x + x_block_size < xsize:
                cols = x_block_size
            else:
                cols = xsize - x

            # Write initial soc to band 2 of the output file. Read SOC in
            # as float so the soc change calculations won't accumulate
            # error due to repeated truncation of ints
            soc = np.array(
                soc_band.ReadAsArray(x, y, cols, rows)).astype(np.float32)
            ds_out.GetRasterBand(2).WriteArray(soc, x, y)

            if self.fl == 'per pixel':
                clim = np.array(
                    clim_band.ReadAsArray(x, y, cols, rows)).astype(
                        np.float32)
                # Setup a raster of climate regimes to use for coding Fl
                # automatically
                clim_fl = remap(clim, clim_fl_map)

            tr_year = np.zeros(np.shape(soc))
            soc_chg = np.zeros(np.shape(soc))
            for n in range(len(self.lc_years) - 1):
                t0 = float(self.lc_years[n])
                t1 = float(self.lc_years[n + 1])
                lc_t0 = ds_in.GetRasterBand(
                    self.lc_band_nums[n]).ReadAsArray(x, y, cols, rows)
                lc_t1 = ds_in.GetRasterBand(
                    self.lc_band_nums[n + 1]).ReadAsArray(x, y, cols, rows)

                nodata = ((lc_t0 == -32768) | (lc_t1 == -32768) |
                          (soc == -32768))
                if self.fl == 'per pixel':
                    nodata[clim == -128] = True

                # Compute the transition map (first digit for baseline land
                # cover, and second digit for target year land cover), but
                # only update where changes actually occurred.
                lc_tr = lc_t0 * 10 + lc_t1
                lc_tr[(lc_t0 < 1) | (lc_t1 < 1)] = -32768

                ##############################################################
                # If more than one year has elapsed, need to split the
                # period into two parts, and account for any required
                # changes in soc due to past lc transitions over the first
                # part of the period, and soc changes due to lc changes
                # that occurred during the period over the second part of
                # the period.

                # Calculate the middle of the period. Take the floor so
                # that a transition that occurs when two lc layers are one
                # year apart gets the full new soc_chg factor applied
                # (rather than half), and none of the old soc_chg factor.
                t_mid = t0 + np.floor((t1 - t0) / 2)

                # Assume any lc transitions occurred in the middle of the
                # period since we don't know the actual year of transition.
                # Apply old soc change for appropriate number of years for
                # pixels that had a transition > tr_year ago but less than
                # 20 years prior to the middle of this period. Changes
                # occur over a twenty year period, and then change stops.
                if n > 0:
                    # Don't consider transition in lc at beginning of the
                    # period for the first period (as there is no data on
                    # what lc was prior to the first period, so soc_chg is
                    # undefined)
                    yrs_lc_0 = t_mid - tr_year
                    yrs_lc_0[yrs_lc_0 > 20] = 20
                    soc = soc - soc_chg * yrs_lc_0
                    soc_chg[yrs_lc_0 == 20] = 0

                ##############################################################
                # Calculate new soc_chg and apply it over the second half
                # of the period

                # Stock change factor for land use
                lc_tr_fl = remap(
                    np.array(lc_tr).astype(np.float32), lc_tr_fl_0_map)
                if self.fl == 'per pixel':
                    lc_tr_fl[lc_tr_fl == 99] = clim_fl[lc_tr_fl == 99]
                    lc_tr_fl[lc_tr_fl == -99] = \
                        1. / clim_fl[lc_tr_fl == -99]
                else:
                    lc_tr_fl[lc_tr_fl == 99] = self.fl
                    lc_tr_fl[lc_tr_fl == -99] = 1. / self.fl

                # Stock change factor for management regime
                lc_tr_fm = remap(lc_tr, lc_tr_fm_map)
                # Stock change factor for input of organic matter
                lc_tr_fo = remap(lc_tr, lc_tr_fo_map)

                # Set the transition year to the middle of the period for
                # pixels that had a change in cover
                tr_year[lc_t0 != lc_t1] = t_mid
                # Calculate a new soc change for pixels that changed
                soc_chg[lc_t0 != lc_t1] = \
                    (soc[lc_t0 != lc_t1] -
                     soc[lc_t0 != lc_t1] *
                     lc_tr_fl[lc_t0 != lc_t1] *
                     lc_tr_fm[lc_t0 != lc_t1] *
                     lc_tr_fo[lc_t0 != lc_t1]) / 20

                yrs_lc_1 = t1 - tr_year
                # Subtract the length of the first half of the period from
                # yrs_lc_1 for pixels that weren't changed - these pixels
                # have already had soc_chg applied for the first portion of
                # the period
                yrs_lc_1[lc_t0 == lc_t1] = \
                    yrs_lc_1[lc_t0 == lc_t1] - (t_mid - t0)
                yrs_lc_1[yrs_lc_1 > 20] = 20
                soc = soc - soc_chg * yrs_lc_1
                soc_chg[yrs_lc_1 == 20] = 0

                # Write out this SOC layer. Note the first band of ds_out
                # is soc degradation, and the second band is the initial
                # soc. As n starts at 0, need to add 3 so that the first
                # soc band derived from LC change is written to band 3 of
                # the output file
                soc[nodata] = -32768
                ds_out.GetRasterBand(n + 3).WriteArray(soc, x, y)

            # Write out the percent change in SOC layer
            soc_initial = ds_out.GetRasterBand(2).ReadAsArray(
                x, y, cols, rows)
            soc_final = ds_out.GetRasterBand(
                2 + len(self.lc_band_nums) - 1).ReadAsArray(
                    x, y, cols, rows)
            soc_initial = np.array(soc_initial).astype(np.float32)
            soc_final = np.array(soc_final).astype(np.float32)
            soc_pch = ((soc_final - soc_initial) / soc_initial) * 100
            soc_pch[nodata] = -32768
            ds_out.GetRasterBand(1).WriteArray(soc_pch, x, y)

            # Write out the initial and final lc layers
            lc_bl = ds_in.GetRasterBand(
                self.lc_band_nums[0]).ReadAsArray(x, y, cols, rows)
            ds_out.GetRasterBand(
                1 + len(self.lc_band_nums) + 1).WriteArray(lc_bl, x, y)
            lc_tg = ds_in.GetRasterBand(
                self.lc_band_nums[-1]).ReadAsArray(x, y, cols, rows)
            ds_out.GetRasterBand(
                1 + len(self.lc_band_nums) + 2).WriteArray(lc_tg, x, y)

            blocks += 1

    if self.killed:
        del ds_in
        del ds_out
        os.remove(self.out_f)
        return None
    else:
        return True

def calculate_locally(self):
    if not self.groupBox_custom_SOC.isChecked():
        QtWidgets.QMessageBox.critical(
            None, self.tr("Error"),
            self.tr("Due to the options you have chosen, this calculation "
                    "must occur offline. You MUST select a custom soil "
                    "organic carbon dataset."))
        return

    if not self.lc_setup_tab.use_custom.isChecked():
        QtWidgets.QMessageBox.critical(
            None, self.tr("Error"),
            self.tr("Due to the options you have chosen, this calculation "
                    "must occur offline. You MUST select a custom land "
                    "cover dataset."))
        return

    if len(self.comboBox_custom_soc.layer_list) == 0:
        QtWidgets.QMessageBox.critical(
            None, self.tr("Error"),
            self.tr("You must add a soil organic carbon layer to your map "
                    "before you can run the calculation."))
        return

    year_baseline = self.lc_setup_tab.get_initial_year()
    year_target = self.lc_setup_tab.get_final_year()
    if int(year_baseline) >= int(year_target):
        QtWidgets.QMessageBox.information(
            None, self.tr("Warning"),
            self.tr('The baseline year ({}) is greater than or equal to '
                    'the target year ({}) - this analysis might generate '
                    'strange results.'.format(year_baseline, year_target)))

    if self.aoi.calc_frac_overlap(
            QgsGeometry.fromRect(
                self.lc_setup_tab.use_custom_initial.get_layer()
                .extent())) < .99:
        QtWidgets.QMessageBox.critical(
            None, self.tr("Error"),
            self.tr("Area of interest is not entirely within the initial "
                    "land cover layer."))
        return

    if self.aoi.calc_frac_overlap(
            QgsGeometry.fromRect(
                self.lc_setup_tab.use_custom_final.get_layer()
                .extent())) < .99:
        QtWidgets.QMessageBox.critical(
            None, self.tr("Error"),
            self.tr("Area of interest is not entirely within the final "
                    "land cover layer."))
        return

    out_f = self.get_save_raster()
    if not out_f:
        return

    self.close()

    # Select the initial and final bands from initial and final datasets
    # (in case there is more than one lc band per dataset)
    lc_initial_vrt = self.lc_setup_tab.use_custom_initial.get_vrt()
    lc_final_vrt = self.lc_setup_tab.use_custom_final.get_vrt()
    lc_files = [lc_initial_vrt, lc_final_vrt]
    lc_years = [self.lc_setup_tab.get_initial_year(),
                self.lc_setup_tab.get_final_year()]
    lc_vrts = []
    for i in range(len(lc_files)):
        f = tempfile.NamedTemporaryFile(suffix='.vrt').name
        # Add one since band numbers don't start at zero
        gdal.BuildVRT(
            f, lc_files[i], bandList=[i + 1],
            outputBounds=self.aoi.get_aligned_output_bounds_deprecated(
                lc_initial_vrt),
            resolution='highest',
            resampleAlg=gdal.GRA_NearestNeighbour,
            separate=True)
        lc_vrts.append(f)

    soc_vrt = self.comboBox_custom_soc.get_vrt()
    climate_zones = os.path.join(os.path.dirname(__file__), 'data',
                                 'IPCC_Climate_Zones.tif')
    in_files = [soc_vrt, climate_zones]
    in_files.extend(lc_vrts)
    in_vrt = tempfile.NamedTemporaryFile(suffix='.vrt').name
    log(u'Saving SOC input files to {}'.format(in_vrt))
    gdal.BuildVRT(
        in_vrt, in_files,
        resolution='highest',
        resampleAlg=gdal.GRA_NearestNeighbour,
        outputBounds=self.aoi.get_aligned_output_bounds_deprecated(
            lc_initial_vrt),
        separate=True)

    # Lc bands start on band 3 as band 1 is initial soc, and band 2 is
    # climate zones
    lc_band_nums = np.arange(len(lc_files)) + 3

    log(u'Saving soil organic carbon to {}'.format(out_f))
    soc_worker = StartWorker(
        SOCWorker, 'calculating change in soil organic carbon', in_vrt,
        out_f, lc_band_nums, lc_years, self.get_fl())
    if not soc_worker.success:
        QtWidgets.QMessageBox.critical(
            None, self.tr("Error"),
            self.tr("Error calculating change in soil organic carbon."))
        return

    band_infos = [
        BandInfo("Soil organic carbon (degradation)", add_to_map=True,
                 metadata={'year_start': lc_years[0],
                           'year_end': lc_years[-1]})
    ]
    for year in lc_years:
        if (year == lc_years[0]) or (year == lc_years[-1]):
            # Add first and last years to map
            add_to_map = True
        else:
            add_to_map = False
        band_infos.append(
            BandInfo("Soil organic carbon", add_to_map=add_to_map,
                     metadata={'year': year}))
    for year in lc_years:
        band_infos.append(
            BandInfo("Land cover (7 class)", metadata={'year': year}))

    out_json = os.path.splitext(out_f)[0] + '.json'
    create_local_json_metadata(out_json, out_f, band_infos)

    schema = BandInfoSchema()
    for band_number in range(len(band_infos)):
        b = schema.dump(band_infos[band_number])
        if b['add_to_map']:
            # The +1 is because band numbers start at 1, not zero
            add_layer(out_f, band_number + 1, b)

def work(self):
    geom = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data',
                        'aoi.geojson')
    ds_in = gdal.Open(self.in_f)
    # Resample input layers
    ds_in = gdal.Warp(self.in_f, ds_in, xRes=0.001, yRes=0.001,
                      resampleAlg="bilinear")
    # Mask input layers to the AOI
    ds_in = gdal.Warp(self.in_f, ds_in, cutlineDSName=geom,
                      cropToCutline=True, dstNodata=np.nan)
    band_ppt = ds_in.GetRasterBand(1)
    band_pet = ds_in.GetRasterBand(2)

    block_sizes = band_ppt.GetBlockSize()
    x_block_size = block_sizes[0]
    y_block_size = block_sizes[1]
    xsize = band_ppt.XSize
    ysize = band_ppt.YSize
    log("xsize {}".format(xsize))
    log("ysize {}".format(ysize))

    driver = gdal.GetDriverByName("GTiff")
    ds_out = driver.Create(self.out_f, xsize, ysize, 1, gdal.GDT_Float64,
                           ['COMPRESS=LZW'])
    src_gt = ds_in.GetGeoTransform()
    ds_out.SetGeoTransform(src_gt)
    out_srs = osr.SpatialReference()
    out_srs.ImportFromWkt(ds_in.GetProjectionRef())
    ds_out.SetProjection(out_srs.ExportToWkt())

    blocks = 0
    for y in range(0, ysize, y_block_size):
        if y + y_block_size < ysize:
            rows = y_block_size
        else:
            rows = ysize - y
        for x in range(0, xsize, x_block_size):
            if self.killed:
                log("Processing killed by user after processing {} out of "
                    "{} blocks.".format(y, ysize))
                break
            self.progress.emit(
                100 * (float(y) + (float(x) / xsize) * y_block_size) / ysize)
            if x + x_block_size < xsize:
                cols = x_block_size
            else:
                cols = xsize - x

            a_ppt = band_ppt.ReadAsArray(x, y, cols, rows)
            a_pet = band_pet.ReadAsArray(x, y, cols, rows)
            a_ppt = a_ppt.astype('float64')
            a_pet = a_pet.astype('float64')

            # Calculate the aridity index
            a_aridity = a_ppt / a_pet

            # Reclassify aridity values to an index. All class masks are
            # evaluated at once (np.select) so values assigned by earlier
            # rules can't be re-captured by later ones; the original
            # in-place assignments re-captured index values like 1.05 with
            # the final < 280 precipitation rule below, collapsing the
            # whole array to 2.
            aridity_idx = np.select(
                [a_aridity >= 1.00,
                 (a_aridity >= 0.75) & (a_aridity < 1.00),
                 (a_aridity >= 0.65) & (a_aridity < 0.75),
                 (a_aridity >= 0.50) & (a_aridity < 0.65),
                 (a_aridity >= 0.35) & (a_aridity < 0.50),
                 (a_aridity >= 0.20) & (a_aridity < 0.35),
                 (a_aridity >= 0.10) & (a_aridity < 0.20),
                 (a_aridity >= 0.03) & (a_aridity < 0.10),
                 a_aridity < 0.03],
                [1, 1.05, 1.15, 1.25, 1.35, 1.45, 1.55, 1.75, 2],
                default=np.nan)

            # Reclassify precipitation values to an index
            ppt_idx = np.select(
                [a_ppt >= 650,
                 (a_ppt >= 570) & (a_ppt < 650),
                 (a_ppt >= 490) & (a_ppt < 570),
                 (a_ppt >= 440) & (a_ppt < 490),
                 (a_ppt >= 390) & (a_ppt < 440),
                 (a_ppt >= 345) & (a_ppt < 390),
                 (a_ppt >= 310) & (a_ppt < 345),
                 (a_ppt >= 280) & (a_ppt < 310),
                 a_ppt < 280],
                [1, 1.05, 1.15, 1.25, 1.35, 1.50, 1.65, 1.80, 2],
                default=np.nan)

            a_cqi = (aridity_idx * ppt_idx) ** (1 / 2)

            # Reclassify the CQI output
            a_cqi[a_cqi > 1.81] = 3
            a_cqi[(a_cqi >= 1.15) & (a_cqi <= 1.81)] = 2
            a_cqi[a_cqi < 1.15] = 1
            # Flag nodata using the raw (pre-reclassification) values
            a_cqi[(a_ppt < 0) | (a_aridity < 0) | (a_pet < 0)] = -32768

            ds_out.GetRasterBand(1).WriteArray(a_cqi, x, y)
            blocks += 1

    if self.killed:
        os.remove(self.out_f)
        return None
    else:
        return True

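# Quick numpy-free sanity check of the climate quality index (CQI) binning
# above (the helper name and sample values are illustrative only): the
# geometric mean of the aridity and precipitation indices is binned into
# classes 1 (high quality) through 3 (low quality).
def _demo_cqi(aridity_idx=1.45, ppt_idx=1.5):
    cqi = (aridity_idx * ppt_idx) ** 0.5  # ~1.47 for the defaults
    if cqi > 1.81:
        return 3
    return 2 if cqi >= 1.15 else 1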