def get_gam(ncs_reference, PAmask, modelname=None):
    """
    Train a GAM (R mgcv via rpy2) relating presence/absence to climate indices.

    :param ncs_reference: list of netCDF files, one indice variable per file;
                          the aggregation tag is parsed from the file name
                          (second-to-last '_'-separated token)
    :param PAmask: 2D numpy array presence/absence mask (nan = masked cell)
    :param modelname: optional label for the model; accepted for compatibility
                      with callers that pass ``modelname=key`` (currently unused)
    :return: tuple (gam_model, prediction reshaped to PAmask.shape,
             path of concatenated response-curve png)
    """
    from netCDF4 import Dataset
    from os.path import basename
    from numpy import squeeze, ravel, isnan, nan, array
    from flyingpigeon.utils import get_variable
    try:
        from rpy2.robjects.packages import importr
        import rpy2.robjects as ro
        import rpy2.robjects.numpy2ri
        rpy2.robjects.numpy2ri.activate()
        base = importr("base")
        stats = importr("stats")
        mgcv = importr("mgcv")
        # BUGFIX: this import was commented out although grdevices.png() and
        # grdevices.dev_off() are called below (NameError at plot time)
        grdevices = importr('grDevices')
        logger.info('rpy2 modules imported')
    except Exception as e:
        msg = 'failed to import rpy2 modules %s' % e
        logger.debug(msg)
        raise Exception(msg)

    try:
        # response variable: flattened presence/absence mask
        data = {'PA': ro.FloatVector(ravel(PAmask))}
        domain = PAmask.shape
        logger.info('mask data converted to R float vector')
    except Exception as e:
        # BUGFIX: the failure was silently swallowed, producing a confusing
        # NameError on 'data' further down; fail loudly instead
        msg = 'failed to convert mask to R vector %s' % e
        logger.exception(msg)
        raise Exception(msg)

    form = 'PA ~ '
    ncs_reference.sort()
    try:
        for i, nc in enumerate(ncs_reference):
            var = get_variable(nc)
            agg = basename(nc).split('_')[-2]
            ds = Dataset(nc)
            vals = squeeze(ds.variables[var])
            vals[vals > 1000] = 0  # clamp fill values / outliers
            vals[isnan(PAmask)] = nan  # apply the same land/sea mask
            indice = '%s_%s' % (var, agg)
            data[str(indice)] = ro.FloatVector(ravel(vals))
            # build formula: PA ~ s(ind1, k=3) + s(ind2, k=3) + ...
            if i == 0:
                form = form + 's(%s, k=3)' % indice
            else:
                form = form + ' + s(%s, k=3)' % indice
    except Exception as e:
        logger.debug('form string generation for gam failed')

    dataf = ro.DataFrame(data)
    eq = ro.Formula(str(form))
    gam_model = mgcv.gam(base.eval(eq), data=dataf,
                         family=stats.binomial(), scale=-1,
                         na_action=stats.na_exclude)

    ###########################
    # plot response curves
    ###########################
    from flyingpigeon.visualisation import concat_images
    from tempfile import mkstemp
    infos = []
    for i in range(1, len(ncs_reference) + 1):
        ip, info = mkstemp(dir='.', suffix='.png')
        infos.append(info)
        grdevices.png(filename=info)
        # logistic back-transformation: plot probabilities, not link values
        trans = ro.r('function(x){exp(x)/(1+exp(x))}')
        mgcv.plot_gam(gam_model, trans=trans, shade='T', col='black',
                      select=i, ylab='Predicted Probability',
                      rug=False, cex_lab=1.4, cex_axis=1.4)
        grdevices.dev_off()
    infos_concat = concat_images(infos, orientation='h')

    predict_gam = mgcv.predict_gam(gam_model, type="response",
                                   progress="text",
                                   na_action=stats.na_exclude)
    prediction = array(predict_gam).reshape(domain)
    return gam_model, prediction, infos_concat
def get_gam(ncs_reference, PAmask, modelname=None):
    """
    Train a GAM (R mgcv via rpy2) relating presence/absence to climate indices.

    :param ncs_reference: list of netCDF files, one indice variable per file;
                          the aggregation tag is parsed from the file name
                          (second-to-last '_'-separated token)
    :param PAmask: 2D numpy array presence/absence mask (nan = masked cell)
    :param modelname: optional label for the model; accepted for compatibility
                      with callers that pass ``modelname=key`` (currently unused)
    :return: tuple (gam_model, prediction reshaped to PAmask.shape,
             path of concatenated response-curve png)
    """
    from netCDF4 import Dataset
    from os.path import basename
    from numpy import squeeze, ravel, isnan, nan, array
    from flyingpigeon.utils import get_variable
    try:
        from rpy2.robjects.packages import importr
        import rpy2.robjects as ro
        import rpy2.robjects.numpy2ri
        rpy2.robjects.numpy2ri.activate()
        base = importr("base")
        stats = importr("stats")
        mgcv = importr("mgcv")
        # BUGFIX: this import was commented out although grdevices.png() and
        # grdevices.dev_off() are called below (NameError at plot time)
        grdevices = importr('grDevices')
        logger.info('rpy2 modules imported')
    except Exception as e:
        msg = 'failed to import rpy2 modules %s' % e
        logger.debug(msg)
        raise Exception(msg)

    try:
        # response variable: flattened presence/absence mask
        data = {'PA': ro.FloatVector(ravel(PAmask))}
        domain = PAmask.shape
        logger.info('mask data converted to R float vector')
    except Exception as e:
        # BUGFIX: the failure was silently swallowed, producing a confusing
        # NameError on 'data' further down; fail loudly instead
        msg = 'failed to convert mask to R vector %s' % e
        logger.exception(msg)
        raise Exception(msg)

    form = 'PA ~ '
    ncs_reference.sort()
    try:
        for i, nc in enumerate(ncs_reference):
            var = get_variable(nc)
            agg = basename(nc).split('_')[-2]
            ds = Dataset(nc)
            vals = squeeze(ds.variables[var])
            vals[vals > 1000] = 0  # clamp fill values / outliers
            vals[isnan(PAmask)] = nan  # apply the same land/sea mask
            indice = '%s_%s' % (var, agg)
            data[str(indice)] = ro.FloatVector(ravel(vals))
            # build formula: PA ~ s(ind1, k=3) + s(ind2, k=3) + ...
            if i == 0:
                form = form + 's(%s, k=3)' % indice
            else:
                form = form + ' + s(%s, k=3)' % indice
    except Exception as e:
        logger.debug('form string generation for gam failed')

    dataf = ro.DataFrame(data)
    eq = ro.Formula(str(form))
    gam_model = mgcv.gam(base.eval(eq), data=dataf,
                         family=stats.binomial(), scale=-1,
                         na_action=stats.na_exclude)

    ###########################
    # plot response curves
    ###########################
    from flyingpigeon.visualisation import concat_images
    from tempfile import mkstemp
    infos = []
    for i in range(1, len(ncs_reference) + 1):
        ip, info = mkstemp(dir='.', suffix='.png')
        infos.append(info)
        grdevices.png(filename=info)
        # logistic back-transformation: plot probabilities, not link values
        trans = ro.r('function(x){exp(x)/(1+exp(x))}')
        mgcv.plot_gam(gam_model, trans=trans, shade='T', col='black',
                      select=i, ylab='Predicted Probability',
                      rug=False, cex_lab=1.4, cex_axis=1.4)
        grdevices.dev_off()
    infos_concat = concat_images(infos, orientation='h')

    predict_gam = mgcv.predict_gam(gam_model, type="response",
                                   progress="text",
                                   na_action=stats.na_exclude)
    prediction = array(predict_gam).reshape(domain)
    return gam_model, prediction, infos_concat
def execute(self):
    """
    WPS process chain: csv of GBIF occurrences + pre-computed indice files ->
    per-dataset PA mask, reference indices, GAM training, prediction, and
    archived/concatenated outputs.

    Reads process inputs from self, writes results to self.output_* .
    Most steps are best-effort: failures are logged and the chain continues,
    so later steps may see unbound names if an earlier step failed
    (NOTE(review): e.g. 'resources' if argument reading fails, 'prediction'
    if GAM training fails — intentional-looking but fragile).
    """
    from os.path import basename
    from flyingpigeon import sdm
    from flyingpigeon.utils import archive, archiveextract  # , get_domain
    from flyingpigeon.visualisation import map_PAmask

    init_process_logger('log.txt')
    self.output_log.setValue('log.txt')

    self.status.set('Start process', 0)

    try:
        self.status.set('reading the arguments', 5)
        # indice files may arrive as an archive; expand to a flat file list
        resources = archiveextract(
            self.getInputValues(identifier='input_indices'))
        csv_file = self.getInputValues(identifier='gbif')[0]
        period = self.getInputValues(identifier='period')
        period = period[0]
        archive_format = self.archive_format.getValue()
    except:
        # NOTE(review): swallowed — subsequent code needs 'resources' etc.
        logger.error('failed to read in the arguments')

    try:
        self.status.set('read in latlon coordinates', 10)
        # occurrence coordinates from the GBIF csv export
        latlon = sdm.latlon_gbifcsv(csv_file)
    except:
        logger.exception('failed to extract the latlon points')

    try:
        self.status.set('plot map', 20)
        from flyingpigeon.visualisation import map_gbifoccurrences
        # latlon = sdm.latlon_gbifdic(gbifdic)
        occurence_map = map_gbifoccurrences(latlon)
    except:
        logger.exception('failed to plot occurence map')

    # try:
    #     self.status.set('get domain', 30)
    #     domains = set()
    #     for indice in resources:
    #         # get_domain works only if metadata are set in a correct way
    #         domains = domains.union([basename(indice).split('_')[1]])
    #     if len(domains) == 1:
    #         domain = list(domains)[0]
    #         logger.info('Domain %s found in indices files' % domain)
    #     else:
    #         logger.warn('NOT a single domain in indices files %s' % domains)
    # except:
    #     logger.exception('failed to get domains')

    try:
        # sort indices: group the flat file list into one entry per dataset
        indices_dic = sdm.sort_indices(resources)
        logger.info('indice files sorted for %s Datasets' %
                    len(indices_dic.keys()))
    except:
        msg = 'failed to sort indices'
        logger.exception(msg)
        raise Exception(msg)

    # accumulators across all datasets
    ncs_references = []
    species_files = []
    stat_infos = []
    PAmask_pngs = []

    self.status.set('Start processing for %s Datasets' %
                    len(indices_dic.keys()))
    for count, key in enumerate(indices_dic.keys()):
        try:
            # progress: 10%-wide slot per dataset starting at 40
            # NOTE(review): 'staus_nr' is a typo for status_nr (kept as-is)
            staus_nr = 40 + count * 10
            self.status.set('Start processing of %s' % key, staus_nr)
            ncs = indices_dic[key]
            logger.info('with %s files' % len(ncs))

            try:
                self.status.set('generating the PA mask', 20)
                # presence/absence mask on the grid of the first indice file
                PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                logger.info('PA mask sucessfully generated')
            except:
                logger.exception('failed to generate the PA mask')

            try:
                self.status.set('Ploting PA mask', 25)
                PAmask_pngs.extend([map_PAmask(PAmask)])
            except:
                logger.exception('failed to plot the PA mask')

            try:
                # reference indices (training period) for this dataset
                ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                  period=period)
                ncs_references.extend(ncs_reference)
                logger.info('reference indice calculated %s ' % ncs_references)
                self.status.set('reference indice calculated', staus_nr + 2)
            except:
                msg = 'failed to calculate the reference'
                logger.exception(msg)
                # raise Exception(msg)

            try:
                gam_model, predict_gam, gam_info = sdm.get_gam(
                    ncs_reference, PAmask, modelname=key)
                stat_infos.append(gam_info)
                self.status.set('GAM sucessfully trained', staus_nr + 5)
            except:
                msg = 'failed to train GAM for %s' % (key)
                logger.exception(msg)

            try:
                # project the trained GAM onto the full (scenario) indices
                prediction = sdm.get_prediction(gam_model, ncs)
                self.status.set('prediction done', staus_nr + 7)
            except:
                msg = 'failed to predict tree occurence'
                logger.exception(msg)
                # raise Exception(msg)

            # try:
            #     self.status.set('land sea mask for predicted data',
            #                     staus_nr + 8)
            #     from numpy import invert, isnan, nan, broadcast_arrays
            #     # , array, zeros, linspace, meshgrid
            #     mask = invert(isnan(PAmask))
            #     mask = broadcast_arrays(prediction, mask)[1]
            #     prediction[mask is False] = nan
            # except:
            #     logger.exception('failed to mask predicted data')

            try:
                species_files.append(sdm.write_to_file(ncs[0], prediction))
                logger.info('Favourabillity written to file')
            except:
                msg = 'failed to write species file'
                logger.exception(msg)
                # raise Exception(msg)
        except:
            msg = 'failed to process SDM chain for %s ' % key
            logger.exception(msg)
            # raise Exception(msg)

    try:
        archive_references = None
        archive_references = archive(ncs_references, format=archive_format)
        logger.info('indices 2D added to archive')
    except:
        msg = 'failed adding 2D indices to archive'
        logger.exception(msg)
        raise Exception(msg)

    archive_predicion = None
    try:
        archive_predicion = archive(species_files, format=archive_format)
        logger.info('species_files added to archive')
    except:
        msg = 'failed adding species_files indices to archive'
        logger.exception(msg)
        raise Exception(msg)

    try:
        from flyingpigeon.visualisation import pdfmerge, concat_images
        stat_infosconcat = pdfmerge(stat_infos)
        logger.debug('pngs %s' % PAmask_pngs)
        PAmask_png = concat_images(PAmask_pngs, orientation='h')
        logger.info('stat infos pdfs and mask pngs merged')
    except:
        logger.exception('failed to concat images')
        # fall back to empty placeholder files so the outputs can be set
        _, stat_infosconcat = tempfile.mkstemp(suffix='.pdf',
                                               prefix='foobar-', dir='.')
        _, PAmask_png = tempfile.mkstemp(suffix='.png',
                                         prefix='foobar-', dir='.')

    self.output_gbif.setValue(occurence_map)
    self.output_PA.setValue(PAmask_png)
    self.output_reference.setValue(archive_references)
    self.output_prediction.setValue(archive_predicion)
    self.output_info.setValue(stat_infosconcat)
    self.status.set('done', 100)
def execute(self):
    """
    WPS process chain (spatial analog variant): compute climate indices from
    raw resources, train a GAM per dataset at a single point coordinate, and
    predict/write analog files.

    Reads process inputs from self, writes results to self.output_* .
    Steps are best-effort: failures are logged at debug level and the chain
    continues (NOTE(review): 'gam_model' / 'prediction' / 'archive_indices'
    may be unbound downstream if an earlier step failed).
    """
    from os.path import basename
    from flyingpigeon import sdm
    from flyingpigeon import spatial_analog as sa
    from flyingpigeon.utils import archive

    self.status.set('Start process', 0)

    try:
        logger.info('reading the arguments')
        resources = self.getInputValues(identifier='resources')
        # taxon_name = self.getInputValues(identifier='taxon_name')[0]
        # period = self.period.getValue()
        coords = self.getInputValues(identifier='coords')[0]
        period = self.getInputValues(identifier='period')[0]
        # 'coords' arrives as a "lon,lat"-style comma-separated string
        coordinate = [float(n) for n in coords.split(',')]
        # indices = self.input_indices.getValue()
        indices = self.getInputValues(identifier='input_indices')
        logger.info("indices = %s ", indices)
        archive_format = self.archive_format.getValue()
    except Exception as e:
        logger.error('failed to read in the arguments %s ' % e)

    #################################
    # calculate the climate indices
    #################################

    # get the indices
    ncs_indices = None
    try:
        self.status.set('start calculation of climate indices for %s'
                        % indices, 30)
        ncs_indices = sdm.get_indices(resources=resources, indices=indices)
        logger.info('indice calculation done')
    except:
        msg = 'failed to calculate indices'
        logger.debug(msg)
        # raise Exception(msg)

    try:
        archive_indices = archive(ncs_indices, format=archive_format)
        logger.info('indices 3D added to tarfile')
    except:
        msg = 'failed adding indices to tar'
        logger.debug(msg)
        # raise Exception(msg)

    indices_dic = None
    try:
        # sort indices: group the indice files into one entry per dataset
        indices_dic = sdm.sort_indices(ncs_indices)
        logger.info('indice files sorted for %s datasets' %
                    len(indices_dic.keys()))
    except:
        msg = 'failed to sort indices'
        logger.debug(msg)
        # raise Exception(msg)

    ncs_references = []
    analogs = []
    statistics_info = []

    for count, key in enumerate(indices_dic.keys()):
        try:
            self.status.set('Start processing of %s ' % key, 40 + count * 10)
            ncs = indices_dic[key]
            logger.info('with %s files' % len(ncs))
            # train a GAM on the time series extracted at 'coordinate'
            gam_model, statistic_plot = sa.get_gam(ncs, coordinate)
            statistics_info.append(statistic_plot)
            self.status.set('GAM sucessfully trained', 70)
        except:
            msg = 'failed to train GAM'
            logger.debug(msg)
            # raise Exception(msg)

        try:
            prediction = sdm.get_prediction(gam_model, ncs_indices)
            self.status.set('prediction done', 80)
        except:
            msg = 'failed to predict'
            logger.debug(msg)
            # raise Exception(msg)

        # try:
        #     from numpy import invert, isnan, nan, broadcast_arrays, array,
        #     zeros, linspace, meshgrid
        #     mask = invert(isnan(PAmask))
        #     mask = broadcast_arrays(prediction, mask)[1]
        #     prediction[mask==False] = nan
        #     self.status.set('land sea mask for predicted data', 90)
        # except:
        #     logger.debug('failed to mask predicted data')

        try:
            analogs.append(sdm.write_to_file(ncs_indices[0], prediction))
            logger.info('Analog written to file')
            # tar_prediction.add(species_file,
            #                    arcname = basename(species_file))
        except:
            msg = 'failed to write species file'
            logger.debug(msg)
            # raise Exception(msg)

    from flyingpigeon.visualisation import concat_images
    statistics_infos = None
    try:
        statistics_infos = concat_images(statistics_info, orientation='v')
        logger.info('statistc graphics concatinated')
    except:
        msg = 'failed to concat images'
        logger.debug(msg)
        # raise Exception(msg)

    # archive_references = None
    # try:
    #     archive_references = archive(ncs_references, format=archive_format)
    #     logger.info('indices 2D added to archive')
    # except:
    #     msg = 'failed adding 2D indices to archive'
    #     logger.debug(msg)
    #     # raise Exception(msg)

    archive_analogs = None
    try:
        archive_analogs = archive(analogs, format=archive_format)
        logger.info('analog file added to archive')
    except:
        msg = 'failed adding analog file to archive'
        logger.debug(msg)
        # raise Exception(msg)

    self.output_indices.setValue(archive_indices)
    self.output_analogs.setValue(archive_analogs)
    # pick the first truthy analog file as the example output
    # NOTE(review): if all entries are falsy, i is None and analogs[i] raises
    i = next((i for i, x in enumerate(analogs) if x), None)
    self.output_example.setValue(analogs[i])
    self.output_info.setValue(statistics_infos)
    self.status.set('done', 100)
def execute(self):
    """
    WPS process chain: fetch GBIF occurrences for a taxon, compute climate
    indices from raw resources, generate PA mask, train a GAM on reference
    indices, predict favourability and archive all outputs.

    Reads process inputs from self, writes results to self.output_* .
    """
    from flyingpigeon import sdm
    from flyingpigeon.utils import archive

    self.status.set('Start process', 0)

    try:
        logger.info('reading the arguments')
        resources = self.getInputValues(identifier='resources')
        taxon_name = self.getInputValues(identifier='taxon_name')[0]
        period = self.getInputValues(identifier='period')
        period = period[0]
        indices = self.getInputValues(identifier='input_indices')
        logger.debug("indices = %s for %s ", indices, taxon_name)
        archive_format = self.archive_format.getValue()
    except Exception as e:
        logger.error('failed to read in the arguments %s ' % e)

    logger.info('indices %s ' % indices)

    try:
        self.status.set('Fetching GBIF Data', 10)
        # NOTE: 'gbif_serach' is the (misspelled) name of the sdm API function
        latlon = sdm.gbif_serach(taxon_name)
    except Exception as e:
        logger.exception('failed to search gbif %s' % e)

    # BUGFIX: pre-assign the file name so the except-branch below can write a
    # placeholder; previously 'tree_presents' was unbound if
    # map_gbifoccurrences raised, turning the fallback into a NameError
    tree_presents = 'tree_presents.png'
    try:
        from flyingpigeon.visualisation import map_gbifoccurrences
        self.status.set('plotting Tree presents based on coordinates', 15)
        tree_presents = map_gbifoccurrences(latlon)
    except Exception as e:
        msg = 'plotting points failed'
        logger.exception(msg)
        with open(tree_presents, 'w') as fp:
            # TODO: needs to be a png file
            fp.write(msg)

    try:
        self.status.set('generating the PA mask', 20)
        # presence/absence mask derived from the occurrence coordinates
        PAmask = sdm.get_PAmask(coordinates=latlon)
        logger.info('PA mask sucessfully generated')
    except Exception as e:
        logger.exception('failed to generate the PA mask')

    png_PA_mask = 'PA_mask.png'
    try:
        import matplotlib.pyplot as plt
        self.status.set('Ploting PA mask', 25)
        fig = plt.figure(figsize=(20, 10), dpi=300,
                         facecolor='w', edgecolor='k')
        cs = plt.contourf(PAmask)
        fig.savefig(png_PA_mask)
        plt.close()
    except Exception as e:
        msg = 'failed to plot the PA mask'
        logger.exception(msg)
        with open(png_PA_mask, 'w') as fp:
            # TODO: needs to be a png file
            fp.write(msg)

    #################################
    # calculate the climate indices
    #################################
    ncs_indices = None
    try:
        self.status.set(
            'start calculation of climate indices for %s' % indices, 30)
        ncs_indices = sdm.get_indices(resources=resources, indices=indices)
        logger.info('indice calculation done')
    except:
        msg = 'failed to calculate indices'
        logger.exception(msg)
        raise Exception(msg)

    try:
        archive_indices = archive(ncs_indices, format=archive_format)
        logger.info('indices 3D added to tarfile')
    except:
        msg = 'failed adding indices to tar'
        logger.exception(msg)
        raise Exception(msg)

    indices_dic = None
    try:
        # sort indices: group indice files into one entry per dataset
        indices_dic = sdm.sort_indices(ncs_indices)
        logger.info('indice files sorted for %s Datasets' %
                    len(indices_dic.keys()))
    except:
        msg = 'failed to sort indices'
        logger.exception(msg)
        raise Exception(msg)

    ncs_references = []
    species_files = []
    statistics_info = []

    # collect the reference-period indices for every dataset
    for count, key in enumerate(indices_dic.keys()):
        try:
            self.status.set('Start processing of %s ' % key, 40 + count * 10)
            ncs = indices_dic[key]
            logger.info('with %s files' % len(ncs))
            try:
                ncs_references.extend(
                    sdm.get_reference(ncs_indices=ncs, period=period))
                logger.info('reference indice calculated %s '
                            % ncs_references)
            except:
                msg = 'failed adding ref indices to tar'
                logger.exception(msg)
                raise Exception(msg)
        except:
            msg = 'failed to calculate reference indices.'
            logger.exception(msg)
            raise Exception(msg)

    try:
        # train a single GAM on the accumulated reference indices
        gam_model, predict_gam, gam_info = sdm.get_gam(ncs_references, PAmask)
        statistics_info.append(gam_info)
        self.status.set('GAM sucessfully trained', 70)
    except:
        msg = 'failed to train GAM'
        logger.exception(msg)
        raise Exception(msg)

    try:
        prediction = sdm.get_prediction(gam_model, ncs_indices)
        self.status.set('prediction done', 80)
    except:
        msg = 'failed to predict'
        logger.exception(msg)
        raise Exception(msg)

    try:
        # apply the land/sea mask of PAmask to the prediction
        from numpy import invert, isnan, nan, broadcast_arrays
        mask = invert(isnan(PAmask))
        mask = broadcast_arrays(prediction, mask)[1]
        prediction[mask == False] = nan  # noqa: E712 (elementwise compare)
        self.status.set('land sea mask for predicted data', 90)
    except:
        logger.exception('failed to mask predicted data')

    try:
        species_files.append(sdm.write_to_file(ncs_indices[0], prediction))
        logger.info('Favourabillity written to file')
    except:
        msg = 'failed to write species file'
        logger.exception(msg)
        raise Exception(msg)

    from flyingpigeon.visualisation import concat_images
    statistics_infos = None
    try:
        statistics_infos = concat_images(statistics_info, orientation='v')
    except:
        msg = 'failed to concat images'
        logger.exception(msg)
        raise Exception(msg)

    archive_references = None
    try:
        archive_references = archive(ncs_references, format=archive_format)
        logger.info('indices 2D added to archive')
    except:
        msg = 'failed adding 2D indices to archive'
        logger.exception(msg)
        raise Exception(msg)

    archive_predicion = None
    try:
        archive_predicion = archive(species_files, format=archive_format)
        logger.info('species_files added to archive')
    except:
        msg = 'failed adding species_files indices to archive'
        logger.exception(msg)
        raise Exception(msg)

    self.output_gbif.setValue(tree_presents)
    self.output_PA.setValue(png_PA_mask)
    self.output_indices.setValue(archive_indices)
    self.output_reference.setValue(archive_references)
    self.output_prediction.setValue(archive_predicion)
    self.output_info.setValue(statistics_infos)
    self.status.set('done', 100)
def _handler(self, request, response):
    """
    PyWPS handler: fetch GBIF occurrences for a taxon, compute climate
    indices, train a GAM per dataset and predict species favourability.

    :param request: PyWPS request with inputs 'resource', 'taxon_name',
                    'period', 'indices', 'archive_format', 'gbif'
    :param response: PyWPS response; output files are attached to it
    :return: the populated response object
    """
    init_process_logger('log.txt')
    response.outputs['output_log'].file = 'log.txt'

    response.update_status('Start process', 0)

    try:
        LOGGER.info('reading the arguments')
        resources = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        taxon_name = request.inputs['taxon_name'][0].data
        # whole-globe default; bbox input is currently disabled
        bbox = [-180, -90, 180, 90]
        # bbox_obj = self.BBox.getValue()
        # bbox = [bbox_obj.coords[0][0], bbox_obj.coords[0][1],
        #         bbox_obj.coords[1][0], bbox_obj.coords[1][1]]
        period = request.inputs['period']
        period = period[0].data
        indices = [inpt.data for inpt in request.inputs['indices']]
        archive_format = request.inputs['archive_format'][0].data
        # BUGFIX: was LOGGER.exception() outside any except block,
        # which logs a bogus 'NoneType: None' traceback
        LOGGER.info("indices = %s for %s ", indices, taxon_name)
        LOGGER.info("bbox={0}".format(bbox))
    except:
        LOGGER.exception('failed to read in the arguments')

    LOGGER.info('indices %s ' % indices)

    try:
        response.update_status('Fetching GBIF Data', 10)
        gbifdic = sdm.get_gbif(taxon_name, bbox=bbox)
        LOGGER.info('Fetched GBIF data')
    except:
        msg = 'failed to search gbif.'
        LOGGER.exception(msg)
        raise Exception(msg)

    try:
        response.update_status('write csv file', 70)
        gbifcsv = sdm.gbifdic2csv(gbifdic)
        LOGGER.info('GBIF data written to file')
    except:
        msg = 'failed to write csv file.'
        LOGGER.exception(msg)
        raise Exception(msg)

    try:
        response.update_status('plot map', 80)
        latlon = sdm.latlon_gbifdic(gbifdic)
        occurence_map = map_gbifoccurrences(latlon)
    except:
        msg = 'failed to plot occurence map.'
        LOGGER.exception(msg)
        raise Exception(msg)

    #################################
    # calculate the climate indices
    #################################
    ncs_indices = None
    try:
        response.update_status(
            'start calculation of climate indices for %s' % indices, 30)
        ncs_indices = sdm.get_indices(resource=resources, indices=indices)
        LOGGER.info('indice calculation done')
    except:
        msg = 'failed to calculate indices'
        LOGGER.exception(msg)
        raise Exception(msg)

    try:
        # sort indices: one dictionary entry per dataset
        indices_dic = sdm.sort_indices(ncs_indices)
        LOGGER.info('indice files sorted in dictionary')
    except:
        msg = 'failed to sort indices'
        LOGGER.exception(msg)
        indices_dic = {'dummy': []}

    ncs_references = []
    species_files = []
    stat_infos = []
    PAmask_pngs = []

    response.update_status('Start processing for %s Datasets' %
                           len(indices_dic.keys()))
    for count, key in enumerate(indices_dic.keys()):
        try:
            # progress: 10%-wide slot per dataset starting at 40
            staus_nr = 40 + count * 10
            response.update_status('Start processing of %s' % key, staus_nr)
            ncs = indices_dic[key]
            LOGGER.info('with %s files' % len(ncs))

            try:
                response.update_status('generating the PA mask', 20)
                PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                LOGGER.info('PA mask sucessfully generated')
            except:
                LOGGER.exception('failed to generate the PA mask')

            try:
                response.update_status('Ploting PA mask', 25)
                PAmask_pngs.extend([map_PAmask(PAmask)])
            except:
                LOGGER.exception('failed to plot the PA mask')

            try:
                ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                  period=period)
                ncs_references.extend(ncs_reference)
                LOGGER.info('reference indice calculated %s '
                            % ncs_references)
            except:
                msg = 'failed to calculate the reference'
                LOGGER.exception(msg)

            try:
                gam_model, predict_gam, gam_info = sdm.get_gam(
                    ncs_reference, PAmask)
                stat_infos.append(gam_info)
                response.update_status('GAM sucessfully trained',
                                       staus_nr + 5)
            except:
                msg = 'failed to train GAM for %s' % (key)
                LOGGER.exception(msg)

            try:
                prediction = sdm.get_prediction(gam_model, ncs)
                response.update_status('prediction done', staus_nr + 7)
            except:
                msg = 'failed to predict tree occurence'
                LOGGER.exception(msg)
                # raise Exception(msg)

            # try:
            #     response.update_status('land sea mask for predicted data',
            #                            staus_nr + 8)
            #     from numpy import invert, isnan, nan, broadcast_arrays
            #     mask = invert(isnan(PAmask))
            #     mask = broadcast_arrays(prediction, mask)[1]
            #     prediction[mask is False] = nan
            # except:
            #     LOGGER.exception('failed to mask predicted data')

            try:
                species_files.append(sdm.write_to_file(ncs[0], prediction))
                LOGGER.info('Favourabillity written to file')
            except:
                msg = 'failed to write species file'
                LOGGER.exception(msg)
                # raise Exception(msg)
        except:
            msg = 'failed to calculate reference indices'
            LOGGER.exception(msg)
            raise Exception(msg)

    try:
        archive_indices = archive(ncs_indices, format=archive_format)
        LOGGER.info('indices added to archive')
    except:
        msg = 'failed adding indices to archive'
        LOGGER.exception(msg)
        # BUGFIX: mkstemp returns (fd, path); previously the whole tuple was
        # assigned, so response.outputs[...].file got a tuple, not a path
        _, archive_indices = tempfile.mkstemp(suffix='.tar',
                                              prefix='foobar-', dir='.')

    try:
        archive_references = archive(ncs_references, format=archive_format)
        LOGGER.info('indices reference added to archive')
    except:
        msg = 'failed adding reference indices to archive'
        LOGGER.exception(msg)
        # BUGFIX: unpack (fd, path) as above
        _, archive_references = tempfile.mkstemp(suffix='.tar',
                                                 prefix='foobar-', dir='.')

    try:
        archive_prediction = archive(species_files, format=archive_format)
        LOGGER.info('species_files added to archive')
    except:
        msg = 'failed adding species_files indices to archive'
        LOGGER.exception(msg)
        raise Exception(msg)

    try:
        stat_infosconcat = pdfmerge(stat_infos)
        LOGGER.debug('pngs %s' % PAmask_pngs)
        PAmask_png = concat_images(PAmask_pngs, orientation='h')
        LOGGER.info('stat infos pdfs and mask pngs merged')
    except:
        LOGGER.exception('failed to concat images')
        # fall back to empty placeholders so outputs can still be attached
        _, stat_infosconcat = tempfile.mkstemp(suffix='.pdf',
                                               prefix='foobar-', dir='.')
        _, PAmask_png = tempfile.mkstemp(suffix='.png',
                                         prefix='foobar-', dir='.')

    response.outputs['output_gbif'].file = occurence_map
    response.outputs['output_PA'].file = PAmask_png
    response.outputs['output_indices'].file = archive_indices
    response.outputs['output_reference'].file = archive_references
    response.outputs['output_prediction'].file = archive_prediction
    response.outputs['output_info'].file = stat_infosconcat
    response.outputs['output_csv'].file = gbifcsv
    response.update_status('done', 100)
    return response
def get_gam(ncs_indices, coordinate):
    """
    Train a GAM (R mgcv via rpy2) on the indice time series extracted at a
    single point, for spatial-analog detection, and plot response curves.

    :param ncs_indices: list of netCDF indice files (rotated-pole grids; the
                        files are rewritten in place with unrotated lats/lons)
    :param coordinate: (lon, lat)-like pair of numbers
                       # NOTE(review): order assumed from Point(x, y) usage —
                       # confirm against callers
    :return: tuple (gam_model, path of concatenated response-curve graphic)
    """
    from netCDF4 import Dataset
    from os.path import basename
    from shapely.geometry import Point
    from numpy import squeeze, ravel, isnan, nan, array, reshape
    from flyingpigeon.utils import get_variable, get_values, unrotate_pole
    from flyingpigeon.ocgis_module import call

    try:
        from rpy2.robjects.packages import importr
        import rpy2.robjects as ro
        import rpy2.robjects.numpy2ri
        rpy2.robjects.numpy2ri.activate()
        base = importr("base")
        stats = importr("stats")
        mgcv = importr("mgcv")
        logger.info('rpy2 modules imported')
    except Exception as e:
        msg = 'failed to import rpy2 modules %s' % e
        logger.debug(msg)
        raise Exception(msg)

    for i, ncs in enumerate(ncs_indices):
        # ocgis need unrotated coordinates to extract points
        # unrotate_pole writes lats lons into the file.
        # ACHTUNG: will fail if the data is stored on a file system
        # with no write permissions
        try:
            lats, lons = unrotate_pole(ncs, write_to_file=True)
            point = Point(float(coordinate[0]), float(coordinate[1]))
            # get the values
            variable = get_variable(ncs)
            agg = basename(ncs).split('_')[-2]
            indice = '%s_%s' % (variable, agg)
            # extract the time series of the nearest grid cell
            timeseries = call(resource=ncs, geom=point, select_nearest=True)
            ts = Dataset(timeseries)
            vals = squeeze(ts.variables[variable][:])

            # NOTE(review): shadows the builtins min/max inside this function
            from numpy import min, max, mean, append, zeros, ones
            # widen the sample: append the series shifted down and up by its
            # own range, tripling its length
            dif = max(vals) - min(vals)
            a = append(vals - dif, vals)
            vals = append(a, vals + dif)

            if i == 0:
                from numpy import zeros, ones
                # pseudo presence/absence labels: only the middle (original)
                # third of the widened series counts as presence
                a = append(zeros(len(vals)), ones(len(vals)))
                PA = append(a, zeros(len(vals)))
                # NOTE(review): PA is 3x len(vals) while vals columns are
                # len(vals) — lengths assumed to be reconciled in R; verify
                data = {'PA': ro.FloatVector(PA)}
                data[str(indice)] = ro.FloatVector(vals)
                # build formula: PA ~ s(ind1, k=3) + s(ind2, k=3) + ...
                form = 'PA ~ '
                form = form + 's(%s, k=3)' % indice
            else:
                form = form + ' + s(%s, k=3)' % indice
                data[str(indice)] = ro.FloatVector(vals)
        except Exception as e:
            # NOTE(review): if the first iteration fails, 'data' and 'form'
            # stay unbound and the model step below raises NameError
            msg = 'Failed to prepare data %s' % e
            logger.debug(msg)

    try:
        logger.info(data)
        dataf = ro.DataFrame(data)
        eq = ro.Formula(str(form))
        gam_model = mgcv.gam(base.eval(eq), data=dataf,
                             family=stats.binomial(), scale=-1,
                             na_action=stats.na_exclude)  #
        logger.info('GAM model trained')
    except Exception as e:
        msg = 'Failed to generate GAM model %s' % e
        logger.debug(msg)

    # ### ###########################
    # # plot response curves
    # ### ###########################
    try:
        from flyingpigeon.visualisation import concat_images
        from tempfile import mkstemp
        grdevices = importr('grDevices')
        # Cairo device avoids the need for an X display on headless servers
        graphicDev = importr('Cairo')
        infos = []

        for i in range(1, len(ncs_indices) + 1):
            ip, info = mkstemp(dir='.', suffix='.png')
            # grdevices.png(filename=info)
            # graphicDev.CairoPDF(info, width = 7, height = 7, pointsize = 12)
            graphicDev.CairoPNG(info, width=640, height=480,
                                pointsize=12)  # 640, 480) #, pointsize = 12
            print 'file opened!'
            infos.append(info)
            # grdevices.png(filename=info)
            # NOTE(review): ylim is built but never passed to plot_gam
            ylim = ro.IntVector([-6, 6])
            # logistic back-transformation to plot probabilities
            trans = ro.r('function(x){exp(x)/(1+exp(x))}')
            mgcv.plot_gam(gam_model, trans=trans, shade='T', col='black',
                          select=i, ylab='Predicted Probability',
                          rug=False, cex_lab=1.4, cex_axis=1.4,)  # ylim=ylim,
            print 'gam plotted ;-)'
            grdevices.dev_off()
            # graphicDev.dev_off()
            # graphicDev.Cairo_onSave( dev_cur(), onSave=True )
        print(' %s plots generated ' % len(infos))
        infos_concat = concat_images(infos, orientation='h')
    except Exception as e:
        msg = 'Failed to plot statistical graphic %s' % e
        logger.debug(msg)
        raise Exception(msg)

    return gam_model, infos_concat
def _handler(self, request, response):
    """
    PyWPS handler (csv variant): download a GBIF csv, generate PA masks from
    its coordinates, train a GAM per dataset on reference indices, predict
    species favourability and archive the outputs.

    :param request: PyWPS request with inputs 'resources', 'period',
                    'archive_format', 'gbif' (URL of a csv export)
    :param response: PyWPS response; output files are attached to it
    :return: the populated response object
    """
    init_process_logger('log.txt')
    response.outputs['output_log'].file = 'log.txt'
    response.update_status('Start process', 0)

    try:
        response.update_status('reading the arguments', 5)
        resources = archiveextract(
            resource=rename_complexinputs(request.inputs['resources']))
        period = request.inputs['period']
        period = period[0].data
        archive_format = request.inputs['archive_format'][0].data
        LOGGER.info("all arguments read in nr of files in resources: {}"
                    .format(len(resources)))
    except Exception as ex:
        # NOTE(review): swallowed — later code needs 'resources' etc.
        LOGGER.exception('failed to read in the arguments: {}'
                         .format(str(ex)))

    try:
        gbif_url = request.inputs['gbif'][0].data
        csv_file = download(gbif_url)
        LOGGER.info('CSV file fetched sucessfully: %s' % csv_file)
    except Exception as ex:
        LOGGER.exception('failed to fetch GBIF file: {}'.format(str(ex)))

    try:
        response.update_status('read in latlon coordinates', 10)
        latlon = sdm.latlon_gbifcsv(csv_file)
        LOGGER.info('read in the latlon coordinates')
    except Exception as ex:
        LOGGER.exception('failed to extract the latlon points: {}'
                         .format(str(ex)))

    try:
        response.update_status('plot map', 20)
        occurence_map = map_gbifoccurrences(latlon)
        LOGGER.info('GBIF occourence ploted')
    except Exception as ex:
        LOGGER.exception('failed to plot occurrence map: {}'.format(str(ex)))

    try:
        # sort indices: group the files into one dictionary entry per dataset
        indices_dic = sdm.sort_indices(resources)
        LOGGER.info('indice files sorted in dictionary')
    except Exception as ex:
        msg = 'failed to sort indices: {}'.format(str(ex))
        LOGGER.exception(msg)
        indices_dic = {'dummy': []}

    ncs_references = []
    species_files = []
    stat_infos = []
    PAmask_pngs = []

    response.update_status('Start processing for {} datasets'
                           .format(len(indices_dic.keys())))
    for count, key in enumerate(indices_dic.keys()):
        try:
            # progress: 10%-wide slot per dataset starting at 40
            status_nr = 40 + count * 10
            response.update_status('Start processing of {}'.format(key),
                                   status_nr)
            ncs = indices_dic[key]
            LOGGER.info('with {} files'.format(len(ncs)))

            try:
                response.update_status('generating the PA mask', 20)
                # presence/absence mask on the grid of the first indice file
                PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                LOGGER.info('PA mask sucessfully generated')
            except Exception as ex:
                msg = 'failed to generate the PA mask: {}'.format(str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

            try:
                response.update_status('Ploting PA mask', 25)
                PAmask_pngs.extend([map_PAmask(PAmask)])
            except Exception as ex:
                msg = 'failed to plot the PA mask: {}'.format(str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

            try:
                ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                  period=period)
                ncs_references.extend(ncs_reference)
                LOGGER.info('reference indice calculated: {}'
                            .format(ncs_references))
                response.update_status('reference indice calculated',
                                       status_nr + 2)
            except Exception as ex:
                msg = 'failed to calculate the reference: {}'.format(str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

            try:
                gam_model, predict_gam, gam_info = sdm.get_gam(ncs_reference,
                                                               PAmask,
                                                               modelname=key)
                stat_infos.append(gam_info)
                response.update_status('GAM sucessfully trained',
                                       status_nr + 5)
            except Exception as ex:
                msg = 'failed to train GAM for {}: {}'.format(key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

            try:
                prediction = sdm.get_prediction(gam_model, ncs)
                response.update_status('prediction done', status_nr + 7)
            except Exception as ex:
                msg = 'failed to predict tree occurence: {}'.format(str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

            # try:
            #     response.update_status('land sea mask for predicted data',
            #                            status_nr + 8)
            #     from numpy import invert, isnan, nan, broadcast_arrays
            #     # , array, zeros, linspace, meshgrid
            #     mask = invert(isnan(PAmask))
            #     mask = broadcast_arrays(prediction, mask)[1]
            #     prediction[mask is False] = nan
            # except:
            #     LOGGER.exception('failed to mask predicted data')

            try:
                species_files.append(sdm.write_to_file(ncs[0], prediction))
                LOGGER.info('Favourabillity written to file')
            except Exception as ex:
                msg = 'failed to write species file: {}'.format(str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)
        except Exception as ex:
            msg = 'failed to process SDM chain for {} : {}'.format(key,
                                                                   str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

    try:
        archive_references = archive(ncs_references, format=archive_format)
        LOGGER.info('indices 2D added to archive')
    except Exception as ex:
        msg = 'failed adding 2D indices to archive: {}'.format(str(ex))
        LOGGER.exception(msg)
        raise Exception(msg)
        # NOTE(review): dead code — unreachable after the raise above, and
        # mkstemp would return a (fd, path) tuple, not a path
        archive_references = tempfile.mkstemp(suffix='.tar',
                                              prefix='foobar-', dir='.')

    try:
        archive_prediction = archive(species_files, format=archive_format)
        LOGGER.info('species_files added to archive')
    except Exception as ex:
        msg = 'failed adding species_files indices to archive: {}'\
            .format(str(ex))
        LOGGER.exception(msg)
        raise Exception(msg)
        # NOTE(review): dead code as above; also note the variable name typo
        # 'archive_predicion' vs 'archive_prediction' used elsewhere
        archive_predicion = tempfile.mkstemp(suffix='.tar',
                                             prefix='foobar-', dir='.')

    try:
        from flyingpigeon.visualisation import pdfmerge, concat_images
        stat_infosconcat = pdfmerge(stat_infos)
        LOGGER.debug('pngs: {}'.format(PAmask_pngs))
        PAmask_png = concat_images(PAmask_pngs, orientation='h')
        LOGGER.info('stat infos pdfs and mask pngs merged')
    except Exception as ex:
        msg = 'failed to concat images: {}'.format(str(ex))
        LOGGER.exception(msg)
        raise Exception(msg)
        # NOTE(review): dead code — the placeholder fallback is unreachable
        # after the raise above
        _, stat_infosconcat = tempfile.mkstemp(suffix='.pdf',
                                               prefix='foobar-', dir='.')
        _, PAmask_png = tempfile.mkstemp(suffix='.png',
                                         prefix='foobar-', dir='.')

    response.outputs['output_gbif'].file = occurence_map
    response.outputs['output_PA'].file = PAmask_png
    response.outputs['output_reference'].file = archive_references
    response.outputs['output_prediction'].file = archive_prediction
    response.outputs['output_info'].file = stat_infosconcat
    response.update_status('done', 100)
    return response
def execute(self):
    """WPS process entry point: species distribution modelling (SDM) chain.

    Reads GBIF occurrence data and climate data resources, derives climate
    indices, trains a GAM per dataset and writes prediction/archive outputs.
    Early steps log-and-continue on failure; the indices/GAM steps raise.
    """
    from os.path import basename
    from flyingpigeon import sdm
    from flyingpigeon.utils import archive
    self.status.set('Start process', 0)
    try:
        logger.info('reading the arguments')
        resources = self.getInputValues(identifier='resources')
        gbif = self.getInputValues(identifier='gbif')
        # period = self.period.getValue()
        period = self.getInputValues(identifier='period')
        period = period[0]
        # indices = self.input_indices.getValue()
        indices = self.getInputValues(identifier='input_indices')
        logger.debug("indices = %s", indices)
        archive_format = self.archive_format.getValue()
    except Exception as e:
        # NOTE(review): swallowed failure here leaves resources/gbif/period
        # unbound, causing a NameError further down — confirm intended.
        logger.error('failed to read in the arguments %s ' % e)
    logger.info('indices %s ' % indices)
    try:
        self.status.set('extract csv file with tree observations', 5)
        # gbif is a list of URLs; only the first entry is used
        csv_file = sdm.get_csv(gbif[0])
    except Exception as e:
        logger.exception('failed to extract csv file from url.')
    try:
        self.status.set('read in latlon coordinates of tree observations', 10)
        latlon = sdm.get_latlon(csv_file)
    except Exception as e:
        logger.exception('failed to extract the latlon points')
    tree_presents = 'tree_presents.png'
    try:
        self.status.set('plotting Tree presents based on coordinates', 15)
        import matplotlib.pyplot as plt
        from cartopy import config
        from cartopy.util import add_cyclic_point
        import cartopy.crs as ccrs
        fig = plt.figure(figsize=(20, 10), dpi=600, facecolor='w', edgecolor='k')
        ax = plt.axes(projection=ccrs.Robinson(central_longitude=0))
        ax.coastlines()
        ax.set_global()
        # latlon columns are (lat, lon); scatter wants x=lon, y=lat
        cs = plt.scatter(latlon[:, 1], latlon[:, 0], transform=ccrs.PlateCarree())
        fig.savefig(tree_presents)
        plt.close()
    except Exception as e:
        msg = 'plotting points failed'
        logger.exception(msg)
        with open(tree_presents, 'w') as fp:
            # TODO: needs to be a png file
            fp.write(msg)
    try:
        self.status.set('generating the PA mask', 20)
        # presence/absence mask on the model grid, NaN outside the domain
        PAmask = sdm.get_PAmask(coordinates=latlon)
        logger.info('PA mask sucessfully generated')
    except Exception as e:
        logger.exception('failed to generate the PA mask')
    png_PA_mask = 'PA_mask.png'
    try:
        self.status.set('Ploting PA mask', 25)
        fig = plt.figure(figsize=(20, 10), dpi=300, facecolor='w', edgecolor='k')
        cs = plt.contourf(PAmask)
        fig.savefig(png_PA_mask)
        plt.close()
    except Exception as e:
        msg = 'failed to plot the PA mask'
        logger.exception(msg)
        with open(png_PA_mask, 'w') as fp:
            # TODO: needs to be a png file
            fp.write(msg)
    #################################
    # calculate the climate indices
    #################################
    # get the indices
    ncs_indices = None
    try:
        self.status.set(
            'start calculation of climate indices for %s' % indices, 30)
        ncs_indices = sdm.get_indices(resources=resources, indices=indices)
        logger.info('indice calculation done')
    except:
        msg = 'failed to calculate indices'
        logger.exception(msg)
        raise Exception(msg)
    try:
        archive_indices = archive(ncs_indices, format=archive_format)
        logger.info('indices 3D added to tarfile')
    except:
        msg = 'failed adding indices to tar'
        logger.exception(msg)
        raise Exception(msg)
    indices_dic = None
    try:
        # sort indices
        indices_dic = sdm.sort_indices(ncs_indices)
        logger.info('indice files sorted for %s Datasets' %
                    len(indices_dic.keys()))
    except:
        msg = 'failed to sort indices'
        logger.exception(msg)
        raise Exception(msg)
    # try:
    #     # open tar files
    #     tar_reference = tarfile.open('reference.tar', "w")
    #     tar_indices = tarfile.open('indices.tar', "w")
    #     tar_info = tarfile.open('info.tar', "w")
    #     tar_prediction = tarfile.open('prediction.tar', "w")
    #     logger.info('tar files prepared')
    # except:
    #     msg = 'tar file preparation failed'
    #     logger.exception(msg)
    #     raise Exception(msg)
    ncs_references = []
    species_files = []
    statistics_info = []
    # one GAM training/prediction cycle per sorted dataset key
    for count, key in enumerate(indices_dic.keys()):
        try:
            self.status.set('Start processing of %s ' % key, 40 + count * 10)
            ncs = indices_dic[key]
            logger.info('with %s files' % len(ncs))
            try:
                ncs_references.extend(
                    sdm.get_reference(ncs_indices=ncs, period=period))
                logger.info('reference indice calculated %s ' % ncs_references)
            except:
                msg = 'failed adding ref indices to tar'
                logger.exception(msg)
                raise Exception(msg)
            # for nc_reference in ncs_references:
            #     tar_reference.add(nc_reference,
            #                       arcname = basename(nc_reference))  # nc_reference.replace(os.path.abspath(os.path.curdir), "")
            #     logger.info('reference indices added to tarfile')
        except:
            msg = 'failed to calculate reference indices.'
            logger.exception(msg)
            raise Exception(msg)
        try:
            # NOTE(review): passes the ACCUMULATED ncs_references list, so
            # later iterations train on references of all previous datasets
            # too — confirm this is intended (elsewhere in this file the
            # per-dataset reference list is passed instead).
            gam_model, predict_gam, gam_info = sdm.get_gam(
                ncs_references, PAmask)
            statistics_info.append(gam_info)
            self.status.set('GAM sucessfully trained', 70)
        except:
            msg = 'failed to train GAM'
            logger.exception(msg)
            raise Exception(msg)
        try:
            prediction = sdm.get_prediction(gam_model, ncs_indices)
            self.status.set('prediction done', 80)
        except:
            msg = 'failed to predict'
            logger.exception(msg)
            raise Exception(msg)
        try:
            # apply the land/sea (PA-domain) mask to the prediction grid
            from numpy import invert, isnan, nan, broadcast_arrays, array, zeros, linspace, meshgrid
            mask = invert(isnan(PAmask))
            mask = broadcast_arrays(prediction, mask)[1]
            prediction[mask == False] = nan
            self.status.set('land sea mask for predicted data', 90)
        except:
            # best-effort: unmasked prediction is still written out below
            logger.exception('failed to mask predicted data')
        try:
            species_files.append(
                sdm.write_to_file(ncs_indices[0], prediction))
            logger.info('Favourabillity written to file')
            # tar_prediction.add(species_file,
            #                    arcname = basename(species_file))  # .replace(os.path.abspath(os.path.curdir), "")
        except:
            msg = 'failed to write species file'
            logger.exception(msg)
            raise Exception(msg)
    from flyingpigeon.visualisation import concat_images
    statistics_infos = None
    try:
        statistics_infos = concat_images(statistics_info, orientation='v')
    except:
        msg = 'failed to concat images'
        logger.exception(msg)
        raise Exception(msg)
    archive_references = None
    try:
        archive_references = archive(ncs_references, format=archive_format)
        logger.info('indices 2D added to archive')
    except:
        msg = 'failed adding 2D indices to archive'
        logger.exception(msg)
        raise Exception(msg)
    # NOTE(review): variable name is misspelled ('predicion') but used
    # consistently below, so behavior is unaffected.
    archive_predicion = None
    try:
        archive_predicion = archive(species_files, format=archive_format)
        logger.info('species_files added to archive')
    except:
        msg = 'failed adding species_files indices to archive'
        logger.exception(msg)
        raise Exception(msg)
    # try:
    #     # tar_indices.close()
    #     # tar_reference.close()
    #     tar_prediction.close()
    #     # tar_info.close()
    #     logger.info('tar files closed')
    # except:
    #     logger.exception('tar file closing failed')
    #     raise Exception
    self.output_csv.setValue(csv_file)
    self.output_gbif.setValue(tree_presents)
    self.output_PA.setValue(png_PA_mask)
    self.output_indices.setValue(archive_indices)
    self.output_reference.setValue(archive_references)
    self.output_prediction.setValue(archive_predicion)
    self.output_info.setValue(statistics_infos)
    self.status.set('done', 100)
def get_gam(ncs_indices, coordinate):
    """Train a GAM for a spatial-analog search at a single point.

    For each climate-indices NetCDF file, extracts the time series at the
    nearest grid point to ``coordinate`` (lat, lon — TODO confirm order
    against caller, which splits a 'coords' string), pads the series with
    shifted copies to build a pseudo presence/absence response, fits a
    binomial GAM via rpy2/mgcv and plots the response curves.

    :param ncs_indices: list of NetCDF files (one per indice)
    :param coordinate: two-element sequence convertible to float
    :return: (gam_model, infos_concat) — R model object and path of the
             concatenated response-curve PNG
    """
    from netCDF4 import Dataset
    from os.path import basename
    from shapely.geometry import Point
    from numpy import squeeze, ravel, isnan, nan, array, reshape
    from flyingpigeon.utils import get_variable, get_values, unrotate_pole
    from flyingpigeon.ocgis_module import call
    try:
        from rpy2.robjects.packages import importr
        import rpy2.robjects as ro
        import rpy2.robjects.numpy2ri
        # activates global numpy <-> R conversion for this process
        rpy2.robjects.numpy2ri.activate()
        base = importr("base")
        stats = importr("stats")
        mgcv = importr("mgcv")
        logger.info('rpy2 modules imported')
    except Exception as e:
        msg = 'failed to import rpy2 modules %s' % e
        logger.debug(msg)
        raise Exception(msg)
    for i, ncs in enumerate(ncs_indices):
        # ocgis need unrotated coordinates to extract points
        # unrotate_pole writes lats lons into the file.
        # ACHTUNG: will fail if the data is stored on a file system with no write permissions
        try:
            lats, lons = unrotate_pole(ncs, write_to_file=True)
            point = Point(float(coordinate[0]), float(coordinate[1]))
            # get the values
            variable = get_variable(ncs)
            # aggregation token is encoded in the filename (second-to-last '_' field)
            agg = basename(ncs).split('_')[-2]
            indice = '%s_%s' % (variable, agg)
            timeseries = call(resource=ncs, geom=point, select_nearest=True)
            ts = Dataset(timeseries)
            vals = squeeze(ts.variables[variable][:])
            from numpy import min, max, mean, append, zeros, ones
            # widen the sample: prepend values shifted down by the range and
            # append values shifted up, tripling the series length
            dif = max(vals) - min(vals)
            a = append(vals - dif, vals)
            vals = append(a, vals + dif)
            if i == 0:
                from numpy import zeros, ones
                # pseudo response: absence for the shifted thirds,
                # presence (1) for the observed middle third
                a = append(zeros(len(vals)), ones(len(vals)))
                PA = append(a, zeros(len(vals)))
                data = {'PA': ro.FloatVector(PA)}
                data[str(indice)] = ro.FloatVector(vals)
                form = 'PA ~ '
                form = form + 's(%s, k=3)' % indice
            else:
                form = form + ' + s(%s, k=3)' % indice
                data[str(indice)] = ro.FloatVector(vals)
        except Exception as e:
            # NOTE(review): swallowed — if the FIRST file fails, 'data' and
            # 'form' stay unbound and the fit below raises NameError; confirm.
            msg = 'Failed to prepare data %s' % e
            logger.debug(msg)
    try:
        logger.info(data)
        dataf = ro.DataFrame(data)
        eq = ro.Formula(str(form))
        gam_model = mgcv.gam(base.eval(eq), data=dataf,
                             family=stats.binomial(), scale=-1,
                             na_action=stats.na_exclude)  #
        logger.info('GAM model trained')
    except Exception as e:
        # NOTE(review): swallowed — 'gam_model' may be unbound afterwards,
        # making the plotting/return below fail; confirm intended.
        msg = 'Failed to generate GAM model %s' % e
        logger.debug(msg)
    # ### ###########################
    # # plot response curves
    # ### ###########################
    try:
        from flyingpigeon.visualisation import concat_images
        from tempfile import mkstemp
        grdevices = importr('grDevices')
        graphicDev = importr('Cairo')
        infos = []
        # mgcv 'select' is 1-based: one plot per smooth term
        for i in range(1, len(ncs_indices) + 1):
            ip, info = mkstemp(dir='.', suffix='.png')
            # grdevices.png(filename=info)
            # graphicDev.CairoPDF(info, width = 7, height = 7, pointsize = 12)
            graphicDev.CairoPNG(
                info,
                width=640,
                height=480,
                pointsize=12
            )  # 640, 480) #, pointsize = 12 width = 30, height = 30,
            print 'file opened!'
            infos.append(info)
            # grdevices.png(filename=info)
            # NOTE(review): 'ylim' is built but never passed to plot_gam
            ylim = ro.IntVector([-6, 6])
            # logistic back-transform so the y axis shows probabilities
            trans = ro.r('function(x){exp(x)/(1+exp(x))}')
            mgcv.plot_gam(
                gam_model,
                trans=trans,
                shade='T',
                col='black',
                select=i,
                ylab='Predicted Probability',
                rug=False,
                cex_lab=1.4,
                cex_axis=1.4,
            )  #
            print 'gam plotted ;-)'
            # ylim=ylim, ,
            grdevices.dev_off()
            # graphicDev.dev_off()
            # graphicDev.Cairo_onSave( dev_cur(), onSave=True )
        print(' %s plots generated ' % len(infos))
        infos_concat = concat_images(infos, orientation='h')
    except Exception as e:
        msg = 'Failed to plot statistical graphic %s' % e
        logger.debug(msg)
        raise Exception(msg)
    return gam_model, infos_concat
def execute(self):
    """WPS process entry point: weather-regime classification.

    Subsets the input NetCDF resources (month selection + bbox via CDO),
    computes PCA per dataset, then clusters with tSNE and/or kMEAN; kMEAN
    additionally writes a date/regime table into ``info.tar`` and composes
    a 2x2 pressure-map panel per dataset.
    """
    logger.info('Start process')
    try:
        logger.info('read in the arguments')
        resources = self.getInputValues(identifier='resources')
        method = self.getInputValues(identifier='method')
        time_region = self.getInputValues(identifier='time_region')[0]
        bbox = self.getInputValues(identifier='BBox')[0]
        logger.info('bbox %s' % str(bbox))
        logger.info('time_region %s' % str(time_region))
        logger.info('method: %s' % str(method))
    except Exception as e:
        logger.error('failed to read in the arguments %s ' % e)
    # bbox = '-80,22.5,50,70'
    logger.info('bbox is set to %s' % bbox)
    #####################
    # get the required bbox from resource
    #####################
    # from flyingpigeon.ocgis_module import call
    from flyingpigeon.utils import sort_by_filename, get_time  # , calc_grouping
    from flyingpigeon import weatherclass as wc
    from flyingpigeon.visualisation import plot_tSNE, plot_kMEAN, concat_images, plot_pressuremap
    from datetime import datetime as dt
    from numpy import savetxt, column_stack
    import tarfile
    from cdo import *
    cdo = Cdo()
    # grouping = calc_grouping(time_region)
    ncs = sort_by_filename(resources, historical_concatination=True)
    png_clusters = []
    txt_info = []
    png_pressuremaps = []
    try:
        # open tar files
        tar_info = tarfile.open('info.tar', "w")
        logger.info('tar files prepared')
    except:
        msg = 'tar file preparation failed'
        logger.exception(msg)
        raise Exception(msg)
    for key in ncs.keys():
        # merge multi-file datasets into one timeseries file
        # NOTE(review): 'input' shadows the builtin and 'merge.nc'/'grouped.nc'
        # /'subset.nc' are reused across iterations — confirm CDO overwrites.
        if len(ncs[key]) > 1:
            input = cdo.timmerge(input=ncs[key], output='merge.nc')
        elif len(ncs[key]) == 1:
            input = ncs[key]
        else:
            # NOTE(review): 'input' stays unbound here; next statement would
            # raise (or reuse the previous iteration's value) — confirm.
            logger.debug('invalid number of input files for dataset %s' % key)
        # for tr in time_region:
        # 'None' arrives as the literal string from the WPS input
        if not time_region == 'None':
            nc_grouped = cdo.selmon(time_region, input=input, output='grouped.nc')
        else:
            nc_grouped = input
        # for bb in bbox:
        nc = cdo.sellonlatbox('%s' % bbox, input=nc_grouped, output='subset.nc')
        logger.info('nc subset: %s ' % nc)
        try:
            vals, pca = wc.get_pca(nc)
            logger.info('PCa calculated')
        except:
            logger.debug('failed to calculate PCs')
            raise
        for md in method:
            try:
                if md == 'tSNE':
                    data = wc.calc_tSNE(pca)
                    png_clusters.append(plot_tSNE(data, title='tSNE month: %s [lonlat: %s]' % (time_region, bbox),
                                                  sub_title='file: %s' % key))
                    logger.info('tSNE calculated for %s ' % key)
                if md == 'kMEAN':
                    kmeans = wc.calc_kMEAN(pca)
                    # cluster label per time step
                    c = kmeans.predict(pca)
                    times = get_time(nc)
                    timestr = [dt.strftime(t, format='%Y-%d-%m_%H:%M:%S') for t in times]
                    tc = column_stack([timestr, c])
                    fn = '%s.txt' % key
                    savetxt(fn, tc, fmt='%s', header='Date_Time WeatherRegime')
                    tar_info.add(fn)  # , arcname = basename(nc)
                    png_clusters.append(plot_kMEAN(kmeans, pca, title='kMEAN month: %s [lonlat: %s]' % (time_region, bbox),
                                                   sub_title='file: %s' % key))
                    logger.info('kMEAN calculated for %s ' % key)
                    subplots = []
                    # one mean pressure map per regime; /100 presumably
                    # converts Pa to hPa — TODO confirm units of vals
                    for i in range(4):
                        subplots.append(plot_pressuremap((vals[c == i] / 100),
                                                         title='Weather Regime %s: Month %s ' % (i, time_region),
                                                         sub_title='file: %s' % key))
                    from PIL import Image
                    import sys
                    from tempfile import mkstemp
                    # NOTE(review): indexing 'open_subplots[c]' below requires
                    # a list — Python 2 map() returns one, Python 3 would not.
                    open_subplots = map(Image.open, subplots)
                    w = max(i.size[0] for i in open_subplots)
                    h = max(i.size[1] for i in open_subplots)
                    # paste the four maps into a 2x2 grid
                    result = Image.new("RGB", (w * 2, h * 2))
                    # p = h / len(open_subplots)
                    # NOTE(review): 'c' and 'i' are reused here, clobbering the
                    # cluster-label array and regime index above — confirm.
                    c = 0
                    for i, iw in enumerate([0, w]):
                        for j, jh in enumerate([0, h]):
                            oi = open_subplots[c]
                            c = c + 1
                            cw = oi.size[0]
                            ch = oi.size[1]
                            box = [iw, jh, iw + cw, jh + ch]
                            result.paste(oi, box=box)
                    ip, pressuremap = mkstemp(dir='.', suffix='.png')
                    result.save(pressuremap)
                    png_pressuremaps.append(pressuremap)
            except:
                logger.debug('faild to calculate cluster for %s' % key)
                raise
    c_clusters = concat_images(png_clusters)
    c_maps = concat_images(png_pressuremaps)
    try:
        tar_info.close()
        logger.info('tar files closed')
    except Exception as e:
        logger.exception('tar file closing failed')
    # call
    # self.output_nc.setValue( nc )
    self.output_clusters.setValue(c_clusters)
    self.output_maps.setValue(c_maps)
    self.output_info.setValue('info.tar')
def execute(self):
    """WPS process entry point: spatial-analog search.

    Computes climate indices from the resources, trains a point-based GAM
    per dataset (via spatial_analog.get_gam) and predicts analog regions.
    Unlike the SDM process, every failure is logged and swallowed (the
    raises are commented out), so later steps may see unbound names.
    """
    from os.path import basename
    from flyingpigeon import sdm
    from flyingpigeon import spatial_analog as sa
    from flyingpigeon.utils import archive
    self.status.set('Start process', 0)
    try:
        logger.info('reading the arguments')
        resources = self.getInputValues(identifier='resources')
        # taxon_name = self.getInputValues(identifier='taxon_name')[0]
        # period = self.period.getValue()
        coords = self.getInputValues(identifier='coords')[0]
        period = self.getInputValues(identifier='period')[0]
        # 'coords' is a comma-separated pair, parsed to floats
        coordinate = [float(n) for n in coords.split(',')]
        # indices = self.input_indices.getValue()
        indices = self.getInputValues(identifier='input_indices')
        logger.info("indices = %s ", indices)
        archive_format = self.archive_format.getValue()
    except Exception as e:
        logger.error('failed to read in the arguments %s ' % e)
    #################################
    # calculate the climate indices
    #################################
    # get the indices
    ncs_indices = None
    try:
        self.status.set(
            'start calculation of climate indices for %s' % indices, 30)
        ncs_indices = sdm.get_indices(resources=resources, indices=indices)
        logger.info('indice calculation done')
    except:
        msg = 'failed to calculate indices'
        logger.debug(msg)
        # raise Exception(msg)
    try:
        archive_indices = archive(ncs_indices, format=archive_format)
        logger.info('indices 3D added to tarfile')
    except:
        msg = 'failed adding indices to tar'
        logger.debug(msg)
        # raise Exception(msg)
    indices_dic = None
    try:
        # sort indices
        indices_dic = sdm.sort_indices(ncs_indices)
        logger.info('indice files sorted for %s datasets' %
                    len(indices_dic.keys()))
    except:
        msg = 'failed to sort indices'
        logger.debug(msg)
        # raise Exception(msg)
    ncs_references = []
    analogs = []
    statistics_info = []
    for count, key in enumerate(indices_dic.keys()):
        try:
            self.status.set('Start processing of %s ' % key, 40 + count * 10)
            ncs = indices_dic[key]
            logger.info('with %s files' % len(ncs))
            gam_model, statistic_plot = sa.get_gam(ncs, coordinate)
            statistics_info.append(statistic_plot)
            self.status.set('GAM sucessfully trained', 70)
        except:
            # NOTE(review): swallowed — 'gam_model' may be unbound in the
            # prediction step below, raising NameError; confirm intended.
            msg = 'failed to train GAM'
            logger.debug(msg)
            # raise Exception(msg)
        try:
            prediction = sdm.get_prediction(gam_model, ncs_indices)
            self.status.set('prediction done', 80)
        except:
            msg = 'failed to predict'
            logger.debug(msg)
            # raise Exception(msg)
        # try:
        #     from numpy import invert, isnan, nan, broadcast_arrays, array, zeros, linspace, meshgrid
        #     mask = invert(isnan(PAmask))
        #     mask = broadcast_arrays(prediction, mask)[1]
        #     prediction[mask==False] = nan
        #     self.status.set('land sea mask for predicted data', 90)
        # except:
        #     logger.debug('failed to mask predicted data')
        try:
            analogs.append(sdm.write_to_file(ncs_indices[0], prediction))
            logger.info('Analog written to file')
            # tar_prediction.add(species_file,
            #                    arcname = basename(species_file))  # .replace(os.path.abspath(os.path.curdir), "")
        except:
            msg = 'failed to write species file'
            logger.debug(msg)
            # raise Exception(msg)
    from flyingpigeon.visualisation import concat_images
    statistics_infos = None
    try:
        statistics_infos = concat_images(statistics_info, orientation='v')
        logger.info('statistc graphics concatinated')
    except:
        msg = 'failed to concat images'
        logger.debug(msg)
        # raise Exception(msg)
    # # archive_references = None
    # # try:
    # #     archive_references = archive(ncs_references , format=archive_format)
    # #     logger.info('indices 2D added to archive')
    # # except:
    # #     msg = 'failed adding 2D indices to archive'
    # #     logger.debug(msg)
    # #     # raise Exception(msg)
    #
    archive_analogs = None
    try:
        archive_analogs = archive(analogs, format=archive_format)
        logger.info('analog file added to archive')
    except:
        msg = 'failed adding analog file to archive'
        logger.debug(msg)
        # raise Exception(msg)
    self.output_indices.setValue(archive_indices)
    self.output_analogs.setValue(archive_analogs)
    # pick the first truthy analog file as the example output
    # NOTE(review): if no analog was produced, 'i' is None and analogs[i]
    # raises TypeError — confirm this is acceptable.
    i = next((i for i, x in enumerate(analogs) if x), None)
    self.output_example.setValue(analogs[i])
    self.output_info.setValue(statistics_infos)
    self.status.set('done', 100)