def export_metadata(info_role):
    """
    Route to export the metadata in CSV.

    The synthese table is joined with gn_synthese.v_metadata_for_export.
    The column jdd_id is mandatory in the view
    gn_synthese.v_metadata_for_export.

    Parameters:
        Via POST: Use a list of id_synthese (in POST parameters) to filter
        the v_synthese_for_export_view
    """
    # Iterate the keys only: the previous .items() loop discarded the value
    # and re-read it through getlist() anyway.
    filters = {key: request.args.getlist(key) for key in request.args}
    metadata_view = GenericTable("v_metadata_for_export", "gn_synthese", None)
    q = DB.session.query(
        distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef
    ).join(
        metadata_view.tableDef,
        getattr(
            metadata_view.tableDef.columns,
            current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
        )
        == VSyntheseForWebApp.id_dataset,
    )
    q = synthese_query.filter_query_all_filters(
        VSyntheseForWebApp, q, filters, info_role
    )
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=[metadata_view.as_dict(d) for d in q.all()],
        separator=";",
        columns=[db_col.key for db_col in metadata_view.tableDef.columns],
    )
def export_metadata(info_role):
    """
    Route to export the metadata in CSV.

    .. :quickref: Synthese;

    The synthese table is joined with gn_synthese.v_metadata_for_export.
    The column jdd_id is mandatory in the view
    gn_synthese.v_metadata_for_export.

    POST parameters: Use a list of id_synthese (in POST parameters)
    to filter the v_synthese_for_export_view
    """
    # Iterate the keys only: the previous .items() loop discarded the value
    # and re-read it through getlist() anyway.
    filters = {key: request.args.getlist(key) for key in request.args}
    metadata_view = GenericTable("v_metadata_for_export", "gn_synthese", None)
    q = DB.session.query(
        distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef
    ).join(
        metadata_view.tableDef,
        getattr(
            metadata_view.tableDef.columns,
            current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
        )
        == VSyntheseForWebApp.id_dataset,
    )
    q = synthese_query.filter_query_all_filters(
        VSyntheseForWebApp, q, filters, info_role
    )
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=[metadata_view.as_dict(d) for d in q.all()],
        separator=";",
        columns=[db_col.key for db_col in metadata_view.tableDef.columns],
    )
def export(info_role):
    """
    Export the filtered occtax releves as CSV, GeoJSON or zipped shapefile.

    The export view, geometry column, columns and SRID come from the
    blueprint configuration; the query is restricted by the user's CRUVED
    and by the GET filters.

    :param info_role: user/role object carrying the CRUVED rights
    :query str format: 'csv', 'geojson' (default) or anything else for a
        zipped shapefile
    """
    export_view_name = blueprint.config["export_view_name"]
    export_geom_column = blueprint.config["export_geom_columns_name"]
    # Removed dead local: blueprint.config["export_id_column_name"] was read
    # into an unused variable (plain dict access, no side effect).
    export_columns = blueprint.config["export_columns"]
    export_srid = blueprint.config["export_srid"]
    export_view = GenericTable(
        export_view_name, "pr_occtax", export_geom_column, export_srid
    )
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    q = get_query_occtax_filters(request.args, export_view, q, from_generic_table=True)
    data = q.all()
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    export_format = request.args.get("format", "geojson")
    if export_format == "csv":
        # Fall back to every view column when no explicit export columns
        columns = (
            export_columns
            if len(export_columns) > 0
            else [db_col.key for db_col in export_view.db_cols]
        )
        return to_csv_resp(
            file_name, [export_view.as_dict(d) for d in data], columns, ";"
        )
    elif export_format == "geojson":
        results = FeatureCollection(
            [export_view.as_geofeature(d, columns=export_columns) for d in data]
        )
        return to_json_resp(
            results, as_file=True, filename=file_name, indent=4, extension="geojson"
        )
    else:
        try:
            # Clean previous exports before writing the new shapefile
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"),
                excluded_files=[".gitkeep"],
            )
            db_cols = [
                db_col
                for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols, data=data, dir_path=dir_path, file_name=file_name
            )
            return send_from_directory(
                dir_path, file_name + ".zip", as_attachment=True
            )
        except GeonatureApiError as e:
            message = str(e)
        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/occtax",
        )
def export(info_role):
    """
    Export the synthese observations as CSV, GeoJSON or zipped shapefile.

    :param info_role: user/role object carrying the CRUVED rights
    :query int limit: maximum number of exported rows (defaults to
        SYNTHESE.NB_MAX_OBS_EXPORT)
    :query str export_format: 'csv', 'geojson' or anything else for shapefile
    """
    # NOTE(review): dict(request.args) is assumed to map each key to a LIST
    # of values (hence the [0] below) — confirm against the werkzeug version.
    filters = dict(request.args)
    if 'limit' in filters:
        result_limit = filters.pop('limit')[0]
    else:
        result_limit = current_app.config['SYNTHESE']['NB_MAX_OBS_EXPORT']
    export_format = filters.pop('export_format')[0]
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    q = DB.session.query(VSyntheseForExport)
    q = synthese_query.filter_query_all_filters(
        VSyntheseForExport, q, filters, info_role, allowed_datasets)
    q = q.order_by(VSyntheseForExport.date_min.desc())
    data = q.limit(result_limit)
    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    formated_data = [d.as_dict_ordered() for d in data]
    # Guard against an empty result set: formated_data[0] used to raise
    # IndexError when the filters matched no observation.
    export_columns = formated_data[0].keys() if formated_data else []
    if export_format == 'csv':
        return to_csv_resp(
            file_name,
            formated_data,
            separator=';',
            columns=export_columns,
        )
    elif export_format == 'geojson':
        results = FeatureCollection(formated_data)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        # Clean previous exports before writing the new shapefile
        filemanager.delete_recursively(
            str(ROOT_DIR / 'backend/static/shapefiles'),
            excluded_files=['.gitkeep'])
        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
        FionaShapeService.create_shapes_struct(
            db_cols=VSyntheseForExport.db_cols,
            srid=current_app.config['LOCAL_SRID'],
            dir_path=dir_path,
            file_name=file_name,
            col_mapping=current_app.config['SYNTHESE']['EXPORT_COLUMNS'])
        for row in data:
            geom = row.the_geom_local
            row_as_dict = row.as_dict_ordered()
            FionaShapeService.create_feature(row_as_dict, geom)
        FionaShapeService.save_and_zip_shapefiles()
        return send_from_directory(dir_path, file_name + '.zip', as_attachment=True)
def get_status(info_role):
    """
    Route to get all the protection status of a synthese search.

    Joins the synthese view with Taxref and the protection tables and
    returns one CSV row per distinct (taxon, protection article) pair.
    """
    filters = dict(request.args)
    query = (
        DB.session.query(
            distinct(VSyntheseForWebApp.cd_nom), Taxref, TaxrefProtectionArticles
        )
        .join(Taxref, Taxref.cd_nom == VSyntheseForWebApp.cd_nom)
        .join(
            TaxrefProtectionEspeces,
            TaxrefProtectionEspeces.cd_nom == VSyntheseForWebApp.cd_nom,
        )
        .join(
            TaxrefProtectionArticles,
            TaxrefProtectionArticles.cd_protection
            == TaxrefProtectionEspeces.cd_protection,
        )
    )
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    query = synthese_query.filter_query_all_filters(
        VSyntheseForWebApp, query, filters, info_role, allowed_datasets
    )
    # Field names taken from the taxon dict, then from the protection dict —
    # their concatenation is also the CSV column order.
    taxon_fields = ('nom_complet', 'nom_vern', 'cd_nom', 'cd_ref')
    protection_fields = (
        'type_protection', 'article', 'intitule', 'arrete', 'date_arrete', 'url'
    )
    protection_status = []
    for _, taxref_row, article_row in query.all():
        taxon = taxref_row.as_dict()
        protection = article_row.as_dict()
        record = OrderedDict(
            [(field, taxon[field]) for field in taxon_fields]
            + [(field, protection[field]) for field in protection_fields]
        )
        protection_status.append(record)
    export_columns = list(taxon_fields + protection_fields)
    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    return to_csv_resp(
        file_name,
        protection_status,
        separator=';',
        columns=export_columns,
    )
def export_visit():
    """
    Download the visit data as GeoJSON, CSV or zipped shapefile.

    :query str export_format: 'geojson', 'csv' or anything else for a
        zipped shapefile (the default when the parameter is absent)
    :query id_base_visit: restrict the export to one visit
    :query id_base_site: restrict the export to one site
    """
    parameters = request.args
    # Default to shapefile when no export_format is given
    export_format = parameters[
        'export_format'] if 'export_format' in request.args else 'shapefile'
    # Timestamp used as the exported file name
    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    q = (DB.session.query(ExportVisits))
    if 'id_base_visit' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_visit == parameters['id_base_visit']))
    elif 'id_base_site' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_site == parameters['id_base_site']))
    data = q.all()
    features = []
    if export_format == 'geojson':
        for d in data:
            # as_geofeature(geometry column, id column, recursive flag)
            feature = d.as_geofeature('geom', 'id_area', False)
            features.append(feature)
        result = FeatureCollection(features)
        return to_json_resp(result, as_file=True, filename=file_name, indent=4)
    elif export_format == 'csv':
        tab_visit = []
        for d in data:
            visit = d.as_dict()
            # Convert the geometry from WKB to WKT so it is readable in CSV
            geom_wkt = to_shape(d.geom)
            visit['geom'] = geom_wkt
            tab_visit.append(visit)
        # All rows share the same keys, so the first row gives the columns;
        # guard against an empty result set (tab_visit[0] raised IndexError).
        columns = tab_visit[0].keys() if tab_visit else []
        return to_csv_resp(file_name, tab_visit, columns, ';')
    else:
        # Removed leftover debug print('LAAA').
        # TODO: make the srid a parameter
        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
        # The shapefile is written server-side then served as a zip
        ExportVisits.as_shape(
            geom_col='geom',
            dir_path=dir_path,
            srid=2154,
            data=data,
            file_name=file_name)
        return send_from_directory(dir_path, file_name + '.zip', as_attachment=True)
def export_status(info_role):
    """
    Route to get all the protection status of a synthese search

    Parameters:
        - HTTP-GET: the same that the /synthese endpoint (all the filter in web app)

    Get the CRUVED from 'R' action because we don't give observations X/Y
    but only statuts and to be consistent with the data displayed in the
    web interface.
    """
    filters = {param: request.args.getlist(param) for param in request.args.keys()}
    # Columns selected in the exact order they appear in the CSV export
    selected = select(
        [
            distinct(VSyntheseForWebApp.cd_nom),
            Taxref.nom_complet,
            Taxref.cd_ref,
            Taxref.nom_vern,
            TaxrefProtectionArticles.type_protection,
            TaxrefProtectionArticles.article,
            TaxrefProtectionArticles.intitule,
            TaxrefProtectionArticles.arrete,
            TaxrefProtectionArticles.date_arrete,
            TaxrefProtectionArticles.url,
        ]
    )
    query_builder = SyntheseQuery(VSyntheseForWebApp, selected, filters)
    # Join taxonomy and protection tables on cd_nom / cd_protection
    query_builder.add_join(Taxref, Taxref.cd_nom, VSyntheseForWebApp.cd_nom)
    query_builder.add_join(
        TaxrefProtectionEspeces,
        TaxrefProtectionEspeces.cd_nom,
        VSyntheseForWebApp.cd_nom,
    )
    query_builder.add_join(
        TaxrefProtectionArticles,
        TaxrefProtectionArticles.cd_protection,
        TaxrefProtectionEspeces.cd_protection,
    )
    # Apply every GET filter plus the user's CRUVED restrictions
    filtered = query_builder.filter_query_all_filters(info_role)
    export_columns = [
        "nom_complet",
        "nom_vern",
        "cd_nom",
        "cd_ref",
        "type_protection",
        "article",
        "intitule",
        "arrete",
        "date_arrete",
        "url",
    ]
    protection_status = [
        OrderedDict((column, record[column]) for column in export_columns)
        for record in DB.engine.execute(filtered)
    ]
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        protection_status,
        separator=";",
        columns=export_columns,
    )
def export_observations_web(info_role):
    """
    Optimized route for observations web export.

    This view is customisable by the administrator.
    Some columns are mandatory: id_synthese, geojson and geojson_local
    to generate the exported files.

    Parameters:
        Via POST: Use a list of id_synthese (in POST parameters) to filter
        the v_synthese_for_export_view
        Via GET: 'export_format' str<'csv', 'geojson', 'shapefiles'>
    """
    params = request.args
    # set default to csv
    export_format = "csv"
    export_view = GenericTable(
        "v_synthese_for_export",
        "gn_synthese",
        "the_geom_local",
        current_app.config["LOCAL_SRID"],
    )
    if "export_format" in params:
        export_format = params["export_format"]
    # get list of id synthese from POST
    id_list = request.get_json()
    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)
    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns.idSynthese.in_(id_list)
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser and observer in the
        # v_synthese_for_export_view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_SYNTHESE_COL"
            ],
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_DIGITISER_COL"
            ],
            with_generic_table=True,
        )
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    # (removed commented-out dead code that rebuilt the full column list)
    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == "geojson":
        features = []
        for r in results:
            # The view stores the 4326 geometry as a serialized Python dict
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        try:
            # Clean previous exports before writing the new shapefile
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"),
                excluded_files=[".gitkeep"],
            )
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"][
                    "EXPORT_GEOJSON_LOCAL_COL"
                ],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(
                dir_path, file_name + ".zip", as_attachment=True
            )
        except GeonatureApiError as e:
            message = str(e)
        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
        )
def export(info_role):
    """
    Export the filtered occtax releves as CSV, GeoJSON or zipped shapefile.

    The export view, geometry column, columns and SRID come from the
    blueprint configuration; the query is restricted by the user's CRUVED
    and by the GET filters.

    :param info_role: user/role object carrying the CRUVED rights
    :query str format: 'csv', 'geojson' (default) or anything else for a
        zipped shapefile
    """
    export_view_name = blueprint.config["export_view_name"]
    export_geom_column = blueprint.config["export_geom_columns_name"]
    # Removed dead local: blueprint.config["export_id_column_name"] was read
    # into an unused variable (plain dict access, no side effect).
    export_columns = blueprint.config["export_columns"]
    export_srid = blueprint.config["export_srid"]
    export_view = GenericTable(
        export_view_name, "pr_occtax", export_geom_column, export_srid
    )
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    q = get_query_occtax_filters(request.args, export_view, q, from_generic_table=True)
    data = q.all()
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    export_format = request.args.get("format", "geojson")
    if export_format == "csv":
        # Fall back to every view column when no explicit export columns
        columns = (
            export_columns
            if len(export_columns) > 0
            else [db_col.key for db_col in export_view.db_cols]
        )
        return to_csv_resp(
            file_name, [export_view.as_dict(d) for d in data], columns, ";"
        )
    elif export_format == "geojson":
        results = FeatureCollection(
            [export_view.as_geofeature(d, columns=export_columns) for d in data]
        )
        return to_json_resp(
            results, as_file=True, filename=file_name, indent=4, extension="geojson"
        )
    else:
        try:
            # Clean previous exports before writing the new shapefile
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"),
                excluded_files=[".gitkeep"],
            )
            db_cols = [
                db_col
                for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols, data=data, dir_path=dir_path, file_name=file_name
            )
            return send_from_directory(
                dir_path, file_name + ".zip", as_attachment=True
            )
        except GeonatureApiError as e:
            message = str(e)
        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/occtax",
        )
def export_visit(info_role):
    '''
    Download the data of one visit (or several visits) as GeoJSON, CSV
    or zipped shapefile (default).
    '''
    parameters = request.args
    # Default to shapefile when no export_format is given
    export_format = parameters[
        'export_format'] if 'export_format' in request.args else 'shapefile'
    # Timestamp used as the exported file name
    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    q = (DB.session.query(ExportVisits))
    # Only ONE filter is applied, with this precedence order
    if 'id_base_visit' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.idbvisit == parameters['id_base_visit']))
    elif 'id_releve_plot' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.idreleve == parameters['id_releve_plot']))
    elif 'id_base_site' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.idbsite == parameters['id_base_site']))
    elif 'organisme' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.organisme == parameters['organisme']))
    elif 'year' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            func.date_part('year', ExportVisits.visitdate) == parameters['year']))
    elif 'cd_hab' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.cd_hab == parameters['cd_hab']))
    data = q.all()
    features = []
    # format data
    cor_hab_taxon = []
    # flag_cdhab caches the last cd_hab seen so the taxon list is only
    # re-fetched when the habitat changes between consecutive rows
    flag_cdhab = 0
    strates = []
    tab_header = []
    column_name = get_base_column_name()
    column_name_pro = get_pro_column_name()
    mapping_columns = get_mapping_columns()
    strates_list = get_stratelist_plot()
    tab_visit = []
    for d in data:
        visit = d.as_dict()
        # Get list hab/taxon
        cd_hab = visit['cd_hab']
        if flag_cdhab != cd_hab:
            cor_hab_taxon = get_taxonlist_by_cdhab(cd_hab)
            flag_cdhab = cd_hab
        # Convert WKB geometry to a shapely (WKT-able) object
        geom_wkt = to_shape(d.geom)
        geom_array = array(geom_wkt)
        visit['geom_wkt'] = geom_wkt
        if export_format == 'csv' or export_format == 'shapefile':
            visit['geom'] = d.geom
            # For linestrings, export "start / end" point pair instead
            if geom_wkt.type.lower() == 'linestring':
                visit['geom'] = str(geom_array[0]) + " / " + str(geom_array[1])
        # remove html tag
        visit['lbhab'] = striphtml(visit['lbhab'])
        # Translate label column: keep only mapped keys, renamed
        visit = dict((mapping_columns[key], value) for (key, value) in
                     visit.items() if key in mapping_columns)
        # pivot strate: one column per strate, value " % <cover>"
        if visit['covstrate']:
            for strate, cover in visit['covstrate'].items():
                visit[strate] = " % " + str(cover)
        if 'covstrate' in visit:
            visit.pop('covstrate')
        # pivot taxons: one column per taxon, value " % <cover>"
        if visit['covtaxons']:
            for taxon, cover in visit['covtaxons'].items():
                visit[taxon] = " % " + str(cover)
        if 'covtaxons' in visit:
            visit.pop('covtaxons')
        tab_visit.append(visit)
    if export_format == 'geojson':
        # NOTE(review): geometry and properties are appended as two separate
        # entries of `features` rather than combined into a single Feature —
        # the resulting FeatureCollection looks malformed; confirm intended.
        for d in tab_visit:
            feature = mapping(d['geom_wkt'])
            d.pop('geom_wkt', None)
            properties = d
            features.append(feature)
            features.append(properties)
        result = FeatureCollection(features)
        return to_json_resp(result, as_file=True, filename=file_name, indent=4)
    elif export_format == 'csv':
        # Header = base columns + strate columns + taxon columns + pro columns
        tab_header = column_name + [clean_string(x) for x in strates_list] + [
            clean_string(x) for x in cor_hab_taxon
        ] + column_name_pro
        return to_csv_resp(file_name, tab_visit, tab_header, ';')
    else:
        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
        FionaShapeService.create_shapes_struct(
            db_cols=ExportVisits.__mapper__.c,
            srid=4326,
            dir_path=dir_path,
            file_name=file_name,
        )
        for row in data:
            FionaShapeService.create_feature(row.as_dict(), row.geom)
        FionaShapeService.save_and_zip_shapefiles()
        return send_from_directory(dir_path, file_name + '.zip', as_attachment=True)
def export_visit():
    '''
    Download the data of one visit (or several visits) as GeoJSON, CSV
    or zipped shapefile (default).
    '''
    parameters = request.args
    # Default to shapefile when no export_format is given
    export_format = parameters[
        'export_format'] if 'export_format' in request.args else 'shapefile'
    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    q = (DB.session.query(ExportVisits))
    # Only ONE filter is applied, with this precedence order
    if 'id_base_visit' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_visit == parameters['id_base_visit']))
    elif 'id_base_site' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_site == parameters['id_base_site']))
    elif 'organisme' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.organisme == parameters['organisme']))
    elif 'commune' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.area_name == parameters['commune']))
    elif 'year' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            func.date_part('year', ExportVisits.visit_date) == parameters['year']))
    elif 'cd_nom' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.cd_nom == parameters['cd_nom']))
    data = q.all()
    features = []
    if export_format == 'geojson':
        for d in data:
            feature = d.as_geofeature('geom', 'id_area', False)
            features.append(feature)
        result = FeatureCollection(features)
        return to_json_resp(result, as_file=True, filename=file_name, indent=4)
    elif export_format == 'csv':
        tab_visit = []
        for d in data:
            visit = d.as_dict()
            # Convert the geometry from WKB to WKT so it is readable in CSV
            geom_wkt = to_shape(d.geom)
            visit['geom'] = geom_wkt
            tab_visit.append(visit)
        # Guard against an empty result set: tab_visit[0].keys() used to
        # raise IndexError when the filters matched no visit.
        columns = tab_visit[0].keys() if tab_visit else []
        return to_csv_resp(file_name, tab_visit, columns, ';')
    else:
        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
        FionaShapeService.create_shapes_struct(
            db_cols=ExportVisits.__mapper__.c,
            srid=2154,
            dir_path=dir_path,
            file_name=file_name,
        )
        for row in data:
            FionaShapeService.create_feature(row.as_dict(), row.geom)
        FionaShapeService.save_and_zip_shapefiles()
        return send_from_directory(dir_path,
                                   file_name + '.zip',
                                   as_attachment=True)
def export_status(info_role):
    """
    Route to get all the protection status of a synthese search

    .. :quickref: Synthese;

    Parameters:
        - HTTP-GET: the same that the /synthese endpoint (all the filter in web app)

    Get the CRUVED from 'R' action because we don't give observations X/Y
    but only statuts and to be consistent with the data displayed in the
    web interface.
    """
    filters = {param: request.args.getlist(param) for param in request.args.keys()}
    # Columns selected in the exact order they appear in the CSV export
    selected = select(
        [
            distinct(VSyntheseForWebApp.cd_nom),
            Taxref.nom_complet,
            Taxref.cd_ref,
            Taxref.nom_vern,
            TaxrefProtectionArticles.type_protection,
            TaxrefProtectionArticles.article,
            TaxrefProtectionArticles.intitule,
            TaxrefProtectionArticles.arrete,
            TaxrefProtectionArticles.date_arrete,
            TaxrefProtectionArticles.url,
        ]
    )
    query_builder = SyntheseQuery(VSyntheseForWebApp, selected, filters)
    # Join taxonomy and protection tables on cd_nom / cd_protection
    query_builder.add_join(Taxref, Taxref.cd_nom, VSyntheseForWebApp.cd_nom)
    query_builder.add_join(
        TaxrefProtectionEspeces,
        TaxrefProtectionEspeces.cd_nom,
        VSyntheseForWebApp.cd_nom,
    )
    query_builder.add_join(
        TaxrefProtectionArticles,
        TaxrefProtectionArticles.cd_protection,
        TaxrefProtectionEspeces.cd_protection,
    )
    # Apply every GET filter plus the user's CRUVED restrictions
    filtered = query_builder.filter_query_all_filters(info_role)
    export_columns = [
        "nom_complet",
        "nom_vern",
        "cd_nom",
        "cd_ref",
        "type_protection",
        "article",
        "intitule",
        "arrete",
        "date_arrete",
        "url",
    ]
    protection_status = [
        OrderedDict((column, record[column]) for column in export_columns)
        for record in DB.engine.execute(filtered)
    ]
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        protection_status,
        separator=";",
        columns=export_columns,
    )
def export_observations_web(info_role):
    """
    Optimized route for observations web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator.
    Some columns are mandatory: id_synthese, geojson and geojson_local
    to generate the exported files.

    POST parameters: Use a list of id_synthese (in POST parameters)
    to filter the v_synthese_for_export_view

    :query str export_format: str<'csv', 'geojson', 'shapefiles'>
    """
    params = request.args
    # set default to csv
    export_format = "csv"
    export_view = GenericTable(
        "v_synthese_for_export",
        "gn_synthese",
        "the_geom_local",
        current_app.config["LOCAL_SRID"],
    )
    if "export_format" in params:
        export_format = params["export_format"]
    # get list of id synthese from POST
    id_list = request.get_json()
    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)
    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns.idSynthese.in_(id_list)
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser and observer in the
        # v_synthese_for_export_view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_SYNTHESE_COL"
            ],
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_DIGITISER_COL"
            ],
            with_generic_table=True,
        )
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    # (removed commented-out dead code that rebuilt the full column list)
    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == "geojson":
        features = []
        for r in results:
            # The view stores the 4326 geometry as a serialized Python dict
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        try:
            # Clean previous exports before writing the new shapefile
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"),
                excluded_files=[".gitkeep"],
            )
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"][
                    "EXPORT_GEOJSON_LOCAL_COL"
                ],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(
                dir_path, file_name + ".zip", as_attachment=True
            )
        except GeonatureApiError as e:
            message = str(e)
        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
        )
def export(info_role):
    """
    Export the filtered occtax releves as CSV, GeoJSON or zipped shapefile.

    The export view, geometry column, columns and SRID come from the
    blueprint configuration; the query is restricted by the user's CRUVED
    and by the GET filters.

    :param info_role: user/role object carrying the CRUVED rights
    :query str format: 'csv', 'geojson' (default) or anything else for a
        zipped shapefile
    """
    export_view_name = blueprint.config['export_view_name']
    export_geom_column = blueprint.config['export_geom_columns_name']
    # Removed dead local: blueprint.config['export_id_column_name'] was read
    # into an unused variable (plain dict access, no side effect).
    export_columns = blueprint.config['export_columns']
    export_srid = blueprint.config['export_srid']
    export_view = GenericTable(
        export_view_name, 'pr_occtax', export_geom_column, export_srid
    )
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    q = get_query_occtax_filters(request.args, export_view, q, from_generic_table=True)
    data = q.all()
    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    export_format = request.args.get('format', 'geojson')
    if export_format == 'csv':
        # Fall back to every view column when no explicit export columns
        columns = export_columns if len(export_columns) > 0 else [
            db_col.key for db_col in export_view.db_cols
        ]
        return to_csv_resp(
            file_name, [export_view.as_dict(d) for d in data], columns, ';'
        )
    elif export_format == 'geojson':
        results = FeatureCollection([
            export_view.as_geofeature(d, columns=export_columns) for d in data
        ])
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        try:
            # Clean previous exports before writing the new shapefile
            filemanager.delete_recursively(
                str(ROOT_DIR / 'backend/static/shapefiles'),
                excluded_files=['.gitkeep'])
            db_cols = [
                db_col for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
            export_view.as_shape(
                db_cols=db_cols,
                data=data,
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(
                dir_path, file_name + '.zip', as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
        return render_template(
            'error.html',
            error=message,
            redirect=current_app.config['URL_APPLICATION'] + "/#/occtax")
def export_taxon_web(info_role):
    """Optimized route for taxon web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator
    Some columns are mandatory: cd_ref

    POST parameters: Use a list of cd_ref (in POST parameters)
    to filter the v_synthese_taxon_for_export_view

    :query str export_format: str<'csv'>
    """
    taxon_view = GenericTable(
        "v_synthese_taxon_for_export_view", "gn_synthese", None
    )
    columns = taxon_view.tableDef.columns
    # Conformity check of the view: an explicit test replaces the previous
    # try/assert/except AssertionError — `assert` is stripped under
    # `python -O`, so it must not be used for runtime validation.
    if not hasattr(columns, "cd_ref"):
        return {
            "msg": """
            View v_synthese_taxon_for_export_view
            must have a cd_ref column
            """
        }, 500
    id_list = request.get_json()
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]
    # Aggregate per taxon: number of observations and observation date range
    subq = DB.session.query(
        VSyntheseForWebApp.cd_ref,
        func.count(distinct(VSyntheseForWebApp.id_synthese)).label("nb_obs"),
        func.min(VSyntheseForWebApp.date_min).label("date_min"),
        func.max(VSyntheseForWebApp.date_max).label("date_max"),
    ).filter(
        VSyntheseForWebApp.id_synthese.in_(id_list)
    ).group_by(VSyntheseForWebApp.cd_ref)
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser
        # and observer in the v_synthese_for_export_view
        subq = synthese_query.filter_query_with_cruved(
            VSyntheseForWebApp,
            subq,
            info_role,
            id_synthese_column="id_synthese",
            id_dataset_column="id_dataset",
            observers_column="observers",
            id_digitiser_column="id_digitiser",
            with_generic_table=False,
        )
    subq = subq.subquery()
    q = DB.session.query(
        *columns, subq.c.nb_obs, subq.c.date_min, subq.c.date_max
    ).join(subq, subq.c.cd_ref == columns.cd_ref)
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=serializeQuery(q.all(), q.column_descriptions),
        separator=";",
        columns=[db_col.key for db_col in columns]
        + ["nb_obs", "date_min", "date_max"],
    )
def export_ap():
    """
    Download the data of a presence area ("aire de présence") as GeoJSON,
    CSV or zipped shapefile (default).
    """
    parameters = request.args
    export_format = (parameters["export_format"]
                     if "export_format" in request.args else "shapefile")
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    q = DB.session.query(ExportAp)
    # Only ONE filter is applied, with this precedence order
    if "indexap" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.indexap == parameters["indexap"])
    elif "indexzp" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.id_base_site == parameters["indexzp"])
    elif "organisme" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.organisme == parameters["organisme"])
    elif "commune" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.area_name == parameters["commune"])
    elif "year" in parameters:
        q = DB.session.query(ExportAp).filter(
            func.date_part("year", ExportAp.visit_date) == parameters["year"])
    elif "cd_nom" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.cd_nom == parameters["cd_nom"])
    data = q.all()
    features = []
    if export_format == "geojson":
        for d in data:
            feature = d.as_geofeature("geom_local", "indexap", False)
            features.append(feature)
        result = FeatureCollection(features)
        return to_json_resp(result, as_file=True, filename=file_name, indent=4)
    elif export_format == "csv":
        tab_ap = []
        for d in data:
            ap = d.as_dict()
            # Convert the geometry from WKB to WKT so it is readable in CSV
            geom_wkt = to_shape(d.geom_local)
            ap["geom_local"] = geom_wkt
            tab_ap.append(ap)
        # Guard against an empty result set: tab_ap[0].keys() used to raise
        # IndexError when the filters matched no area.
        columns = tab_ap[0].keys() if tab_ap else []
        return to_csv_resp(file_name, tab_ap, columns, ";")
    else:
        dir_path = str(ROOT_DIR / "backend/static/shapefiles")
        FionaShapeService.create_shapes_struct(
            db_cols=ExportAp.__mapper__.c,
            srid=2154,
            dir_path=dir_path,
            file_name=file_name,
        )
        for row in data:
            FionaShapeService.create_feature(row.as_dict(), row.geom_local)
        FionaShapeService.save_and_zip_shapefiles()
        return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)