def export(info_role):
    export_view_name = blueprint.config["export_view_name"]
    export_geom_column = blueprint.config["export_geom_columns_name"]
    export_id_column_name = blueprint.config["export_id_column_name"]
    export_columns = blueprint.config["export_columns"]
    export_srid = blueprint.config["export_srid"]

    export_view = GenericTable(
        export_view_name, "pr_occtax", export_geom_column, export_srid
    )

    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    q = get_query_occtax_filters(request.args, export_view, q, from_generic_table=True)

    data = q.all()

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    export_format = request.args["format"] if "format" in request.args else "geojson"
    if export_format == "csv":
        columns = (
            export_columns
            if len(export_columns) > 0
            else [db_col.key for db_col in export_view.db_cols]
        )
        return to_csv_resp(
            file_name, [export_view.as_dict(d) for d in data], columns, ";"
        )
    elif export_format == "geojson":
        results = FeatureCollection(
            [export_view.as_geofeature(d, columns=export_columns) for d in data]
        )
        return to_json_resp(
            results, as_file=True, filename=file_name, indent=4, extension="geojson"
        )
    else:
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"), excluded_files=[".gitkeep"]
            )
            db_cols = [
                db_col for db_col in export_view.db_cols if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols, data=data, dir_path=dir_path, file_name=file_name
            )
            return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/occtax",
            )

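# --- Illustration (not from the source): the timestamped-file-name pattern
# shared by all of these exports. `_sanitize` is a hypothetical stand-in for
# filemanager.removeDisallowedFilenameChars, assumed to whitelist safe chars.
import datetime
import re


def _timestamped_export_name(now=None):
    now = now or datetime.datetime.now()
    name = now.strftime("%Y_%m_%d_%Hh%Mm%S")  # e.g. 2024_01_31_14h05m09
    return _sanitize(name)


def _sanitize(name):
    # keep only characters that are safe in a file name
    return re.sub(r"[^A-Za-z0-9_.-]", "", name)
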
def export(info_role):
    # dict(request.args) keeps werkzeug's MultiDict values as lists,
    # hence the [0] when popping single-valued parameters.
    filters = dict(request.args)
    if 'limit' in filters:
        result_limit = filters.pop('limit')[0]
    else:
        result_limit = current_app.config['SYNTHESE']['NB_MAX_OBS_EXPORT']
    export_format = filters.pop('export_format')[0]
    allowed_datasets = TDatasets.get_user_datasets(info_role)

    q = DB.session.query(VSyntheseForExport)
    q = synthese_query.filter_query_all_filters(
        VSyntheseForExport, q, filters, info_role, allowed_datasets
    )
    q = q.order_by(VSyntheseForExport.date_min.desc())
    data = q.limit(result_limit)

    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    formated_data = [d.as_dict_ordered() for d in data]
    export_columns = formated_data[0].keys()

    if export_format == 'csv':
        return to_csv_resp(
            file_name,
            formated_data,
            separator=';',
            columns=export_columns,
        )
    elif export_format == 'geojson':
        results = FeatureCollection(formated_data)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        filemanager.delete_recursively(
            str(ROOT_DIR / 'backend/static/shapefiles'),
            excluded_files=['.gitkeep'],
        )
        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
        FionaShapeService.create_shapes_struct(
            db_cols=VSyntheseForExport.db_cols,
            srid=current_app.config['LOCAL_SRID'],
            dir_path=dir_path,
            file_name=file_name,
            col_mapping=current_app.config['SYNTHESE']['EXPORT_COLUMNS'],
        )
        for row in data:
            geom = row.the_geom_local
            row_as_dict = row.as_dict_ordered()
            FionaShapeService.create_feature(row_as_dict, geom)
        FionaShapeService.save_and_zip_shapefiles()
        return send_from_directory(dir_path, file_name + '.zip', as_attachment=True)

def etalab_export():
    """
    TODO: non-functional method, to be reviewed
    """
    if not blueprint.config.get('etalab_export'):
        return to_json_resp(
            {
                'api_error': 'etalab_disabled',
                'message': 'Etalab export is disabled'
            },
            status=501)

    from datetime import time
    from geonature.utils.env import DB
    from .rdf import OccurrenceStore

    conf = current_app.config.get('EXPORTS')
    export_etalab = conf.get('etalab_export')
    seeded = False
    if os.path.isfile(export_etalab):
        seeded = True
        midnight = datetime.combine(datetime.today(), time.min)
        mtime = datetime.fromtimestamp(os.path.getmtime(export_etalab))
        ts_delta = mtime - midnight

    if not seeded or ts_delta.total_seconds() < 0:
        store = OccurrenceStore()
        query = GenericQuery(
            DB.session, 'export_occtax_sinp', 'pr_occtax',
            geometry_field=None, filters=[])
        data = query.return_query()
        for record in data.get('items'):
            event = store.build_event(record)
            obs = store.build_human_observation(event, record)
            store.build_location(obs, record)
            occurrence = store.build_occurrence(event, record)
            organism = store.build_organism(occurrence, record)
            identification = store.build_identification(organism, record)
            store.build_taxon(identification, record)
        try:
            with open(export_etalab, 'w+b') as xp:
                store.save(store_uri=xp)
        except FileNotFoundError:
            response = Response(
                response="FileNotFoundError : {}".format(export_etalab),
                status=500,
                mimetype='application/json')
            return response

    return send_from_directory(
        os.path.dirname(export_etalab), os.path.basename(export_etalab))

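# --- Illustration (not from the source): the seeding test above regenerates
# the dump at most once a day; an mtime earlier than today's midnight gives a
# negative delta, which forces a rebuild. `_is_stale` is a hypothetical helper.
import os
from datetime import datetime, time


def _is_stale(path):
    if not os.path.isfile(path):
        return True
    midnight = datetime.combine(datetime.today(), time.min)
    mtime = datetime.fromtimestamp(os.path.getmtime(path))
    return (mtime - midnight).total_seconds() < 0
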
def export_visit():
    parameters = request.args
    # Default to shapefile when no format is requested.
    export_format = parameters[
        'export_format'] if 'export_format' in request.args else 'shapefile'
    # Timestamp of the download, used as the file name.
    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')

    q = (DB.session.query(ExportVisits))

    if 'id_base_visit' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_visit == parameters['id_base_visit']))
    elif 'id_base_site' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_site == parameters['id_base_site']))

    data = q.all()
    features = []

    if export_format == 'geojson':
        for d in data:
            # as_geofeature (geonature.utils.utilssqlalchemy) takes the
            # geometry column, the id column and a recursion flag.
            feature = d.as_geofeature('geom', 'id_area', False)
            features.append(feature)
        result = FeatureCollection(features)
        # Return the FeatureCollection as a downloadable JSON file.
        return to_json_resp(result, as_file=True, filename=file_name, indent=4)
    elif export_format == 'csv':
        tab_visit = []
        for d in data:
            visit = d.as_dict()
            # Convert d.geom from WKB to a WKT geometry for the CSV cell.
            geom_wkt = to_shape(d.geom)
            visit['geom'] = geom_wkt
            tab_visit.append(visit)
        # All rows share the same keys, so the first row provides the columns.
        return to_csv_resp(file_name, tab_visit, tab_visit[0].keys(), ';')
    else:
        # TODO: make the srid a parameter
        # The shapefile is written server-side before being sent.
        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
        # srid=2154 (Lambert-93, the French projection), even though the view
        # stores geometries in 4326 (WGS84).
        ExportVisits.as_shape(
            geom_col='geom',
            dir_path=dir_path,
            srid=2154,
            data=data,
            file_name=file_name)
        return send_from_directory(dir_path, file_name + '.zip', as_attachment=True)

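# --- Illustration (not from the source): what the geometry conversions above
# amount to. to_shape (geoalchemy2.shape) returns a shapely geometry, which
# serialises to WKT for CSV cells and to a GeoJSON-like dict via mapping().
from shapely.geometry import Point, mapping

geom = Point(6.5, 44.8)   # stands in for to_shape(d.geom)
geom.wkt                  # 'POINT (6.5 44.8)'
mapping(geom)             # {'type': 'Point', 'coordinates': (6.5, 44.8)}
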
def export_observations_web(info_role):
    """
    Optimized route for observations web export

    This view is customisable by the administrator
    Some columns are mandatory: id_synthese, geojson and geojson_local
    to generate the exported files

    Parameters:
        Via POST: Use a list of id_synthese (in POST parameters)
            to filter the v_synthese_for_export_view
        Via GET: 'export_format': str<'csv', 'geojson', 'shapefiles'>
    """
    params = request.args
    # set default to csv
    export_format = "csv"
    export_view = GenericTable(
        "v_synthese_for_export",
        "gn_synthese",
        "the_geom_local",
        current_app.config["LOCAL_SRID"],
    )
    if "export_format" in params:
        export_format = params["export_format"]

    # get list of id synthese from POST
    id_list = request.get_json()

    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)

    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns.idSynthese.in_(id_list)
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(info_role.id_role, module_code="SYNTHESE")[0]
    if cruved["R"] > cruved["E"]:
        # filter on cruved, specifying the id_dataset, id_synthese,
        # id_digitiser and observers columns of the v_synthese_for_export view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"],
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"]["EXPORT_ID_DIGITISER_COL"],
            with_generic_table=True,
        )
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == "geojson":
        features = []
        for r in results:
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"), excluded_files=[".gitkeep"]
            )
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
            )

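# --- Illustration (not from the source): minimal Feature/FeatureCollection
# assembly with the geojson package, as in the geojson branch above; the row
# dict and geometry are made up for the example.
from geojson import Feature, FeatureCollection

row = {"id_synthese": 1, "nom_cite": "Lynx lynx"}
geometry = {"type": "Point", "coordinates": [6.5, 44.8]}
collection = FeatureCollection([Feature(geometry=geometry, properties=row)])
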
def export_visit(info_role):
    '''
    Download the data of one visit (or several visits)
    '''
    parameters = request.args
    export_format = parameters[
        'export_format'] if 'export_format' in request.args else 'shapefile'

    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')

    q = (DB.session.query(ExportVisits))

    if 'id_base_visit' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.idbvisit == parameters['id_base_visit']))
    elif 'id_releve_plot' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.idreleve == parameters['id_releve_plot']))
    elif 'id_base_site' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.idbsite == parameters['id_base_site']))
    elif 'organisme' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.organisme == parameters['organisme']))
    elif 'year' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            func.date_part('year', ExportVisits.visitdate) == parameters['year']))
    elif 'cd_hab' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.cd_hab == parameters['cd_hab']))

    data = q.all()
    features = []

    # format the data
    cor_hab_taxon = []
    flag_cdhab = 0
    strates = []
    tab_header = []
    column_name = get_base_column_name()
    column_name_pro = get_pro_column_name()
    mapping_columns = get_mapping_columns()
    strates_list = get_stratelist_plot()

    tab_visit = []

    for d in data:
        visit = d.as_dict()

        # get the habitat/taxon list
        cd_hab = visit['cd_hab']
        if flag_cdhab != cd_hab:
            cor_hab_taxon = get_taxonlist_by_cdhab(cd_hab)
            flag_cdhab = cd_hab

        # remove the geom type
        geom_wkt = to_shape(d.geom)
        geom_array = array(geom_wkt)
        visit['geom_wkt'] = geom_wkt
        if export_format == 'csv' or export_format == 'shapefile':
            visit['geom'] = d.geom
            if geom_wkt.type.lower() == 'linestring':
                visit['geom'] = str(geom_array[0]) + " / " + str(geom_array[1])

        # remove html tags
        visit['lbhab'] = striphtml(visit['lbhab'])

        # translate the column labels
        visit = dict(
            (mapping_columns[key], value)
            for (key, value) in visit.items()
            if key in mapping_columns)

        # pivot the strates
        if visit['covstrate']:
            for strate, cover in visit['covstrate'].items():
                visit[strate] = " % " + str(cover)
        if 'covstrate' in visit:
            visit.pop('covstrate')

        # pivot the taxons
        if visit['covtaxons']:
            for taxon, cover in visit['covtaxons'].items():
                visit[taxon] = " % " + str(cover)
        if 'covtaxons' in visit:
            visit.pop('covtaxons')

        tab_visit.append(visit)

    if export_format == 'geojson':
        for d in tab_visit:
            # build a proper GeoJSON feature from the geometry and the
            # remaining fields (the original appended them separately)
            geometry = mapping(d.pop('geom_wkt'))
            features.append({
                'type': 'Feature',
                'geometry': geometry,
                'properties': d,
            })
        result = FeatureCollection(features)
        return to_json_resp(result, as_file=True, filename=file_name, indent=4)
    elif export_format == 'csv':
        tab_header = column_name + [clean_string(x) for x in strates_list] + [
            clean_string(x) for x in cor_hab_taxon
        ] + column_name_pro
        return to_csv_resp(file_name, tab_visit, tab_header, ';')
    else:
        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
        FionaShapeService.create_shapes_struct(
            db_cols=ExportVisits.__mapper__.c,
            srid=4326,
            dir_path=dir_path,
            file_name=file_name,
        )
        for row in data:
            FionaShapeService.create_feature(row.as_dict(), row.geom)
        FionaShapeService.save_and_zip_shapefiles()
        return send_from_directory(dir_path, file_name + '.zip', as_attachment=True)

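# --- Illustration (not from the source): the pivot step above, shown on a
# made-up visit dict; each strate/taxon cover becomes its own CSV column.
visit = {'covstrate': {'herbacee': 40, 'arbustive': 10}}
for strate, cover in visit.pop('covstrate').items():
    visit[strate] = " % " + str(cover)
# visit == {'herbacee': ' % 40', 'arbustive': ' % 10'}
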
def export_visit():
    '''
    Download the data of one visit (or several visits)
    '''
    parameters = request.args

    export_format = parameters[
        'export_format'] if 'export_format' in request.args else 'shapefile'

    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')

    q = (DB.session.query(ExportVisits))

    if 'id_base_visit' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_visit == parameters['id_base_visit']))
    elif 'id_base_site' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_site == parameters['id_base_site']))
    elif 'organisme' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.organisme == parameters['organisme']))
    elif 'commune' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.area_name == parameters['commune']))
    elif 'year' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            func.date_part('year', ExportVisits.visit_date) == parameters['year']))
    elif 'cd_nom' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.cd_nom == parameters['cd_nom']))

    data = q.all()
    features = []

    if export_format == 'geojson':
        for d in data:
            feature = d.as_geofeature('geom', 'id_area', False)
            features.append(feature)
        result = FeatureCollection(features)
        return to_json_resp(result, as_file=True, filename=file_name, indent=4)
    elif export_format == 'csv':
        tab_visit = []
        for d in data:
            visit = d.as_dict()
            geom_wkt = to_shape(d.geom)
            visit['geom'] = geom_wkt
            tab_visit.append(visit)
        return to_csv_resp(file_name, tab_visit, tab_visit[0].keys(), ';')
    else:
        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
        FionaShapeService.create_shapes_struct(
            db_cols=ExportVisits.__mapper__.c,
            srid=2154,
            dir_path=dir_path,
            file_name=file_name,
        )
        for row in data:
            FionaShapeService.create_feature(row.as_dict(), row.geom)
        FionaShapeService.save_and_zip_shapefiles()
        return send_from_directory(dir_path, file_name + '.zip', as_attachment=True)

def export(info_role):
    export_view_name = blueprint.config['export_view_name']
    export_geom_column = blueprint.config['export_geom_columns_name']
    export_id_column_name = blueprint.config['export_id_column_name']
    export_columns = blueprint.config['export_columns']
    export_srid = blueprint.config['export_srid']

    export_view = GenericTable(export_view_name, 'pr_occtax',
                               export_geom_column, export_srid)

    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    q = get_query_occtax_filters(request.args, export_view, q,
                                 from_generic_table=True)

    data = q.all()

    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    export_format = request.args[
        'format'] if 'format' in request.args else 'geojson'
    if export_format == 'csv':
        columns = export_columns if len(export_columns) > 0 else [
            db_col.key for db_col in export_view.db_cols
        ]
        return to_csv_resp(file_name,
                           [export_view.as_dict(d) for d in data],
                           columns, ';')
    elif export_format == 'geojson':
        results = FeatureCollection([
            export_view.as_geofeature(d, columns=export_columns) for d in data
        ])
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / 'backend/static/shapefiles'),
                excluded_files=['.gitkeep'])
            db_cols = [
                db_col for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
            export_view.as_shape(
                db_cols=db_cols,
                data=data,
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(dir_path, file_name + '.zip',
                                       as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                'error.html',
                error=message,
                redirect=current_app.config['URL_APPLICATION'] + "/#/occtax")

def export_ap():
    """
    Download the data of a presence area ("aire de présence")
    """
    parameters = request.args

    export_format = (parameters["export_format"]
                     if "export_format" in request.args else "shapefile")
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")

    q = DB.session.query(ExportAp)

    if "indexap" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.indexap == parameters["indexap"])
    elif "indexzp" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.id_base_site == parameters["indexzp"])
    elif "organisme" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.organisme == parameters["organisme"])
    elif "commune" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.area_name == parameters["commune"])
    elif "year" in parameters:
        q = DB.session.query(ExportAp).filter(
            func.date_part("year", ExportAp.visit_date) == parameters["year"])
    elif "cd_nom" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.cd_nom == parameters["cd_nom"])

    data = q.all()
    features = []

    if export_format == "geojson":
        for d in data:
            feature = d.as_geofeature("geom_local", "indexap", False)
            features.append(feature)
        result = FeatureCollection(features)
        return to_json_resp(result, as_file=True, filename=file_name, indent=4)
    elif export_format == "csv":
        tab_ap = []
        for d in data:
            ap = d.as_dict()
            geom_wkt = to_shape(d.geom_local)
            ap["geom_local"] = geom_wkt
            tab_ap.append(ap)
        return to_csv_resp(file_name, tab_ap, tab_ap[0].keys(), ";")
    else:
        dir_path = str(ROOT_DIR / "backend/static/shapefiles")
        FionaShapeService.create_shapes_struct(
            db_cols=ExportAp.__mapper__.c,
            srid=2154,
            dir_path=dir_path,
            file_name=file_name,
        )
        for row in data:
            FionaShapeService.create_feature(row.as_dict(), row.geom_local)
        FionaShapeService.save_and_zip_shapefiles()
        return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)

def getOneExportThread(id_export, export_format, info_role):
    """
    Run export with thread
    """
    # test if the export exists
    if (id_export < 1
            or export_format not in blueprint.config.get('export_format_map')):
        return to_json_resp(
            {
                'api_error': 'invalid_export',
                'message': 'Invalid export or export not found'
            },
            status=404)

    current_app.config.update(
        export_format_map=blueprint.config['export_format_map'])

    filters = {f: request.args.get(f) for f in request.args}
    data = dict(request.get_json())

    # alternative email in the payload
    tmp_user = User()
    if 'email' in data:
        tmp_user.email = data['email']

    try:
        @copy_current_request_context
        def get_data(id_export, export_format, info_role, filters, user):
            thread_export_data(id_export, export_format, info_role, filters, user)

        # test if the export is allowed
        try:
            repo.get_export_is_allowed(id_export, info_role)
        except Exception:
            return to_json_resp({'message': "Not Allowed"}, status=403)

        # test if the user has an email
        try:
            user = (DB.session.query(User).filter(
                User.id_role == info_role.id_role).one())
            if not user.email and not tmp_user.email:
                return to_json_resp(
                    {
                        'api_error': 'no_email',
                        'message': "User doesn't have an email"
                    },
                    status=500)
        except NoResultFound:
            return to_json_resp(
                {
                    'api_error': 'no_user',
                    'message': "User doesn't exist"
                },
                status=404)

        # run the export
        a = threading.Thread(
            name="export_data",
            target=get_data,
            kwargs={
                "id_export": id_export,
                "export_format": export_format,
                "info_role": info_role,
                "filters": filters,
                "user": tmp_user if tmp_user.email else user
            })
        a.start()

        return to_json_resp(
            {
                'api_success': 'in_progress',
                'message': 'The process is in progress! You will receive an email shortly'  # noqa
            },
            status=200)
    except Exception as e:
        LOGGER.critical('%s', e)
        if current_app.config['DEBUG']:
            raise
        return to_json_resp({'api_error': 'logged_error'}, status=400)

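# --- Illustration (not from the source): the threading pattern used above.
# copy_current_request_context copies the active request context into the
# worker thread, so the thread can keep reading the request after the view
# that spawned it has returned.
import threading

from flask import Flask, copy_current_request_context, request

app = Flask(__name__)


@app.route('/launch')
def launch():
    @copy_current_request_context
    def work():
        # safe: this thread owns a copy of the original request context
        print('exporting for', request.args.get('user'))

    threading.Thread(name='export_data', target=work).start()
    return {'api_success': 'in_progress'}, 200
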