def export_all_observations(module_code, type, method, jd):
    """
    Export all the observations made on a site group.

    Following formats are available:
    * csv
    * geojson
    * shapefile

    NOTE(review): despite the list above, only the ``csv`` format is
    currently implemented here; any other value raises ``NotFound``.

    :param module_code: code of the monitoring module (used to build the
        export view name ``v_export_<module>_<method>``)
    :param type: export format requested (shadows the builtin ``type`` —
        kept for backward compatibility with callers/routes)
    :param method: monitoring method, part of the view name
    :param jd: id of the dataset used to filter the rows
    """
    view = GenericTableGeo(
        tableName="v_export_" + module_code.lower() + "_" + method,
        schemaName="gn_monitoring",
        engine=DB.engine,
    )
    columns = view.tableDef.columns
    # Fix: build the query once and reuse it. The previous implementation
    # constructed an unfiltered query, discarded it, then re-built a second
    # filtered query from scratch.
    q = DB.session.query(*columns).filter(columns.id_dataset == jd)
    data = q.all()

    filename = (
        module_code
        + "_"
        + method
        + "_"
        + dt.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    )
    if type == "csv":
        return to_csv_resp(
            filename,
            data=serializeQuery(data, q.column_descriptions),
            separator=";",
            # Exclude the geom column from CSV
            columns=[db_col.key for db_col in columns if db_col.key != "geom"],
        )
    else:
        raise NotFound("type export not found")
def uuid_report(info_role):
    """
    get the UUID report of a dataset

    .. :quickref: Metadata;

    Optional GET parameters: ``id_dataset``, ``id_import``, ``id_module``
    each narrowing the Synthese rows included in the CSV.
    """
    params = request.args
    ds_id = params.get("id_dataset")
    id_import = params.get("id_import")
    id_module = params.get("id_module")

    query = DB.session.query(Synthese).select_from(Synthese)

    if id_module:
        query = query.filter(Synthese.id_module == id_module)
    if ds_id:
        query = query.filter(Synthese.id_dataset == ds_id)
    if id_import:
        # Imported rows are tagged through their source name.
        query = query.outerjoin(
            TSources, TSources.id_source == Synthese.id_source
        ).filter(TSources.name_source == "Import(id={})".format(id_import))

    # Fix: run the query exactly once. The previous implementation called
    # ``query.all()`` into an unused variable and then again inside the
    # comprehension, executing the SQL twice.
    data = [
        {
            "identifiantOrigine": row.entity_source_pk_value,
            "identifiant_gn": row.id_synthese,
            "identifiantPermanent (SINP)": row.unique_id_sinp,
            "nomcite": row.nom_cite,
            "jourDateDebut": row.date_min,
            "jourDatefin": row.date_max,
            "observateurIdentite": row.observers,
        }
        for row in query.all()
    ]
    return to_csv_resp(
        filename="filename",
        data=data,
        columns=[
            "identifiantOrigine",
            "identifiant_gn",
            "identifiantPermanent (SINP)",
            "nomcite",
            "jourDateDebut",
            "jourDatefin",
            "observateurIdentite",
        ],
    )
def export_metadata(info_role):
    """Route to export the metadata in CSV

    .. :quickref: Synthese;

    The table synthese is join with gn_synthese.v_metadata_for_export
    The column jdd_id is mandatory in the view
    gn_synthese.v_metadata_for_export

    POST parameters: Use a list of id_synthese (in POST parameters)
    to filter the v_synthese_for_export_view
    """
    # Filters may arrive as JSON body, raw bytes, or GET parameters.
    if request.json:
        filters = request.json
    elif request.data:
        # decode byte to str - compat python 3.5
        filters = json.loads(request.data.decode("utf-8"))
    else:
        filters = {
            key: request.args.getlist(key) for key, value in request.args.items()
        }

    metadata_view = GenericTable(
        tableName="v_metadata_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    # Fix: the previous implementation first built an ORM query with a join
    # here and immediately overwrote it with the ``select`` below; that dead
    # construction has been removed — the join is declared once through
    # SyntheseQuery.add_join.
    q = select([distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef])
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, q, filters)
    synthese_query_class.add_join(
        metadata_view.tableDef,
        getattr(
            metadata_view.tableDef.columns,
            current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
        ),
        VSyntheseForWebApp.id_dataset,
    )
    # Apply every GET/POST filter plus the user's CRUVED restrictions.
    synthese_query_class.filter_query_all_filters(info_role)
    data = DB.engine.execute(synthese_query_class.query)
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=[metadata_view.as_dict(d) for d in data],
        separator=";",
        columns=[db_col.key for db_col in metadata_view.tableDef.columns],
    )
def export_metadata(info_role):
    """Route to export the metadata in CSV

    .. :quickref: Synthese;

    The table synthese is join with gn_synthese.v_metadata_for_export
    The column jdd_id is mandatory in the view
    gn_synthese.v_metadata_for_export

    POST parameters: Use a list of id_synthese (in POST parameters) to
    filter the v_synthese_for_export_view
    """
    # Collect every GET parameter, each as a list of values.
    filters = {param: request.args.getlist(param) for param in request.args}

    metadata_view = GenericTable(
        tableName="v_metadata_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    # Column of the metadata view that carries the dataset id; its name is
    # configurable.
    dataset_id_col = getattr(
        metadata_view.tableDef.columns,
        current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
    )
    q = DB.session.query(
        distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef
    ).join(metadata_view.tableDef, dataset_id_col == VSyntheseForWebApp.id_dataset)
    # Apply all request filters plus the user's CRUVED restrictions.
    q = synthese_query.filter_query_all_filters(
        VSyntheseForWebApp, q, filters, info_role
    )
    rows = q.all()
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=[metadata_view.as_dict(row) for row in rows],
        separator=";",
        columns=[col.key for col in metadata_view.tableDef.columns],
    )
def export_all_habitats(
    info_role,
    export_format="csv",
):
    """
    Download all stations
    The route is in post to avoid a too large query string

    .. :quickref: Occhab;
    """
    # Station ids to export, posted as JSON: {"idsStation": [...]}
    data = request.get_json()

    export_view = GenericTableGeo(
        tableName="v_export_sinp",
        schemaName="pr_occhab",
        engine=DB.engine,
        geometry_field="geom_local",
        srid=current_app.config["LOCAL_SRID"],
    )
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    # Restrict the export to the columns whitelisted in the module config
    # (note: the config key really is spelled "EXPORT_COLUMS" here).
    db_cols_for_shape = []
    columns_to_serialize = []
    for db_col in export_view.db_cols:
        if db_col.key in blueprint.config["EXPORT_COLUMS"]:
            if db_col.key != "geometry":
                db_cols_for_shape.append(db_col)
                columns_to_serialize.append(db_col.key)
    # Lazy query: executed when iterated below; capped by NB_MAX_EXPORT.
    results = (
        DB.session.query(export_view.tableDef)
        .filter(export_view.tableDef.columns.id_station.in_(data["idsStation"]))
        .limit(blueprint.config["NB_MAX_EXPORT"])
    )
    if export_format == "csv":
        # NOTE(review): ``fields=[]`` presumably serializes every column of
        # the row — confirm against GenericTableGeo.as_dict.
        formated_data = [export_view.as_dict(d, fields=[]) for d in results]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == "geojson":
        features = []
        for r in results:
            features.append(
                Feature(
                    geometry=json.loads(r.geojson),
                    properties=export_view.as_dict(r, fields=columns_to_serialize),
                )
            )
        return to_json_resp(
            FeatureCollection(features), as_file=True, filename=file_name, indent=4
        )
    else:
        # Any other format (shapefile, gpkg, ...) is delegated to
        # export_as_geo_file, which writes the file on disk.
        try:
            dir_name, file_name = export_as_geo_file(
                export_format=export_format,
                export_view=export_view,
                db_cols=db_cols_for_shape,
                geojson_col=None,
                data=results,
                file_name=file_name,
            )
            return send_from_directory(dir_name, file_name, as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"]
                + "/#/"
                + blueprint.config["MODULE_URL"],
            )
def export(info_role):
    """Export data from pr_occtax.v_export_occtax view (parameter)

    .. :quickref: Occtax; Export data from pr_occtax.v_export_occtax

    :query str format: format of the export ('csv', 'geojson', 'shapefile')
    """
    # Export parameters all come from the blueprint configuration.
    export_view_name = blueprint.config["export_view_name"]
    export_geom_column = blueprint.config["export_geom_columns_name"]
    export_columns = blueprint.config["export_columns"]
    export_srid = blueprint.config["export_srid"]

    export_view = GenericTableGeo(
        tableName=export_view_name,
        schemaName="pr_occtax",
        engine=DB.engine,
        geometry_field=export_geom_column,
        srid=export_srid,
    )
    # Base query restricted by the user's CRUVED rights...
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    # ...then narrowed by the GET filters of the request.
    q = get_query_occtax_filters(
        request.args,
        export_view,
        q,
        from_generic_table=True,
        obs_txt_column=blueprint.config["export_observer_txt_column"],
    )
    data = q.all()

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    export_format = request.args["format"] if "format" in request.args else "geojson"
    if export_format == "csv":
        # Fall back to every view column when no column list is configured.
        columns = (
            export_columns
            if len(export_columns) > 0
            else [db_col.key for db_col in export_view.db_cols]
        )
        return to_csv_resp(
            file_name, [export_view.as_dict(d) for d in data], columns, ";"
        )
    elif export_format == "geojson":
        results = FeatureCollection(
            [export_view.as_geofeature(d, columns=export_columns) for d in data]
        )
        return to_json_resp(
            results, as_file=True, filename=file_name, indent=4, extension="geojson"
        )
    else:
        # Geo-file export (shapefile, ...): only the configured columns.
        try:
            db_cols = [
                db_col
                for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_name, file_name = export_as_geo_file(
                export_format=export_format,
                export_view=export_view,
                db_cols=db_cols,
                geojson_col=None,
                data=data,
                file_name=file_name,
            )
            return send_from_directory(dir_name, file_name, as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/occtax",
            )
def export_status(info_role):
    """Route to get all the protection status of a synthese search

    .. :quickref: Synthese;

    Get the CRUVED from 'R' action because we don't give observations X/Y
    but only statuts and to be constistant with the data displayed in the
    web interface

    Parameters:
        - HTTP-GET: the same that the /synthese endpoint
          (all the filter in web app)
    """
    # Every GET parameter, each as a list of values.
    filters = {param: request.args.getlist(param) for param in request.args}

    # Columns of the CSV, in display order; also used to build each row.
    export_columns = [
        "nom_complet",
        "nom_vern",
        "cd_nom",
        "cd_ref",
        "type_protection",
        "article",
        "intitule",
        "arrete",
        "date_arrete",
        "url",
    ]

    # initalize the select object
    q = select(
        [
            distinct(VSyntheseForWebApp.cd_nom),
            Taxref.nom_complet,
            Taxref.cd_ref,
            Taxref.nom_vern,
            TaxrefProtectionArticles.type_protection,
            TaxrefProtectionArticles.article,
            TaxrefProtectionArticles.intitule,
            TaxrefProtectionArticles.arrete,
            TaxrefProtectionArticles.date_arrete,
            TaxrefProtectionArticles.url,
        ]
    )
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, q, filters)

    # Join taxonomy and protection tables on cd_nom / cd_protection.
    synthese_query_class.add_join(Taxref, Taxref.cd_nom, VSyntheseForWebApp.cd_nom)
    synthese_query_class.add_join(
        TaxrefProtectionEspeces,
        TaxrefProtectionEspeces.cd_nom,
        VSyntheseForWebApp.cd_nom,
    )
    synthese_query_class.add_join(
        TaxrefProtectionArticles,
        TaxrefProtectionArticles.cd_protection,
        TaxrefProtectionEspeces.cd_protection,
    )

    # filter with all get params
    q = synthese_query_class.filter_query_all_filters(info_role)

    rows = DB.engine.execute(q)
    # Build each CSV row from the shared column list instead of repeating
    # the keys by hand.
    protection_status = [
        OrderedDict((col, row[col]) for col in export_columns) for row in rows
    ]

    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        protection_status,
        separator=";",
        columns=export_columns,
    )
def export_observations_web(info_role):
    """Optimized route for observations web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator
    Some columns are mandatory: id_synthese, geojson and geojson_local to
    generate the exported files

    POST parameters: Use a list of id_synthese (in POST parameters) to
    filter the v_synthese_for_export_view

    :query str export_format: str<'csv', 'geojson', 'shapefiles'>
    """
    params = request.args
    # set default to csv
    export_format = "csv"
    export_view = GenericTableGeo(
        tableName="v_synthese_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
        geometry_field=None,
        srid=current_app.config["LOCAL_SRID"],
    )
    if "export_format" in params:
        export_format = params["export_format"]

    # get list of id synthese from POST
    id_list = request.get_json()

    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)
    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns[
            current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"]
        ].in_(id_list)
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(info_role.id_role, module_code="SYNTHESE")[
        0
    ]
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser and observer in the
        # v_synthese_for_export_view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_SYNTHESE_COL"
            ],
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_DIGITISER_COL"
            ],
            with_generic_table=True,
        )
    # Lazy query capped to NB_MAX_OBS_EXPORT rows; executed when iterated in
    # the branches below.
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == "geojson":
        features = []
        for r in results:
            # The 4326 geojson is stored as a python-literal string in the
            # view, hence ast.literal_eval rather than json.loads.
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        # Shapefile export: clean the output directory, then write a zip.
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"),
                excluded_files=[".gitkeep"],
            )
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"][
                    "EXPORT_GEOJSON_LOCAL_COL"
                ],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(
                dir_path, file_name + ".zip", as_attachment=True
            )
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
            )
def export_taxon_web(info_role):
    """Optimized route for taxon web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator
    Some columns are mandatory: cd_ref

    POST parameters: Use a list of cd_ref (in POST parameters) to filter
    the v_synthese_taxon_for_export_view

    :query str export_format: str<'csv'>
    """
    taxon_view = GenericTable(
        tableName="v_synthese_taxon_for_export_view",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    columns = taxon_view.tableDef.columns
    # Conformity check of the export view: it must expose a cd_ref column.
    try:
        assert hasattr(taxon_view.tableDef.columns, "cd_ref")
    except AssertionError as e:
        return {"msg": """ View v_synthese_taxon_for_export_view must have a cd_ref column \n trace: {} """.format(str(e))}, 500

    # List of cd_ref posted by the client.
    id_list = request.get_json()

    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]

    # Per-taxon aggregates: observation count and min/max dates.
    subq = DB.session.query(
        VSyntheseForWebApp.cd_ref,
        func.count(distinct(VSyntheseForWebApp.id_synthese)).label("nb_obs"),
        func.min(VSyntheseForWebApp.date_min).label("date_min"),
        func.max(VSyntheseForWebApp.date_max).label("date_max")
    ).filter(
        VSyntheseForWebApp.id_synthese.in_(id_list)
    ).group_by(VSyntheseForWebApp.cd_ref)
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser
        # and observer in the v_synthese_for_export_view
        subq = synthese_query.filter_query_with_cruved(
            VSyntheseForWebApp,
            subq,
            info_role,
            id_synthese_column="id_synthese",
            id_dataset_column="id_dataset",
            observers_column="observers",
            id_digitiser_column="id_digitiser",
            with_generic_table=False,
        )
    subq = subq.subquery()
    # Join the export-view columns with the per-taxon aggregates.
    q = DB.session.query(
        *columns, subq.c.nb_obs, subq.c.date_min, subq.c.date_max
    ).join(subq, subq.c.cd_ref == columns.cd_ref)
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=serializeQuery(q.all(), q.column_descriptions),
        separator=";",
        columns=[db_col.key for db_col in columns]
        + ["nb_obs", "date_min", "date_max"]
    )
def export(info_role):
    """Export data from pr_occtax.v_export_occtax view (parameter)

    .. :quickref: Occtax; Export data from pr_occtax.v_export_occtax

    :query str format: format of the export
        ('csv', 'geojson', 'shapefile', 'gpkg')
    """
    export_view_name = blueprint.config["export_view_name"]
    export_geom_column = blueprint.config["export_geom_columns_name"]
    export_columns = blueprint.config["export_columns"]
    export_srid = blueprint.config["export_srid"]
    export_format = request.args["format"] if "format" in request.args else "geojson"
    export_col_name_additional_data = blueprint.config[
        "export_col_name_additional_data"
    ]

    export_view = GenericTableGeo(
        tableName=export_view_name,
        schemaName="pr_occtax",
        engine=DB.engine,
        geometry_field=export_geom_column,
        srid=export_srid,
    )
    # Fall back to every view column when no column list is configured.
    columns = (
        export_columns
        if len(export_columns) > 0
        else [db_col.key for db_col in export_view.db_cols]
    )
    # Query restricted by the user's CRUVED rights, then by GET filters.
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    q = get_query_occtax_filters(
        request.args,
        export_view,
        q,
        from_generic_table=True,
        obs_txt_column=blueprint.config["export_observer_txt_column"],
    )
    if current_app.config["OCCTAX"]["ADD_MEDIA_IN_EXPORT"]:
        q, columns = releve_repository.add_media_in_export(q, columns)
    data = q.all()

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    # Additional exportable columns: the global ones plus, when filtering on
    # a dataset, the dataset-specific ones.
    additional_col_names = []
    query_add_fields = (
        DB.session.query(TAdditionalFields)
        .filter(TAdditionalFields.modules.any(module_code="OCCTAX"))
        .filter(TAdditionalFields.exportable == True)
    )
    global_add_fields = query_add_fields.filter(
        ~TAdditionalFields.datasets.any()
    ).all()
    if "id_dataset" in request.args:
        dataset_add_fields = query_add_fields.filter(
            TAdditionalFields.datasets.any(id_dataset=request.args["id_dataset"])
        ).all()
        global_add_fields = [*global_add_fields, *dataset_add_fields]
    additional_col_names = [field.field_name for field in global_add_fields]

    if export_format == "csv":
        # set additional data col at the end (remove it and inset it ...)
        # NOTE(review): raises ValueError if the configured additional-data
        # column is absent from ``columns`` — confirm it is always present.
        columns.remove(export_col_name_additional_data)
        columns = columns + additional_col_names
        columns.append(export_col_name_additional_data)
        if additional_col_names:
            serialize_result = [
                as_dict_with_add_cols(
                    export_view,
                    row,
                    export_col_name_additional_data,
                    additional_col_names,
                )
                for row in data
            ]
        else:
            serialize_result = [export_view.as_dict(row) for row in data]
        return to_csv_resp(file_name, serialize_result, columns, ";")
    elif export_format == "geojson":
        if additional_col_names:
            features = []
            for row in data:
                properties = as_dict_with_add_cols(
                    export_view,
                    row,
                    export_col_name_additional_data,
                    additional_col_names,
                )
                feature = Feature(
                    properties=properties,
                    geometry=to_shape(getattr(row, export_geom_column)),
                )
                features.append(feature)
            serialize_result = FeatureCollection(features)
        else:
            serialize_result = FeatureCollection(
                [export_view.as_geofeature(d, fields=export_columns) for d in data]
            )
        return to_json_resp(
            serialize_result,
            as_file=True,
            filename=file_name,
            indent=4,
            extension="geojson",
        )
    else:
        try:
            db_cols = [
                db_col
                for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_name, file_name = export_as_geo_file(
                export_format=export_format,
                export_view=export_view,
                db_cols=db_cols,
                geojson_col=None,
                data=data,
                file_name=file_name,
            )
            # Fix: removed a dead re-computation of ``db_cols`` that followed
            # this call in the previous implementation (its result was never
            # used).
            return send_from_directory(dir_name, file_name, as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/occtax",
            )
def export(info_role):
    """Export data from pr_occtax.export_occtax_sinp view (parameter)

    .. :quickref: Occtax; Export data from pr_occtax.export_occtax_sinp

    :query str format: format of the export ('csv', 'geojson', 'shapefile')
    """
    export_view_name = blueprint.config["export_view_name"]
    export_geom_column = blueprint.config["export_geom_columns_name"]
    export_columns = blueprint.config["export_columns"]
    export_srid = blueprint.config["export_srid"]
    # Fix: dropped the unused ``export_id_column_name`` local that was read
    # from the config but never referenced afterwards.

    export_view = GenericTable(
        export_view_name, "pr_occtax", export_geom_column, export_srid
    )
    # Query restricted by the user's CRUVED rights, then by GET filters.
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    q = get_query_occtax_filters(
        request.args,
        export_view,
        q,
        from_generic_table=True,
        obs_txt_column=blueprint.config["export_observer_txt_column"],
    )
    data = q.all()

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    export_format = request.args["format"] if "format" in request.args else "geojson"
    if export_format == "csv":
        # Fall back to every view column when no column list is configured.
        columns = (
            export_columns
            if len(export_columns) > 0
            else [db_col.key for db_col in export_view.db_cols]
        )
        return to_csv_resp(
            file_name, [export_view.as_dict(d) for d in data], columns, ";"
        )
    elif export_format == "geojson":
        results = FeatureCollection(
            [export_view.as_geofeature(d, columns=export_columns) for d in data]
        )
        return to_json_resp(
            results, as_file=True, filename=file_name, indent=4, extension="geojson"
        )
    else:
        # Shapefile export: clean the shapefiles directory then write a zip.
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"),
                excluded_files=[".gitkeep"],
            )
            db_cols = [
                db_col
                for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols, data=data, dir_path=dir_path, file_name=file_name
            )
            return send_from_directory(
                dir_path, file_name + ".zip", as_attachment=True
            )
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/occtax",
            )
def export(info_role):
    """Export data from pr_occtax.export_occtax_sinp view (parameter)

    .. :quickref: Occtax; Export data from pr_occtax.export_occtax_sinp

    :query str format: format of the export
        ('csv', 'geojson', 'shapefile', 'medias')
    """
    export_view_name = blueprint.config["export_view_name"]
    export_geom_column = blueprint.config["export_geom_columns_name"]
    export_columns = blueprint.config["export_columns"]
    export_srid = blueprint.config["export_srid"]

    export_view = GenericTableGeo(
        tableName=export_view_name,
        schemaName="pr_occtax",
        engine=DB.engine,
        geometry_field=export_geom_column,
        srid=export_srid,
    )
    # Query restricted by the user's CRUVED rights, then by GET filters.
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    q = get_query_occtax_filters(
        request.args,
        export_view,
        q,
        from_generic_table=True,
        obs_txt_column=blueprint.config["export_observer_txt_column"],
    )
    data = q.all()

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    export_format = request.args["format"] if "format" in request.args else "geojson"
    if export_format == "csv":
        # Fall back to every view column when no column list is configured.
        columns = (
            export_columns
            if len(export_columns) > 0
            else [db_col.key for db_col in export_view.db_cols]
        )
        return to_csv_resp(
            file_name, [export_view.as_dict(d) for d in data], columns, ";"
        )
    elif export_format == "geojson":
        results = FeatureCollection(
            [export_view.as_geofeature(d, columns=export_columns) for d in data]
        )
        return to_json_resp(
            results, as_file=True, filename=file_name, indent=4, extension="geojson"
        )
    # MET 21/10/2020: added a "medias" export — a zip containing every media
    # file attached to the filtered releves.
    elif export_format == "medias":
        try:
            # Re-query through the ORM model (not the export view) to reach
            # the media relationships.
            releve_repository_for_media = ReleveRepository(TRelevesOccurrence)
            q = releve_repository_for_media.get_filtered_query(info_role)
            parameters = request.args
            # Filters
            q = get_query_occtax_filters(parameters, TRelevesOccurrence, q)
            data = q.all()
            user = info_role
            user_cruved = get_or_fetch_user_cruved(
                session=session,
                id_role=info_role.id_role,
                module_code="OCCTAX",
            )
            # Create the export directory if it does not exist...
            dir_path = str(ROOT_DIR / "backend/static/medias/exports")
            if not os.path.exists(dir_path):
                os.makedirs(dir_path)
            # ...then clean it.
            filemanager.delete_recursively(dir_path)
            featureCollection = []
            zip_path = dir_path + "/" + file_name + ".zip"
            zp_file = zipfile.ZipFile(zip_path, mode="w")
            for n in data:
                releve_cruved = n.get_releve_cruved(user, user_cruved)
                feature = n.get_geofeature(
                    relationships=(
                        't_occurrences_occtax',
                        'cor_counting_occtax',
                        'medias',
                    )
                )
                # Walk releve -> occurrences -> countings -> medias and add
                # every existing media file to the zip.
                if 'properties' in feature:
                    if 't_occurrences_occtax' in feature['properties']:
                        for occurence in feature['properties'][
                            't_occurrences_occtax'
                        ]:
                            for counting in occurence['cor_counting_occtax']:
                                if 'medias' in counting:
                                    for media in counting['medias']:
                                        if media['media_path'] is not None:
                                            file_path = str(
                                                ROOT_DIR / "backend/"
                                            ) + "/" + media['media_path']
                                            if os.path.exists(file_path):
                                                zp_file.write(
                                                    file_path,
                                                    os.path.basename(file_path),
                                                )
                                            # copyfile( file_path, dir_path + "/" + os.path.basename(file_path))
                                            # featureCollection.append(file_path)
            zp_file.close()
            return send_from_directory(
                dir_path, file_name + ".zip", as_attachment=True
            )
        except GeonatureApiError as e:
            # NOTE(review): this branch only records the message and falls
            # through, so the route implicitly returns None on error —
            # confirm whether it should render the error page like the
            # branch below.
            message = str(e)
    else:
        # Shapefile export: clean the shapefiles directory then write a zip.
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"),
                excluded_files=[".gitkeep"],
            )
            db_cols = [
                db_col
                for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols, data=data, dir_path=dir_path, file_name=file_name
            )
            return send_from_directory(
                dir_path, file_name + ".zip", as_attachment=True
            )
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/occtax",
            )
def export_all_habitats(
    info_role,
    export_format='csv',
):
    """
    Download all stations
    The route is in post to avoid a too large query string

    .. :quickref: Occhab;
    """
    # Station ids to export, posted as JSON: {"idsStation": [...]}
    data = request.get_json()

    # NOTE(review): GenericTable is called here with geometry_field/srid
    # keyword arguments (and no engine), which sibling blocks pass to
    # GenericTableGeo — confirm this constructor accepts them.
    export_view = GenericTable(
        tableName="v_export_sinp",
        schemaName="pr_occhab",
        geometry_field=None,
        srid=current_app.config["LOCAL_SRID"],
    )
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    # Restrict the export to the columns whitelisted in the module config
    # (note: the config key really is spelled "EXPORT_COLUMS" here).
    db_cols_for_shape = []
    columns_to_serialize = []
    for db_col in export_view.db_cols:
        if db_col.key in blueprint.config['EXPORT_COLUMS']:
            if db_col.key != 'geometry':
                db_cols_for_shape.append(db_col)
                columns_to_serialize.append(db_col.key)
    # Lazy query: executed when iterated below; capped by NB_MAX_EXPORT.
    results = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns.id_station.in_(data['idsStation'])
    ).limit(
        blueprint.config['NB_MAX_EXPORT']
    )
    if export_format == 'csv':
        # NOTE(review): ``columns=[]`` presumably serializes every column of
        # the row — confirm against GenericTable.as_dict.
        formated_data = [
            export_view.as_dict(d, columns=[]) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == 'geojson':
        features = []
        for r in results:
            features.append(
                Feature(
                    geometry=json.loads(r.geojson),
                    properties=export_view.as_dict(
                        r, columns=columns_to_serialize)
                )
            )
        return to_json_resp(
            FeatureCollection(features), as_file=True, filename=file_name, indent=4
        )
    else:
        # Shapefile export: clean the shapefiles directory then write a zip.
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"),
                excluded_files=[".gitkeep"]
            )
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col="geojson",
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(
                dir_path, file_name + ".zip", as_attachment=True
            )
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"]
                + "/#/"
                + blueprint.config['MODULE_URL'],
            )
def export_observations_web(info_role):
    """Optimized route for observations web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator
    Some columns are mandatory: id_synthese, geojson and geojson_local to
    generate the exported files

    POST parameters: Use a list of id_synthese (in POST parameters) to
    filter the v_synthese_for_export_view

    :query str export_format: str<'csv', 'geojson', 'shapefiles', 'gpkg'>
    """
    params = request.args
    export_format = params.get("export_format", "csv")
    # Test export_format
    if not export_format in current_app.config["SYNTHESE"]["EXPORT_FORMAT"]:
        raise BadRequest("Unsupported format")
    # set default to csv
    export_view = GenericTableGeo(
        tableName="v_synthese_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
        geometry_field=None,
        srid=current_app.config["LOCAL_SRID"],
    )
    # get list of id synthese from POST
    id_list = request.get_json()

    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)
    # Core select restricted to the posted ids; CRUVED restrictions are
    # applied through SyntheseQuery below when required.
    query = select([export_view.tableDef]).where(
        export_view.tableDef.columns[
            current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"]
        ].in_(id_list)
    )
    synthese_query_class = SyntheseQuery(
        export_view.tableDef,
        query,
        {},
        id_synthese_column=current_app.config["SYNTHESE"][
            "EXPORT_ID_SYNTHESE_COL"
        ],
        id_dataset_column=current_app.config["SYNTHESE"][
            "EXPORT_ID_DATASET_COL"
        ],
        observers_column=current_app.config["SYNTHESE"][
            "EXPORT_OBSERVERS_COL"
        ],
        id_digitiser_column=current_app.config["SYNTHESE"][
            "EXPORT_ID_DIGITISER_COL"
        ],
        with_generic_table=True,
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]
    if cruved["R"] > cruved["E"]:
        synthese_query_class.filter_query_with_cruved(info_role)
    # Execute the query, capped to NB_MAX_OBS_EXPORT rows.
    results = DB.session.execute(
        synthese_query_class.query.limit(
            current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"]
        )
    )
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == "geojson":
        features = []
        for r in results:
            # The 4326 geojson column is JSON text in this view.
            geometry = json.loads(
                getattr(
                    r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"]
                )
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        # Geo-file export (shapefile/gpkg) built from the local-SRID geojson
        # column.
        try:
            dir_name, file_name = export_as_geo_file(
                export_format=export_format,
                export_view=export_view,
                db_cols=db_cols_for_shape,
                geojson_col=current_app.config["SYNTHESE"][
                    "EXPORT_GEOJSON_LOCAL_COL"
                ],
                data=results,
                file_name=file_name,
            )
            return send_from_directory(dir_name, file_name, as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
            )