def general_stats(info_role):
    """Return global statistics about the synthese.

    .. :quickref: Synthese;

    Counts returned:
    - nb of observations
    - nb of distinct species
    - nb of distinct observers
    - nb of datasets (restricted by the user's CRUVED)
    """
    datasets = get_datasets_cruved(info_role)
    # One aggregate row: total obs, distinct taxa, distinct observers.
    stats_select = select(
        [
            func.count(Synthese.id_synthese),
            func.count(func.distinct(Synthese.cd_nom)),
            func.count(func.distinct(Synthese.observers)),
        ]
    )
    query_obj = SyntheseQuery(Synthese, stats_select, {})
    # Restrict the counts to what the user is allowed to read.
    query_obj.filter_query_with_cruved(info_role)
    nb_data, nb_species, nb_observers = DB.session.execute(query_obj.query).fetchone()
    return {
        "nb_data": nb_data,
        "nb_species": nb_species,
        "nb_observers": nb_observers,
        "nb_dataset": len(datasets),
    }
def get_observations_for_web(info_role):
    """
    Optimized route for serve data to the frontend with all filters

    Parameters:
        - GET : all the fields of the view v_synthese_for_export
    Return: dict with a GeoJSON FeatureCollection and counters
    """
    # Only the keys are needed: getlist() fetches every value for a key.
    # (The previous items() iteration discarded the value.)
    filters = {key: request.args.getlist(key) for key in request.args}
    if "limit" in filters:
        result_limit = filters.pop("limit")[0]
    else:
        result_limit = current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"]
    query = (
        select(
            [
                VSyntheseForWebApp.id_synthese,
                VSyntheseForWebApp.date_min,
                VSyntheseForWebApp.lb_nom,
                VSyntheseForWebApp.cd_nom,
                VSyntheseForWebApp.nom_vern,
                VSyntheseForWebApp.st_asgeojson,
                VSyntheseForWebApp.observers,
                VSyntheseForWebApp.dataset_name,
                VSyntheseForWebApp.url_source,
                VSyntheseForWebApp.entity_source_pk_value,
            ]
        )
        # rows without geometry cannot be drawn on the map
        .where(VSyntheseForWebApp.the_geom_4326.isnot(None))
        .order_by(VSyntheseForWebApp.date_min.desc())
    )
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, query, filters)
    # Apply the user's filters and CRUVED restrictions.
    synthese_query_class.filter_query_all_filters(info_role)
    result = DB.engine.execute(synthese_query_class.query.limit(result_limit))
    geojson_features = []
    for r in result:
        properties = {
            "id": r["id_synthese"],
            "date_min": str(r["date_min"]),
            "cd_nom": r["cd_nom"],
            "nom_vern_or_lb_nom": r["nom_vern"] if r["nom_vern"] else r["lb_nom"],
            "lb_nom": r["lb_nom"],
            "dataset_name": r["dataset_name"],
            "observers": r["observers"],
            "url_source": r["url_source"],
            "entity_source_pk_value": r["entity_source_pk_value"],
        }
        # st_asgeojson is a JSON-ish string; literal_eval parses it safely
        geojson = ast.literal_eval(r["st_asgeojson"])
        geojson["properties"] = properties
        geojson_features.append(geojson)
    return {
        "data": FeatureCollection(geojson_features),
        "nb_total": len(geojson_features),
        # True when the result was capped by the configured map limit
        "nb_obs_limited": len(geojson_features)
        == current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"],
    }
def get_observations_for_web(info_role):
    """
    Optimized route for serve data to the frontend with all filters

    Parameters:
        - GET : all the fields of the view v_synthese_for_export
    Return: dict with a GeoJSON FeatureCollection and counters
    """
    # Iterate keys directly; the items() value was unused (PERF102).
    filters = {key: request.args.getlist(key) for key in request.args}
    if "limit" in filters:
        result_limit = filters.pop("limit")[0]
    else:
        result_limit = current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"]
    query = (
        select(
            [
                VSyntheseForWebApp.id_synthese,
                VSyntheseForWebApp.date_min,
                VSyntheseForWebApp.lb_nom,
                VSyntheseForWebApp.cd_nom,
                VSyntheseForWebApp.nom_vern,
                VSyntheseForWebApp.st_asgeojson,
                VSyntheseForWebApp.observers,
                VSyntheseForWebApp.dataset_name,
                VSyntheseForWebApp.url_source,
                VSyntheseForWebApp.entity_source_pk_value,
            ]
        )
        # geometry is mandatory to build GeoJSON features
        .where(VSyntheseForWebApp.the_geom_4326.isnot(None))
        .order_by(VSyntheseForWebApp.date_min.desc())
    )
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, query, filters)
    # Apply all user filters plus CRUVED restrictions.
    synthese_query_class.filter_query_all_filters(info_role)
    result = DB.engine.execute(synthese_query_class.query.limit(result_limit))
    geojson_features = []
    for r in result:
        properties = {
            "id": r["id_synthese"],
            "date_min": str(r["date_min"]),
            "cd_nom": r["cd_nom"],
            "nom_vern_or_lb_nom": r["nom_vern"] if r["nom_vern"] else r["lb_nom"],
            "lb_nom": r["lb_nom"],
            "dataset_name": r["dataset_name"],
            "observers": r["observers"],
            "url_source": r["url_source"],
            "entity_source_pk_value": r["entity_source_pk_value"],
        }
        # parse the serialized GeoJSON geometry string
        geojson = ast.literal_eval(r["st_asgeojson"])
        geojson["properties"] = properties
        geojson_features.append(geojson)
    return {
        "data": FeatureCollection(geojson_features),
        "nb_total": len(geojson_features),
        # flag set when the configured map cap was reached
        "nb_obs_limited": len(geojson_features)
        == current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"],
    }
def get_synthese(info_role):
    """Return synthese row(s) filtered by form params. NOT USED ANY MORE FOR PERFORMANCE ISSUES

    .. :quickref: Synthese; Deprecated

    .. deprecated:: 2?
    Use :route: /for_web instead

    Params must have same synthese fields names

    :parameter str info_role: Role used to get the associated filters
    :returns dict[dict, int, bool]: See description above
    """
    # change all args in a list of value (value from items() was unused)
    filters = {key: request.args.getlist(key) for key in request.args}
    if "limit" in filters:
        result_limit = filters.pop("limit")[0]
    else:
        result_limit = current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"]
    query = select([VSyntheseForWebApp]).order_by(VSyntheseForWebApp.date_min.desc())
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, query, filters)
    # Apply all user filters plus CRUVED restrictions.
    synthese_query_class.filter_query_all_filters(info_role)
    data = DB.engine.execute(synthese_query_class.query.limit(result_limit))
    # Columns exposed to the web app plus the always-required ones.
    columns = (
        current_app.config["SYNTHESE"]["COLUMNS_API_SYNTHESE_WEB_APP"]
        + MANDATORY_COLUMNS
    )
    features = []
    for d in data:
        feature = d.get_geofeature(columns=columns)
        feature["properties"]["nom_vern_or_lb_nom"] = (
            d.lb_nom if d.nom_vern is None else d.nom_vern
        )
        features.append(feature)
    return {
        "data": FeatureCollection(features),
        # True when the result was capped by the configured map limit
        "nb_obs_limited": len(features)
        == current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"],
        "nb_total": len(features),
    }
def export_metadata(info_role):
    """Route to export the metadata in CSV

    .. :quickref: Synthese;

    The table synthese is join with gn_synthese.v_metadata_for_export
    The column jdd_id is mandatory in the view gn_synthese.v_metadata_for_export

    POST parameters: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view
    """
    if request.json:
        filters = request.json
    elif request.data:
        # decode byte to str - compat python 3.5
        filters = json.loads(request.data.decode("utf-8"))
    else:
        filters = {key: request.args.getlist(key) for key in request.args}
    metadata_view = GenericTable(
        tableName="v_metadata_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    # NOTE: a previous ORM query built here was immediately overwritten by the
    # Core select below; that dead code has been removed.
    q = select([distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef])
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, q, filters)
    # Join the metadata view on its configured dataset-id column.
    synthese_query_class.add_join(
        metadata_view.tableDef,
        getattr(
            metadata_view.tableDef.columns,
            current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
        ),
        VSyntheseForWebApp.id_dataset,
    )
    # Apply all user filters plus CRUVED restrictions.
    synthese_query_class.filter_query_all_filters(info_role)
    data = DB.engine.execute(synthese_query_class.query)
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=[metadata_view.as_dict(d) for d in data],
        separator=";",
        columns=[db_col.key for db_col in metadata_view.tableDef.columns],
    )
def export_status(info_role):
    """
    Route to get all the protection status of a synthese search

    Parameters:
        - HTTP-GET: the same that the /synthese endpoint (all the filter in web app)

    Get the CRUVED from 'R' action because we don't give observations X/Y but only statuts
    and to be constistant with the data displayed in the web interface
    """
    filters = {key: request.args.getlist(key) for key in request.args}
    # CSV column order; also used as the per-row key order below.
    export_columns = [
        "nom_complet",
        "nom_vern",
        "cd_nom",
        "cd_ref",
        "type_protection",
        "article",
        "intitule",
        "arrete",
        "date_arrete",
        "url",
    ]
    # Base selectable: one row per distinct taxon / protection article pair.
    q = select(
        [
            distinct(VSyntheseForWebApp.cd_nom),
            Taxref.nom_complet,
            Taxref.cd_ref,
            Taxref.nom_vern,
            TaxrefProtectionArticles.type_protection,
            TaxrefProtectionArticles.article,
            TaxrefProtectionArticles.intitule,
            TaxrefProtectionArticles.arrete,
            TaxrefProtectionArticles.date_arrete,
            TaxrefProtectionArticles.url,
        ]
    )
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, q, filters)
    # Join taxonomy and protection tables onto the synthese view.
    synthese_query_class.add_join(Taxref, Taxref.cd_nom, VSyntheseForWebApp.cd_nom)
    synthese_query_class.add_join(
        TaxrefProtectionEspeces,
        TaxrefProtectionEspeces.cd_nom,
        VSyntheseForWebApp.cd_nom,
    )
    synthese_query_class.add_join(
        TaxrefProtectionArticles,
        TaxrefProtectionArticles.cd_protection,
        TaxrefProtectionEspeces.cd_protection,
    )
    # Apply every GET filter plus the user's CRUVED restrictions.
    q = synthese_query_class.filter_query_all_filters(info_role)
    rows = DB.engine.execute(q)
    protection_status = [
        OrderedDict((col, row[col]) for col in export_columns) for row in rows
    ]
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        protection_status,
        separator=";",
        columns=export_columns,
    )
def get_synthese_data(info_role):
    """
    Return synthese and t_validations data filtered by form params
    Params must have same synthese fields names

    .. :quickref: Validation;

    Parameters:
    ------------
    info_role (User):
        Information about the user asking the route. Auto add with kwargs
    Returns
    -------
    dict
        {
        "data": FeatureCollection,
        "nb_obs_limited": bool, whether the returned row count reached the limit,
        "nb_total": int,
        }
    """
    if request.json:
        filters = request.json
    elif request.data:
        # decode byte to str - compat python 3.5
        filters = json.loads(request.data.decode("utf-8"))
    else:
        # value from items() was unused: iterate keys directly
        filters = {key: request.args.get(key) for key in request.args}
    if "limit" in filters:
        result_limit = filters.pop("limit")
    else:
        result_limit = blueprint.config["NB_MAX_OBS_MAP"]

    # Build the SELECT clause: mandatory columns plus user-configured ones.
    columns = (
        blueprint.config["COLUMNS_API_VALIDATION_WEB_APP"]
        + blueprint.config["MANDATORY_COLUMNS"]
    )
    select_columns = []
    serializer = {}
    for c in columns:
        try:
            att = getattr(VSyntheseValidation, c)
            select_columns.append(att)
            # pick a serializer matching the column's SQL type, identity otherwise
            serializer[c] = SERIALIZERS.get(
                att.type.__class__.__name__.lower(), lambda x: x
            )
        except AttributeError:
            # configured column missing from the view: skip it but keep a trace
            log.warning("Validation : colonne {} inexistante".format(c))

    # Build the query with SyntheseQuery to benefit from CRUVED filtering.
    query = (
        select(select_columns)
        .where(VSyntheseValidation.the_geom_4326.isnot(None))
        .order_by(VSyntheseValidation.date_min.desc())
    )
    validation_query_class = SyntheseQuery(VSyntheseValidation, query, filters)
    validation_query_class.filter_query_all_filters(info_role)
    result = DB.engine.execute(validation_query_class.query.limit(result_limit))

    # TODO: nb_total is a placeholder (always 0 in the original code)
    nb_total = 0
    geojson_features = []
    for r in result:
        properties = {col: fn(r[col]) for col, fn in serializer.items()}
        properties["nom_vern_or_lb_nom"] = (
            r["nom_vern"] if r["nom_vern"] else r["lb_nom"]
        )
        # geojson column holds a serialized geometry string
        geojson = ast.literal_eval(r["geojson"])
        geojson["properties"] = properties
        geojson["id"] = r["id_synthese"]
        geojson_features.append(geojson)
    return {
        "data": FeatureCollection(geojson_features),
        "nb_obs_limited": nb_total == blueprint.config["NB_MAX_OBS_MAP"],
        "nb_total": nb_total,
    }
def get_synthese_data(info_role):
    """
    Return synthese and t_validations data filtered by form params
    Params must have same synthese fields names

    .. :quickref: Validation;

    Parameters:
    ------------
    info_role (User):
        Information about the user asking the route. Auto add with kwargs
    Returns
    -------
    dict
        GeoJSON FeatureCollection plus pagination counters
    """
    # value from items() was unused: iterate keys directly
    filters = {key: request.args.getlist(key) for key in request.args}
    # Comma-separated values are multi-valued filters, except geoIntersection
    # whose single geometry payload may legitimately contain commas.
    for key, value in filters.items():
        if "," in value[0] and key != "geoIntersection":
            filters[key] = value[0].split(",")
    if "limit" in filters:
        result_limit = filters.pop("limit")[0]
    else:
        result_limit = blueprint.config["NB_MAX_OBS_MAP"]
    query = (
        select(
            [
                VSyntheseValidation.cd_nomenclature_validation_status,
                VSyntheseValidation.dataset_name,
                VSyntheseValidation.date_min,
                VSyntheseValidation.id_nomenclature_valid_status,
                VSyntheseValidation.id_synthese,
                VSyntheseValidation.nom_valide,
                # nom_vern was selected twice in the original; deduplicated
                VSyntheseValidation.nom_vern,
                VSyntheseValidation.geojson,
                VSyntheseValidation.observers,
                VSyntheseValidation.validation_auto,
                VSyntheseValidation.validation_date,
                VSyntheseValidation.lb_nom,
                VSyntheseValidation.cd_nom,
                VSyntheseValidation.comment_description,
                VSyntheseValidation.altitude_min,
                VSyntheseValidation.altitude_max,
                VSyntheseValidation.unique_id_sinp,
                VSyntheseValidation.meta_update_date,
            ]
        )
        .where(VSyntheseValidation.the_geom_4326.isnot(None))
        .order_by(VSyntheseValidation.date_min.desc())
    )
    validation_query_class = SyntheseQuery(VSyntheseValidation, query, filters)
    # Apply all user filters plus CRUVED restrictions.
    validation_query_class.filter_query_all_filters(info_role)
    result = DB.engine.execute(validation_query_class.query.limit(result_limit))
    # TODO: nb_total is a placeholder (always 0 in the original code).
    # NOTE: an unused `columns` config concatenation was removed as dead code.
    nb_total = 0
    geojson_features = []
    for r in result:
        properties = {
            "id_synthese": r["id_synthese"],
            "cd_nomenclature_validation_status": r["cd_nomenclature_validation_status"],
            "dataset_name": r["dataset_name"],
            "date_min": str(r["date_min"]),
            "nom_vern_or_lb_nom": r["nom_vern"] if r["nom_vern"] else r["lb_nom"],
            "observers": r["observers"],
            "validation_auto": r["validation_auto"],
            "validation_date": str(r["validation_date"]),
            "altitude_min": r["altitude_min"],
            "altitude_max": r["altitude_max"],
            "comment": r["comment_description"],
            "cd_nom": r["cd_nom"],
            "unique_id_sinp": str(r["unique_id_sinp"]),
            "meta_update_date": str(r["meta_update_date"]),
        }
        # geojson column holds a serialized geometry string
        geojson = ast.literal_eval(r["geojson"])
        geojson["properties"] = properties
        geojson["id"] = r["id_synthese"]
        geojson_features.append(geojson)
    return {
        "data": FeatureCollection(geojson_features),
        "nb_obs_limited": nb_total == blueprint.config["NB_MAX_OBS_MAP"],
        "nb_total": nb_total,
    }
def get_observations_for_web(info_role):
    """Optimized route to serve data for the frontend with all filters.

    .. :quickref: Synthese; Get filtered observations

    Query filtered by any filter, returning all the fields of the
    view v_synthese_for_export::

        properties = {
            "id": r["id_synthese"],
            "date_min": str(r["date_min"]),
            "cd_nom": r["cd_nom"],
            "nom_vern_or_lb_nom": r["nom_vern"] if r["nom_vern"] else r["lb_nom"],
            "lb_nom": r["lb_nom"],
            "dataset_name": r["dataset_name"],
            "observers": r["observers"],
            "url_source": r["url_source"],
            "entity_source_pk_value": r["entity_source_pk_value"],
        }
        geojson = ast.literal_eval(r["st_asgeojson"])
        geojson["properties"] = properties

    :param str info_role: Role used to get the associated filters, **TBC**
    :qparam str limit: Limit number of synthese returned. Defaults to NB_MAX_OBS_MAP.
    :qparam str cd_ref: Filter by TAXREF cd_ref attribute
    :qparam str taxonomy_group2_inpn: Filter by TAXREF group2_inpn attribute
    :qparam str taxonomy_id_hab: Filter by TAXREF id_habitat attribute
    :qparam str taxonomy_lr: Filter by TAXREF cd_ref attribute
    :qparam str taxhub_attribut*: Generig TAXREF filter, given attribute & value
    :qparam str observers: Filter on observer
    :qparam str id_organism: Filter on organism
    :qparam str date_min: Start date
    :qparam str date_max: End date
    :qparam str id_acquisition_framework: *tbd*
    :qparam str geoIntersection: Intersect with the geom send from the map
    :qparam str period_start: *tbd*
    :qparam str period_end: *tbd*
    :qparam str area*: Generic filter on area
    :qparam str *: Generic filter, given by colname & value
    :>jsonarr array data: Array of synthese with geojson key, see above
    :>jsonarr int nb_total: Number of observations
    :>jsonarr bool nb_obs_limited: Is number of observations capped
    """
    if request.json:
        filters = request.json
    elif request.data:
        # decode byte to str - compat python 3.5
        filters = json.loads(request.data.decode("utf-8"))
    else:
        # value from items() was unused: iterate keys directly
        filters = {key: request.args.getlist(key) for key in request.args}

    # Normalize every filter value to a list for compatibility with
    # SyntheseQuery (JSON payloads may carry scalars).
    # TODO: see whether this normalization can be removed.
    for k in filters.keys():
        if not isinstance(filters[k], list):
            filters[k] = [filters[k]]

    if "limit" in filters:
        result_limit = filters.pop("limit")[0]
    else:
        result_limit = current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"]
    query = (
        select(
            [
                VSyntheseForWebApp.id_synthese,
                VSyntheseForWebApp.date_min,
                VSyntheseForWebApp.lb_nom,
                VSyntheseForWebApp.cd_nom,
                VSyntheseForWebApp.nom_vern,
                VSyntheseForWebApp.st_asgeojson,
                VSyntheseForWebApp.observers,
                VSyntheseForWebApp.dataset_name,
                VSyntheseForWebApp.url_source,
                VSyntheseForWebApp.entity_source_pk_value,
            ]
        )
        # rows without geometry cannot be drawn on the map
        .where(VSyntheseForWebApp.the_geom_4326.isnot(None))
        .order_by(VSyntheseForWebApp.date_min.desc())
    )
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, query, filters)
    # Apply all user filters plus CRUVED restrictions.
    synthese_query_class.filter_query_all_filters(info_role)
    result = DB.engine.execute(synthese_query_class.query.limit(result_limit))
    geojson_features = []
    for r in result:
        properties = {
            "id": r["id_synthese"],
            "date_min": str(r["date_min"]),
            "cd_nom": r["cd_nom"],
            "nom_vern_or_lb_nom": r["nom_vern"] if r["nom_vern"] else r["lb_nom"],
            "lb_nom": r["lb_nom"],
            "dataset_name": r["dataset_name"],
            "observers": r["observers"],
            "url_source": r["url_source"],
            "entity_source_pk_value": r["entity_source_pk_value"],
        }
        # parse the serialized GeoJSON geometry string
        geojson = ast.literal_eval(r["st_asgeojson"])
        geojson["properties"] = properties
        geojson_features.append(geojson)
    return {
        "data": FeatureCollection(geojson_features),
        "nb_total": len(geojson_features),
        # flag set when the configured map cap was reached
        "nb_obs_limited": len(geojson_features)
        == current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"],
    }
def export_status(info_role):
    """Route to get all the protection status of a synthese search

    .. :quickref: Synthese;

    Get the CRUVED from 'R' action because we don't give observations X/Y
    but only statuts and to be constistant with the data displayed in the
    web interface

    Parameters:
        - HTTP-GET: the same that the /synthese endpoint (all the filter in web app)
    """
    filters = {key: request.args.getlist(key) for key in request.args}
    # Field list drives both row construction and the CSV header order.
    csv_fields = [
        "nom_complet",
        "nom_vern",
        "cd_nom",
        "cd_ref",
        "type_protection",
        "article",
        "intitule",
        "arrete",
        "date_arrete",
        "url",
    ]
    # Base selectable over taxonomy + protection article columns.
    q = select(
        [
            distinct(VSyntheseForWebApp.cd_nom),
            Taxref.nom_complet,
            Taxref.cd_ref,
            Taxref.nom_vern,
            TaxrefProtectionArticles.type_protection,
            TaxrefProtectionArticles.article,
            TaxrefProtectionArticles.intitule,
            TaxrefProtectionArticles.arrete,
            TaxrefProtectionArticles.date_arrete,
            TaxrefProtectionArticles.url,
        ]
    )
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, q, filters)
    # Wire up the joins: taxref, protected species, protection articles.
    synthese_query_class.add_join(Taxref, Taxref.cd_nom, VSyntheseForWebApp.cd_nom)
    synthese_query_class.add_join(
        TaxrefProtectionEspeces,
        TaxrefProtectionEspeces.cd_nom,
        VSyntheseForWebApp.cd_nom,
    )
    synthese_query_class.add_join(
        TaxrefProtectionArticles,
        TaxrefProtectionArticles.cd_protection,
        TaxrefProtectionEspeces.cd_protection,
    )
    # Apply every GET filter plus the user's CRUVED restrictions.
    q = synthese_query_class.filter_query_all_filters(info_role)
    data = DB.engine.execute(q)
    protection_status = []
    for d in data:
        protection_status.append(
            OrderedDict([(field, d[field]) for field in csv_fields])
        )
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        protection_status,
        separator=";",
        columns=csv_fields,
    )
def get_synthese_data(info_role):
    """
    Return synthese and t_validations data filtered by form params
    Params must have same synthese fields names

    .. :quickref: Validation;

    Parameters:
    ------------
    info_role (User):
        Information about the user asking the route. Auto add with kwargs
    Returns
    -------
    FeatureCollection
    """
    enable_profile = current_app.config["FRONTEND"]["ENABLE_PROFILES"]
    # Dotted names select fields on joined relationships (see below).
    fields = {
        'id_synthese',
        'unique_id_sinp',
        'entity_source_pk_value',
        'meta_update_date',
        'id_nomenclature_valid_status',
        'nomenclature_valid_status.cd_nomenclature',
        'nomenclature_valid_status.mnemonique',
        'nomenclature_valid_status.label_default',
        'last_validation.validation_date',
        'last_validation.validation_auto',
        'taxref.cd_nom',
        'taxref.nom_vern',
        'taxref.lb_nom',
        'taxref.nom_vern_or_lb_nom',
        'dataset.validable'
    }
    if enable_profile:
        fields |= {
            'profile.score',
            'profile.valid_phenology',
            'profile.valid_altitude',
            'profile.valid_distribution',
        }
    # Administrator-configured extra columns.
    fields |= {col['column_name'] for col in blueprint.config["COLUMN_LIST"]}
    filters = request.json or {}
    result_limit = filters.pop("limit", blueprint.config["NB_MAX_OBS_MAP"])
    lateral_join = {}
    """
    1) We start creating the query with SQLAlchemy ORM.
    2) We convert this query to SQLAlchemy Core in order to use
       SyntheseQuery utility class to apply user filters.
    3) We get back the results in the ORM through from_statement.
       We populate relationships with contains_eager.
    We create a lot of aliases, that are selected at step 1, and given
    to contains_eager at step 3 to correctly identify columns to use
    to populate relationships models.
    """
    # Lateral subquery: most recent validation row attached to each synthese row.
    last_validation_subquery = (
        TValidations.query
        .filter(TValidations.uuid_attached_row == Synthese.unique_id_sinp)
        .order_by(TValidations.validation_date.desc())
        .limit(1)
        .subquery()
        .lateral('last_validation')
    )
    last_validation = aliased(TValidations, last_validation_subquery)
    lateral_join = {last_validation: Synthese.last_validation}
    if enable_profile:
        # Lateral subquery: consistency/profile data matching each synthese row.
        profile_subquery = (
            VConsistancyData.query
            .filter(VConsistancyData.id_synthese == Synthese.id_synthese)
            .limit(result_limit)
            .subquery()
            .lateral('profile')
        )
        profile = aliased(VConsistancyData, profile_subquery)
        lateral_join[profile] = Synthese.profile
    # Relationship names come from the dotted prefixes of `fields`,
    # excluding the two lateral joins handled above.
    relationships = list({
        field.split('.', 1)[0]
        for field in fields
        if '.' in field and not (
            field.startswith('last_validation.') or field.startswith('profile.')
        )
    })
    # Get dataset relationship : filter only validable dataset
    dataset_index = relationships.index('dataset')
    relationships = [getattr(Synthese, rel) for rel in relationships]
    aliases = [aliased(rel.property.mapper.class_) for rel in relationships]
    dataset_alias = aliases[dataset_index]
    # Step 1: ORM query selecting Synthese plus all aliased relationships.
    query = (db.session.query(Synthese, *aliases, *lateral_join.keys()))
    for rel, alias in zip(relationships, aliases):
        query = query.outerjoin(rel.of_type(alias))
    for alias in lateral_join.keys():
        # sa.true() as ON clause: lateral correlation does the real filtering.
        query = query.outerjoin(alias, sa.true())
    query = (
        query
        .filter(Synthese.the_geom_4326.isnot(None))
        .order_by(Synthese.date_min.desc())
    )
    # filter with profile
    if enable_profile:
        score = filters.pop("score", None)
        if score is not None:
            query = query.filter(profile.score == score)
        valid_distribution = filters.pop("valid_distribution", None)
        if valid_distribution is not None:
            query = query.filter(
                profile.valid_distribution.is_(valid_distribution)
            )
        valid_altitude = filters.pop("valid_altitude", None)
        if valid_altitude is not None:
            query = query.filter(profile.valid_altitude.is_(valid_altitude))
        valid_phenology = filters.pop("valid_phenology", None)
        if valid_phenology is not None:
            query = query.filter(profile.valid_phenology.is_(valid_phenology))
    if filters.pop("modif_since_validation", None):
        # Only rows edited after their latest validation.
        query = query.filter(
            Synthese.meta_update_date > last_validation.validation_date
        )
    # Filter only validable dataset
    query = query.filter(dataset_alias.validable == True)
    # Step 2: give SyntheseQuery the Core selectable from ORM query
    assert (len(query.selectable.froms) == 1)
    query = (
        SyntheseQuery(
            Synthese,
            query.selectable,
            filters,
            query_joins=query.selectable.froms[0]
        )
        .filter_query_all_filters(info_role)
        .limit(result_limit)
    )
    # Step 3: Construct Synthese model from query result
    query = (
        Synthese.query
        .options(*[
            contains_eager(rel, alias=alias)
            for rel, alias in zip(relationships, aliases)
        ])
        .options(*[
            contains_eager(rel, alias=alias)
            for alias, rel in lateral_join.items()
        ])
        .from_statement(query)
    )
    # The raise option ensure that we have correctly retrived relationships data at step 3
    return jsonify(
        query.as_geofeaturecollection(fields=fields, unloaded='raise')
    )
def export_observations_web(info_role):
    """Optimized route for observations web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator
    Some columns are mandatory: id_synthese, geojson and geojson_local to generate the exported files

    POST parameters: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view

    :query str export_format: str<'csv', 'geojson', 'shapefiles', 'gpkg'>
    """
    params = request.args
    # set default to csv
    export_format = params.get("export_format", "csv")
    # Reject unsupported formats early ("not in" reads better than "not x in").
    if export_format not in current_app.config["SYNTHESE"]["EXPORT_FORMAT"]:
        raise BadRequest("Unsupported format")
    export_view = GenericTableGeo(
        tableName="v_synthese_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
        geometry_field=None,
        srid=current_app.config["LOCAL_SRID"],
    )
    # get list of id synthese from POST
    id_list = request.get_json()
    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)
    query = select([export_view.tableDef]).where(
        export_view.tableDef.columns[
            current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"]
        ].in_(id_list)
    )
    synthese_query_class = SyntheseQuery(
        export_view.tableDef,
        query,
        {},
        id_synthese_column=current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"],
        id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
        observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
        id_digitiser_column=current_app.config["SYNTHESE"]["EXPORT_ID_DIGITISER_COL"],
        with_generic_table=True,
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]
    if cruved["R"] > cruved["E"]:
        # export scope is narrower than read scope: restrict the export
        synthese_query_class.filter_query_with_cruved(info_role)
    results = DB.session.execute(
        synthese_query_class.query.limit(
            current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"]
        )
    )
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == "geojson":
        features = []
        for r in results:
            # the 4326 geojson column holds a serialized geometry string
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        # shapefiles / gpkg: delegate to the geo file exporter
        try:
            dir_name, file_name = export_as_geo_file(
                export_format=export_format,
                export_view=export_view,
                db_cols=db_cols_for_shape,
                geojson_col=current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"],
                data=results,
                file_name=file_name,
            )
            return send_from_directory(dir_name, file_name, as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)
            return render_template(
                "error.html",
                error=message,
                redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
            )
def export_taxon_web(info_role):
    """Optimized route for taxon web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator
    Some columns are mandatory: cd_ref

    POST parameters: Use a list of cd_ref (in POST parameters) to filter the v_synthese_taxon_for_export_view

    :query str export_format: str<'csv'>
    """
    taxon_view = GenericTable(
        tableName="v_synthese_taxon_for_export_view",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    columns = taxon_view.tableDef.columns
    # View conformity check. An explicit test replaces the previous
    # `assert`, which would be silently stripped under `python -O`.
    if not hasattr(columns, "cd_ref"):
        return (
            {
                "msg": (
                    "View v_synthese_taxon_for_export_view "
                    "must have a cd_ref column"
                )
            },
            500,
        )
    id_list = request.get_json()
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]
    # Aggregate per taxon: observation count and observation date range.
    sub_query = (
        select(
            [
                VSyntheseForWebApp.cd_ref,
                func.count(distinct(VSyntheseForWebApp.id_synthese)).label("nb_obs"),
                func.min(VSyntheseForWebApp.date_min).label("date_min"),
                func.max(VSyntheseForWebApp.date_max).label("date_max"),
            ]
        )
        .where(VSyntheseForWebApp.id_synthese.in_(id_list))
        .group_by(VSyntheseForWebApp.cd_ref)
    )
    synthese_query_class = SyntheseQuery(
        VSyntheseForWebApp,
        sub_query,
        {},
    )
    if cruved["R"] > cruved["E"]:
        # filter on cruved
        synthese_query_class.filter_query_with_cruved(info_role)
    subq = synthese_query_class.query.alias("subq")
    # Join the aggregates back onto the export view, one row per cd_ref.
    q = DB.session.query(
        *columns, subq.c.nb_obs, subq.c.date_min, subq.c.date_max
    ).join(subq, subq.c.cd_ref == columns.cd_ref)
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=serializeQuery(q.all(), q.column_descriptions),
        separator=";",
        columns=[db_col.key for db_col in columns] + ["nb_obs", "date_min", "date_max"],
    )