def filter_query_with_cruved( model, q, user, id_station_col="id_station", id_dataset_column="id_dataset", observers_column="observers_txt", id_digitiser_column="id_digitiser", filter_on_obs_txt=True, with_generic_table=False, ): """ Filter the query with the cruved authorization of a user Returns: - A SQLA Query object """ # if with geniric table , the column are located in model.columns, else in model if with_generic_table: model_temp = model.columns else: model_temp = model # get the mandatory column try: model_id_station_col = getattr(model_temp, id_station_col) model_id_dataset_column = getattr(model_temp, id_dataset_column) model_observers_column = getattr(model_temp, observers_column) model_id_digitiser_column = getattr(model_temp, id_digitiser_column) except AttributeError as e: raise GeonatureApiError( """the {model} table does not have a column {e} If you change the {model} table, please edit your synthese config (cf EXPORT_***_COL) """.format( e=e, model=model ) ) if user.value_filter in ("1", "2"): sub_query_id_role = DB.session.query(CorStationObserverOccHab).filter( CorStationObserverOccHab.id_role == user.id_role).exists() ors_filters = [ sub_query_id_role, model_id_digitiser_column == user.id_role, ] if filter_on_obs_txt: user_fullname1 = user.nom_role + " " + user.prenom_role + "%" user_fullname2 = user.prenom_role + " " + user.nom_role + "%" ors_filters.append(model_observers_column.ilike(user_fullname1)) ors_filters.append(model_observers_column.ilike(user_fullname2)) if user.value_filter == "1": allowed_datasets = TDatasets.get_user_datasets( user, only_query=True, only_user=True).exists() ors_filters.append(allowed_datasets) q = q.filter(or_(*ors_filters)) elif user.value_filter == "2": allowed_datasets = TDatasets.get_user_datasets( user, only_query=True).exists() ors_filters.append(allowed_datasets) q = q.filter(or_(*ors_filters)) return q
def filter_query_with_cruved(self, user):
    """
    Restrict ``self.query`` according to the user's CRUVED scope.

    Scope "1" keeps rows the user observed, digitised, possibly matches as
    free-text observer, or whose dataset is their own; scope "2" widens the
    dataset criterion to the organism's datasets. Any other scope leaves
    the query unchanged.
    """
    if user.value_filter not in ("1", "2"):
        return
    # ids of synthese rows where the user is a registered observer
    observed_ids = (select([
        CorObserverSynthese.id_synthese
    ]).select_from(CorObserverSynthese).where(
        CorObserverSynthese.id_role == user.id_role))
    conditions = [
        self.model_id_syn_col.in_(observed_ids),
        self.model_id_digitiser_column == user.id_role,
    ]
    if current_app.config["SYNTHESE"][
            "CRUVED_SEARCH_WITH_OBSERVER_AS_TXT"]:
        # also match "Nom Prenom%" / "Prenom Nom%" in the observers text column
        pattern_a = user.nom_role + " " + user.prenom_role + "%"
        pattern_b = user.prenom_role + " " + user.nom_role + "%"
        conditions.append(self.model_observers_column.ilike(pattern_a))
        conditions.append(self.model_observers_column.ilike(pattern_b))
    if user.value_filter == "1":
        datasets = TDatasets.get_user_datasets(user, only_user=True)
    else:
        datasets = TDatasets.get_user_datasets(user)
    conditions.append(self.model_id_dataset_column.in_(datasets))
    self.query = self.query.where(or_(*conditions))
def general_stats(info_role):
    """Return stats about synthese.

    .. :quickref: Synthese;

    - nb of observations
    - nb of distinct species
    - nb of distinct observer
    - nb of datasets
    """
    datasets = TDatasets.get_user_datasets(info_role)
    counts_query = DB.session.query(
        func.count(Synthese.id_dataset),
        func.count(func.distinct(Synthese.cd_nom)),
        func.count(func.distinct(Synthese.observers)),
    )
    # apply the user's CRUVED so the counts reflect only visible rows
    counts_query = synthese_query.filter_query_with_cruved(
        Synthese, counts_query, info_role
    )
    nb_data, nb_species, nb_observers = counts_query.one()
    return {
        "nb_data": nb_data,
        "nb_species": nb_species,
        "nb_observers": nb_observers,
        "nb_dataset": len(datasets),
    }
def filter_query_with_cruved(self, user):
    """
    Filter the query with the cruved authorization of a user.

    Scope "1": keep rows the user observed, digitised or (optionally)
    matches as free-text observer. Scope "2": additionally keep rows
    belonging to the user's allowed datasets. Other scopes: no filtering.
    """
    if user.value_filter in ("1", "2"):
        # left join on the observers association so the OR below can match
        # on CorObserverSynthese.id_role
        self.add_join(
            CorObserverSynthese,
            CorObserverSynthese.id_synthese,
            self.model.id_synthese,
            join_type="left",
        )
        ors_filters = [
            CorObserverSynthese.id_role == user.id_role,
            self.model.id_digitiser == user.id_role,
        ]
        if current_app.config["SYNTHESE"]["CRUVED_SEARCH_WITH_OBSERVER_AS_TXT"]:
            # match "Nom Prenom%" / "Prenom Nom%" in the observers text column
            user_fullname1 = user.nom_role + " " + user.prenom_role + "%"
            user_fullname2 = user.prenom_role + " " + user.nom_role + "%"
            ors_filters.append(self.model.observers.ilike(user_fullname1))
            ors_filters.append(self.model.observers.ilike(user_fullname2))
        if user.value_filter == "1":
            self.query = self.query.where(or_(*ors_filters))
        elif user.value_filter == "2":
            # fetched only here: the original resolved allowed datasets
            # unconditionally, issuing a useless DB query for scopes 1/3
            allowed_datasets = TDatasets.get_user_datasets(user)
            ors_filters.append(self.model.id_dataset.in_(allowed_datasets))
            self.query = self.query.where(or_(*ors_filters))
def filter_query_generic_table(self, user):
    """
    Return a prepared query filter with cruved authorization
    from a generic_table (a view).

    :param user: role object carrying id_role and value_filter
    :returns: a SQLA Query on ``self.model.tableDef`` restricted to the
        rows visible for the user's scope ("1" own data, "2" own data +
        allowed datasets; other scopes unrestricted)
    """
    q = DB.session.query(self.model.tableDef)
    if user.value_filter in ("1", "2"):
        # left join the releve/observer association so the OR filters below
        # can match on the observer's id_role
        q = q.outerjoin(
            corRoleRelevesOccurrence,
            # NOTE(review): accessed as corRoleRelevesOccurrence.id_releve_occtax
            # here, but a sibling implementation uses .columns.id_releve_occtax —
            # confirm whether corRoleRelevesOccurrence is a mapped class or a Table
            self.model.tableDef.columns.id_releve_occtax ==
            corRoleRelevesOccurrence.id_releve_occtax,
        )
        if user.value_filter == "2":
            # scope 2: also allow rows from the user's allowed datasets
            allowed_datasets = TDatasets.get_user_datasets(user)
            q = q.filter(
                or_(
                    self.model.tableDef.columns.id_dataset.in_(
                        tuple(allowed_datasets)),
                    corRoleRelevesOccurrence.id_role == user.id_role,
                    self.model.tableDef.columns.id_digitiser == user.id_role,
                ))
        elif user.value_filter == "1":
            # scope 1: only rows the user observed or digitised
            q = q.filter(
                or_(
                    corRoleRelevesOccurrence.id_role == user.id_role,
                    self.model.tableDef.columns.id_digitiser == user.id_role,
                ))
    return q
def delete_dataset(info_role, ds_id):
    """
    Delete a dataset

    .. :quickref: Metadata;

    :param info_role: role performing the deletion (carries id_role and
        value_filter)
    :param ds_id: primary key of the dataset to delete
    :raises GeonatureApiError: when the dataset still has synthese data
        attached, or does not exist
    :raises Forbidden: when the user's CRUVED scope does not allow deletion
    :returns: empty body with HTTP 204 on success
    """
    # refuse deletion while observations reference the dataset in the synthese
    if not is_dataset_deletable(ds_id):
        raise GeonatureApiError(
            "La suppression du jeu de données n'est pas possible car des données y sont rattachées dans la Synthèse",
            406,
        )
    dataset = TDatasets.query.get(ds_id)
    if dataset is None:
        # Fix: .get() returns None for an unknown id; the original then hit
        # an AttributeError (HTTP 500) on dataset.user_is_allowed_to below
        raise GeonatureApiError(
            "Dataset {} does not exist".format(ds_id), 404
        )
    user_actor = TDatasets.get_user_datasets(info_role)
    allowed = dataset.user_is_allowed_to(user_actor, info_role, info_role.value_filter)
    if not allowed:
        raise Forbidden(
            f"User {info_role.id_role} cannot delete dataset {dataset.id_dataset}"
        )
    DB.session.query(TDatasets).filter(TDatasets.id_dataset == ds_id).delete()
    DB.session.commit()
    return '', 204
def filter_query_generic_table(self, user):
    """
    Return a prepared query filter with cruved authorization
    from a generic_table (a view).

    :param user: role object carrying id_role and value_filter
    :returns: a SQLA Query on ``self.model.tableDef`` restricted to the
        rows visible for the user's scope ("1" own data, "2" own data +
        allowed datasets; other scopes unrestricted)
    """
    q = DB.session.query(self.model.tableDef)
    if user.value_filter in ("1", "2"):
        # left join the releve/observer association so the OR filters below
        # can match on the observer's id_role; here corRoleRelevesOccurrence
        # is addressed via .columns, i.e. treated as a Table object
        q = q.outerjoin(
            corRoleRelevesOccurrence,
            self.model.tableDef.columns.id_releve_occtax
            == corRoleRelevesOccurrence.columns.id_releve_occtax,
        )
        if user.value_filter == "2":
            # scope 2: also allow rows from the user's allowed datasets
            allowed_datasets = TDatasets.get_user_datasets(user)
            q = q.filter(
                or_(
                    self.model.tableDef.columns.id_dataset.in_(
                        tuple(allowed_datasets)
                    ),
                    corRoleRelevesOccurrence.columns.id_role == user.id_role,
                    self.model.tableDef.columns.id_digitiser == user.id_role,
                )
            )
        elif user.value_filter == "1":
            # scope 1: only rows the user observed or digitised
            q = q.filter(
                or_(
                    corRoleRelevesOccurrence.columns.id_role == user.id_role,
                    self.model.tableDef.columns.id_digitiser == user.id_role,
                )
            )
    return q
def export(info_role):
    """
    Export filtered synthese observations as CSV, GeoJSON or shapefile.

    Query-string parameters:
        - limit: max number of rows (defaults to NB_MAX_OBS_EXPORT)
        - export_format: 'csv', 'geojson' or anything else for shapefile
        - any other synthese filter, forwarded to filter_query_all_filters

    :param info_role: role used to apply the CRUVED restriction
    :returns: a file response in the requested format
    """
    filters = dict(request.args)
    if 'limit' in filters:
        # request.args values are lists; take the first occurrence
        result_limit = filters.pop('limit')[0]
    else:
        result_limit = current_app.config['SYNTHESE']['NB_MAX_OBS_EXPORT']
    export_format = filters.pop('export_format')[0]
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    q = DB.session.query(VSyntheseForExport)
    q = synthese_query.filter_query_all_filters(VSyntheseForExport, q, filters,
                                                info_role, allowed_datasets)
    q = q.order_by(VSyntheseForExport.date_min.desc())
    data = q.limit(result_limit)
    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    formated_data = [d.as_dict_ordered() for d in data]
    # Fix: the original did formated_data[0].keys() unconditionally, which
    # raised IndexError when the filtered query returned no rows
    export_columns = formated_data[0].keys() if formated_data else []
    if export_format == 'csv':
        return to_csv_resp(
            file_name,
            formated_data,
            separator=';',
            columns=export_columns,
        )
    elif export_format == 'geojson':
        results = FeatureCollection(formated_data)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        # shapefile export: clear previous exports, then build and zip
        filemanager.delete_recursively(str(ROOT_DIR / 'backend/static/shapefiles'),
                                       excluded_files=['.gitkeep'])
        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
        FionaShapeService.create_shapes_struct(
            db_cols=VSyntheseForExport.db_cols,
            srid=current_app.config['LOCAL_SRID'],
            dir_path=dir_path,
            file_name=file_name,
            col_mapping=current_app.config['SYNTHESE']['EXPORT_COLUMNS'])
        for row in data:
            geom = row.the_geom_local
            row_as_dict = row.as_dict_ordered()
            FionaShapeService.create_feature(row_as_dict, geom)
        FionaShapeService.save_and_zip_shapefiles()
        return send_from_directory(dir_path, file_name + '.zip',
                                   as_attachment=True)
def get_status(info_role):
    """
    Route to get all the protection status of a synthese search.

    Joins the filtered synthese observations with Taxref and the protection
    tables and returns the result as a CSV response.

    :param info_role: role used to apply the CRUVED restriction
    :returns: a CSV response (one row per taxon/protection-article pair)
    """
    filters = dict(request.args)
    # distinct cd_nom so each taxon/article pair appears once, then join
    # taxonomy and protection reference tables
    q = (DB.session.query(distinct(VSyntheseForWebApp.cd_nom), Taxref,
                          TaxrefProtectionArticles).join(
                              Taxref,
                              Taxref.cd_nom == VSyntheseForWebApp.cd_nom).join(
                                  TaxrefProtectionEspeces,
                                  TaxrefProtectionEspeces.cd_nom ==
                                  VSyntheseForWebApp.cd_nom).join(
                                      TaxrefProtectionArticles,
                                      TaxrefProtectionArticles.cd_protection ==
                                      TaxrefProtectionEspeces.cd_protection))
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    q = synthese_query.filter_query_all_filters(VSyntheseForWebApp, q, filters,
                                                info_role, allowed_datasets)
    data = q.all()
    protection_status = []
    for d in data:
        # d is a (cd_nom, Taxref, TaxrefProtectionArticles) result tuple
        taxon = d[1].as_dict()
        protection = d[2].as_dict()
        # OrderedDict keeps CSV column order stable
        row = OrderedDict([
            ('nom_complet', taxon['nom_complet']),
            ('nom_vern', taxon['nom_vern']),
            ('cd_nom', taxon['cd_nom']),
            ('cd_ref', taxon['cd_ref']),
            ('type_protection', protection['type_protection']),
            ('article', protection['article']),
            ('intitule', protection['intitule']),
            ('arrete', protection['arrete']),
            ('date_arrete', protection['date_arrete']),
            ('url', protection['url']),
        ])
        protection_status.append(row)
    export_columns = [
        'nom_complet', 'nom_vern', 'cd_nom', 'cd_ref', 'type_protection',
        'article', 'intitule', 'arrete', 'date_arrete', 'url'
    ]
    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    return to_csv_resp(
        file_name,
        protection_status,
        separator=';',
        columns=export_columns,
    )
def get_dataset_details_dict(id_dataset, session_role):
    """
    Return a dataset from TDatasetDetails model (with all relationships);
    return also the number of taxon and observation of the dataset.
    Used for get_one dataset.

    :param id_dataset: primary key of the dataset
    :param session_role: role used for the cruved filter and permissions
    :returns: dict representation of the dataset (with 'imports' and
        'cruved' keys), or None when the dataset is not visible/not found
    """
    q = DB.session.query(TDatasetDetails)
    # restrict to datasets the role may see
    q = cruved_filter(q, TDatasetDetails, session_role)
    try:
        data = q.filter(TDatasetDetails.id_dataset == id_dataset).one()
    except NoResultFound:
        # not found OR filtered out by cruved — both yield None
        return None
    dataset = data.as_dict(True)
    # fetch the dataset's imports from the import module API,
    # forwarding the caller's session cookie for authentication
    imports = requests.get(
        current_app.config["API_ENDPOINT"] + "/import/by_dataset/" + str(id_dataset),
        headers={"Cookie": request.headers.get("Cookie")},  # forward the auth token
    )
    # best effort: silently skip imports on any non-200 answer
    if imports.status_code == 200:
        dataset["imports"] = imports.json()
    user_cruved = cruved_scope_for_user_in_module(
        id_role=session_role.id_role,
        module_code="METADATA",
    )[0]
    # compute the per-object CRUVED (user-owned vs organism-owned datasets)
    cruved = data.get_object_cruved(
        user_cruved=user_cruved,
        id_object=data.id_dataset,
        ids_object_user=TDatasets.get_user_datasets(session_role, only_user=True),
        ids_object_organism=TDatasets.get_user_datasets(session_role, only_user=False),
    )
    dataset["cruved"] = cruved
    return dataset
def filter_query_with_autorization(self, user):
    """
    Build a query on ``self.model`` restricted by the user's data scope.

    Scope "1" keeps records the user observed or digitised; scope "2"
    additionally keeps records from the user's allowed datasets; any
    other scope returns the unrestricted query.
    """
    query = DB.session.query(self.model)
    # criteria shared by both restricted scopes
    own_record_criteria = [
        self.model.observers.any(id_role=user.id_role),
        self.model.id_digitiser == user.id_role,
    ]
    if user.value_filter == "2":
        allowed_datasets = TDatasets.get_user_datasets(user)
        dataset_criterion = self.model.id_dataset.in_(tuple(allowed_datasets))
        query = query.filter(or_(dataset_criterion, *own_record_criteria))
    elif user.value_filter == "1":
        query = query.filter(or_(*own_record_criteria))
    return query
def filter_query_with_autorization(self, user):
    """
    Return a query on ``self.model`` filtered by the user's data scope:
    "1" = own records only, "2" = own records or allowed datasets,
    anything else = no restriction.
    """
    q = DB.session.query(self.model)
    if user.value_filter not in ("1", "2"):
        return q
    criteria = []
    if user.value_filter == "2":
        # dataset criterion comes first to preserve the original OR order
        criteria.append(
            self.model.id_dataset.in_(
                tuple(TDatasets.get_user_datasets(user))
            )
        )
    criteria.append(self.model.observers.any(id_role=user.id_role))
    criteria.append(self.model.id_digitiser == user.id_role)
    return q.filter(or_(*criteria))
def get_synthese(info_role):
    """
    Return synthese row(s) filtered by form params.
    Params must have same synthese fields names.

    :param info_role: role used to apply the CRUVED restriction
    :returns: dict with 'data' (GeoJSON FeatureCollection),
        'nb_obs_limited' (True when the result hit the configured cap)
        and 'nb_total' (number of returned features)
    """
    # request.args values are single-element lists of comma-joined values
    filters = {
        key: value[0].split(',')
        for key, value in dict(request.args).items()
    }
    if 'limit' in filters:
        result_limit = filters.pop('limit')[0]
    else:
        result_limit = current_app.config['SYNTHESE']['NB_MAX_OBS_MAP']
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    q = DB.session.query(VSyntheseForWebApp)
    q = synthese_query.filter_query_all_filters(VSyntheseForWebApp, q, filters,
                                                info_role, allowed_datasets)
    q = q.order_by(VSyntheseForWebApp.date_min.desc())
    data = q.limit(result_limit)
    columns = current_app.config['SYNTHESE'][
        'COLUMNS_API_SYNTHESE_WEB_APP'] + MANDATORY_COLUMNS
    features = []
    for d in data:
        feature = d.get_geofeature(columns=columns)
        # prefer the vernacular name when available
        feature['properties'][
            'nom_vern_or_lb_nom'] = d.lb_nom if d.nom_vern is None else d.nom_vern
        features.append(feature)
    # Fix: nb_total was initialised to 0 and never updated, so
    # 'nb_obs_limited' was always False and 'nb_total' always 0
    nb_total = len(features)
    return {
        'data': FeatureCollection(features),
        'nb_obs_limited':
        nb_total == current_app.config['SYNTHESE']['NB_MAX_OBS_MAP'],
        'nb_total': nb_total
    }
def general_stats(info_role):
    """
    Return stats about synthese:
    - nb of observations
    - nb of distinct species
    - nb of distinct observer
    - nb of datasets
    """
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    # one aggregate query, restricted by the user's CRUVED
    stats_query = synthese_query.filter_query_with_cruved(
        Synthese,
        DB.session.query(
            func.count(Synthese.id_dataset),
            func.count(func.distinct(Synthese.cd_nom)),
            func.count(func.distinct(Synthese.observers)),
        ),
        info_role,
    )
    row = stats_query.one()
    result = dict(zip(("nb_data", "nb_species", "nb_observers"), row))
    result["nb_dataset"] = len(allowed_datasets)
    return result
def delete_synthese(info_role, id_synthese):
    """
    Delete a synthese observation and its source record.

    :param info_role: role performing the deletion (checked by
        get_observation_if_allowed)
    :param id_synthese: primary key of the synthese observation
    :returns: success message dict with HTTP 200
    """
    synthese_obs = DB.session.query(Synthese).get(id_synthese)
    user_datasets = TDatasets.get_user_datasets(info_role)
    synthese_releve = synthese_obs.get_observation_if_allowed(
        info_role, user_datasets)
    # get and delete source
    # TODO
    # can historical data be deleted from the synthese?
    source = DB.session.query(TSources).filter(
        TSources.id_source == synthese_obs.id_source).one()
    pk_field_source = source.entity_source_pk_field
    # split "schema.table.pk_column" into table path and pk column name.
    # Fix: the original did `inter.join('.')` on a list — lists have no
    # .join method, so this raised AttributeError; the intent was
    # `'.'.join(inter)`, equivalently rpartition on the last dot
    table_source, _, pk_field = pk_field_source.rpartition('.')
    # NOTE(review): table/column names are interpolated into the SQL text;
    # they come from the t_sources configuration table, not user input
    sql = text("DELETE FROM {table} WHERE {pk_field} = :id".format(
        table=table_source, pk_field=pk_field))
    result = DB.engine.execute(sql, id=synthese_obs.entity_source_pk_value)
    # delete synthese obs
    DB.session.delete(synthese_releve)
    DB.session.commit()
    return {'message': 'delete with success'}, 200
def get_af_and_ds_metadata(info_role):
    """
    Get all AF with their datasets
    The Cruved in only apply on dataset in order to see all the AF
    where the user have rights with its dataset
    Use in maplist
    Add the CRUVED permission for each row (Dataset and AD)

    .. :quickref: Metadata;

    :param info_role: add with kwargs
    :type info_role: TRole
    :returns: `dict{'data':list<AF with Datasets>, 'with_erros': <boolean>}`
    """
    with_mtd_error = False
    if current_app.config["CAS_PUBLIC"]["CAS_AUTHENTIFICATION"]:
        # synchronise the CA and JDD from the MTD WS
        try:
            mtd_utils.post_jdd_from_user(
                id_user=info_role.id_role, id_organism=info_role.id_organisme
            )
        except Exception as e:
            # best effort: the sync failure is reported in the response,
            # it must not break the metadata listing
            gunicorn_error_logger.info(e)
            log.error(e)
            with_mtd_error = True
    params = request.args.to_dict()
    params["orderby"] = "dataset_name"
    if "selector" not in params:
        params["selector"] = None
    datasets = filtered_ds_query(info_role, params).distinct().all()
    if len(datasets) == 0:
        return {"data": []}
    # object ids used to compute the per-object CRUVED below
    ids_dataset_user = TDatasets.get_user_datasets(info_role, only_user=True)
    ids_dataset_organisms = TDatasets.get_user_datasets(info_role, only_user=False)
    ids_afs_user = TAcquisitionFramework.get_user_af(info_role, only_user=True)
    ids_afs_org = TAcquisitionFramework.get_user_af(info_role, only_user=False)
    user_cruved = cruved_scope_for_user_in_module(
        id_role=info_role.id_role,
        module_code="METADATA",
    )[0]
    # get all af from the JDD filtered with cruved or af where users has rights
    # (only when no selector was requested — TODO confirm the selector contract)
    ids_afs_cruved = (
        [d.id_acquisition_framework for d in get_af_cruved(info_role, as_model=True)]
        if not params["selector"]
        else []
    )
    list_id_af = [d.id_acquisition_framework for d in datasets] + ids_afs_cruved
    afs = (
        filtered_af_query(request.args)
        .filter(TAcquisitionFramework.id_acquisition_framework.in_(list_id_af))
        .order_by(TAcquisitionFramework.acquisition_framework_name)
        .all()
    )
    # keep only the AF ids that survived the AF-level filter
    list_id_af = [af.id_acquisition_framework for af in afs]
    afs_dict = []
    # get cruved for each AF and prepare dataset
    for af in afs:
        af_dict = af.as_dict(
            True,
            relationships=[
                "creator",
                "cor_af_actor",
                "nomenclature_actor_role",
                "organism",
                "role",
            ],
        )
        af_dict["cruved"] = af.get_object_cruved(
            user_cruved=user_cruved,
            id_object=af.id_acquisition_framework,
            ids_object_user=ids_afs_user,
            ids_object_organism=ids_afs_org,
        )
        af_dict["datasets"] = []
        af_dict["deletable"] = is_af_deletable(af.id_acquisition_framework)
        afs_dict.append(af_dict)
    # get cruved for each ds and push them in the af
    for d in datasets:
        dataset_dict = d.as_dict(
            recursif=True,
            relationships=[
                "creator",
                "cor_dataset_actor",
                "nomenclature_actor_role",
                "organism",
                "role",
            ],
        )
        # skip datasets whose AF was filtered out above
        if d.id_acquisition_framework not in list_id_af:
            continue
        dataset_dict["cruved"] = d.get_object_cruved(
            user_cruved=user_cruved,
            id_object=d.id_dataset,
            ids_object_user=ids_dataset_user,
            ids_object_organism=ids_dataset_organisms,
        )
        # dataset_dict["observation_count"] = (
        #     DB.session.query(Synthese.cd_nom).filter(Synthese.id_dataset == d.id_dataset).count()
        # )
        dataset_dict["deletable"] = is_dataset_deletable(d.id_dataset)
        af_of_dataset = get_af_from_id(d.id_acquisition_framework, afs_dict)
        af_of_dataset["datasets"].append(dataset_dict)
    afs_resp = {"data": afs_dict}
    if with_mtd_error:
        afs_resp["with_mtd_errors"] = True
    if not datasets:
        return afs_resp, 404
    return afs_resp
def user_is_in_dataset_actor(self, user):
    """Return True when this record's dataset is among the user's datasets."""
    user_dataset_ids = TDatasets.get_user_datasets(user)
    return self.id_dataset in user_dataset_ids
def get_af_and_ds_metadata(info_role):
    """
    Get all AF with their datasets
    The Cruved in only apply on dataset in order to see all the AF
    where the user have rights with its dataset
    Use in maplist
    Add the CRUVED permission for each row (Dataset and AD)

    .. :quickref: Metadata;

    :param info_role: add with kwargs
    :type info_role: TRole
    :returns: `dict{'data':list<AF with Datasets>, 'with_erros': <boolean>}`
    """
    with_mtd_error = False
    if current_app.config["CAS_PUBLIC"]["CAS_AUTHENTIFICATION"]:
        # synchronise the CA and JDD from the MTD WS
        try:
            mtd_utils.post_jdd_from_user(id_user=info_role.id_role,
                                         id_organism=info_role.id_organisme)
        except Exception as e:
            # best effort: the sync failure is reported in the response,
            # it must not break the metadata listing
            gunicorn_error_logger.info(e)
            log.error(e)
            with_mtd_error = True
    params = request.args.to_dict()
    params["orderby"] = "dataset_name"
    datasets = get_datasets_cruved(info_role, params, as_model=True)
    # object ids used to compute the per-object CRUVED below
    ids_dataset_user = TDatasets.get_user_datasets(info_role, only_user=True)
    ids_dataset_organisms = TDatasets.get_user_datasets(info_role, only_user=False)
    ids_afs_user = TAcquisitionFramework.get_user_af(info_role, only_user=True)
    ids_afs_org = TAcquisitionFramework.get_user_af(info_role, only_user=False)
    user_cruved = cruved_scope_for_user_in_module(
        id_role=info_role.id_role,
        module_code="METADATA",
    )[0]
    # get all af from the JDD filtered with cruved or af where users has rights
    ids_afs_cruved = [
        d.id_acquisition_framework
        for d in get_af_cruved(info_role, as_model=True)
    ]
    list_id_af = [d.id_acquisition_framework for d in datasets] + ids_afs_cruved
    afs = (DB.session.query(TAcquisitionFramework).filter(
        TAcquisitionFramework.id_acquisition_framework.in_(
            list_id_af)).order_by(
                TAcquisitionFramework.acquisition_framework_name).all())
    afs_dict = []
    # get cruved for each AF and prepare dataset
    for af in afs:
        af_dict = af.as_dict()
        af_dict["cruved"] = af.get_object_cruved(
            user_cruved=user_cruved,
            id_object=af.id_acquisition_framework,
            ids_object_user=ids_afs_user,
            ids_object_organism=ids_afs_org,
        )
        af_dict["datasets"] = []
        afs_dict.append(af_dict)
    # get cruved for each ds and push them in the af
    for d in datasets:
        dataset_dict = d.as_dict()
        dataset_dict["cruved"] = d.get_object_cruved(
            user_cruved=user_cruved,
            id_object=d.id_dataset,
            ids_object_user=ids_dataset_user,
            ids_object_organism=ids_dataset_organisms,
        )
        af_of_dataset = get_af_from_id(d.id_acquisition_framework, afs_dict)
        af_of_dataset["datasets"].append(dataset_dict)
    afs_resp = {"data": afs_dict}
    if with_mtd_error:
        afs_resp["with_mtd_errors"] = True
    if not datasets:
        return afs_resp, 404
    return afs_resp
def export_sinp(info_role):
    """
    Return the data (CSV) at SINP from pr_occtax.export_occtax_sinp view.
    If no parameter, return all the datasets allowed for the user.

    params:
        - id_dataset : integer
        - uuid_dataset: uuid

    :param info_role: role carrying id_role and tag_object_code
        ("1"/"2"/"3" data scope)
    :returns: tuple (file_name, rows as dicts, export columns, separator)
        consumed by the CSV export decorator
    """
    viewSINP = GenericTable('export_occtax_dlb', 'pr_occtax', None)
    q = DB.session.query(viewSINP.tableDef)
    params = request.args
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    # if params is empty and user not admin,
    # get the data of all allowed datasets
    if not params.get('id_dataset') and not params.get('uuid_dataset'):
        if info_role.tag_object_code != '3':
            # the view exposes dataset uuids (jddId), so convert allowed ids
            allowed_uuid = (str(TDatasets.get_uuid(id_dataset))
                            for id_dataset in allowed_datasets)
            q = q.filter(viewSINP.tableDef.columns.jddId.in_(allowed_uuid))
    # filter by dataset id or uuid
    else:
        # NOTE(review): if neither branch matches, id_dataset/uuid_dataset
        # are unbound below — unreachable given the outer condition, but
        # fragile; confirm
        if 'id_dataset' in params:
            id_dataset = int(params['id_dataset'])
            uuid_dataset = TDatasets.get_uuid(id_dataset)
        elif 'uuid_dataset' in params:
            id_dataset = TDatasets.get_id(params['uuid_dataset'])
            uuid_dataset = params['uuid_dataset']
        # if data_scope 1 or 2, check if the requested dataset is allowed
        if (info_role.tag_object_code == '1'
                or info_role.tag_object_code == '2'):
            if id_dataset not in allowed_datasets:
                raise InsufficientRightsError(
                    ('User "{}" cannot export dataset no "{}').format(
                        info_role.id_role, id_dataset), 403)
            elif info_role.tag_object_code == '1':
                # join on TCounting, TOccurrence, Treleve and corRoleOccurrence
                # to get users
                q = q.outerjoin(
                    CorCountingOccurrence,
                    viewSINP.tableDef.columns.permId ==
                    CorCountingOccurrence.unique_id_sinp_occtax
                ).join(
                    TOccurrencesOccurrence,
                    CorCountingOccurrence.id_occurrence_occtax ==
                    TOccurrencesOccurrence.id_occurrence_occtax).join(
                        TRelevesOccurrence,
                        TOccurrencesOccurrence.id_releve_occtax ==
                        TRelevesOccurrence.id_releve_occtax).outerjoin(
                            corRoleRelevesOccurrence,
                            TRelevesOccurrence.id_releve_occtax ==
                            corRoleRelevesOccurrence.columns.id_releve_occtax)
                # scope 1: keep only rows the user observed or digitised
                q = q.filter(
                    or_(
                        corRoleRelevesOccurrence.columns.id_role ==
                        info_role.id_role,
                        TRelevesOccurrence.id_digitiser == info_role.id_role))
        q = q.filter(viewSINP.tableDef.columns.jddId == str(uuid_dataset))
    data = q.all()
    export_columns = blueprint.config['export_columns']
    file_name = datetime.datetime.now().strftime('%Y-%m-%d-%Hh%Mm%S')
    return (filemanager.removeDisallowedFilenameChars(file_name),
            [viewSINP.as_dict(d) for d in data], export_columns, ';')
def user_is_in_dataset_actor(self, user):
    """
    Return True when this record's dataset belongs to the datasets the
    user may act on; scope "1" restricts the lookup to the user's own
    datasets.
    """
    restrict_to_own = user.value_filter == "1"
    dataset_ids = TDatasets.get_user_datasets(user, only_user=restrict_to_own)
    return self.id_dataset in dataset_ids