Exemple #1
0
def general_stats(info_role):
    """Compute global statistics on the synthese.

    .. :quickref: Synthese;

    Returns, for the data visible to the user:
        - the number of observations
        - the number of distinct species
        - the number of distinct observers
        - the number of datasets the user is allowed to see
    """
    allowed_datasets = get_datasets_cruved(info_role)
    # one aggregate query, restricted by the user's cruved
    counts_query = synthese_query.filter_query_with_cruved(
        Synthese,
        DB.session.query(
            func.count(Synthese.id_synthese),
            func.count(func.distinct(Synthese.cd_nom)),
            func.count(func.distinct(Synthese.observers)),
        ),
        info_role,
    )
    nb_data, nb_species, nb_observers = counts_query.one()
    return {
        "nb_data": nb_data,
        "nb_species": nb_species,
        "nb_observers": nb_observers,
        "nb_dataset": len(allowed_datasets),
    }
Exemple #2
0
def general_stats(info_role):
    """Compute global statistics on the synthese.

    Returns, for the data visible to the user:
        - the number of observations
        - the number of distinct species
        - the number of distinct observers
        - the number of datasets the user is allowed to see
    """
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    # NOTE(review): "nb_data" counts non-null id_dataset rows here, while a
    # sibling implementation counts id_synthese — confirm which is intended.
    counts_query = DB.session.query(
        func.count(Synthese.id_dataset),
        func.count(func.distinct(Synthese.cd_nom)),
        func.count(func.distinct(Synthese.observers)),
    )
    # restrict the aggregates to what the user's cruved allows
    counts_query = synthese_query.filter_query_with_cruved(
        Synthese, counts_query, info_role
    )
    nb_data, nb_species, nb_observers = counts_query.one()
    return {
        "nb_data": nb_data,
        "nb_species": nb_species,
        "nb_observers": nb_observers,
        "nb_dataset": len(allowed_datasets),
    }
Exemple #3
0
def export_observations_web(info_role):
    """Optimized route for observations web export.

    This view is customisable by the administrator.
    Some columns are mandatory: id_synthese, geojson and geojson_local
    to generate the exported files.

    Parameters:
        Via POST: a list of id_synthese used to filter the
            v_synthese_for_export view
        Via GET: 'export_format' str<'csv', 'geojson', 'shapefiles'>
    """
    params = request.args
    # default export format is csv
    export_format = params.get("export_format", "csv")
    export_view = GenericTable(
        "v_synthese_for_export",
        "gn_synthese",
        "the_geom_local",
        current_app.config["LOCAL_SRID"],
    )

    # list of id_synthese to export, sent in the POST body
    id_list = request.get_json()

    db_cols_for_shape = []
    columns_to_serialize = []
    # keep only the columns the administrator selected for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)

    # Filter on the configured id_synthese column, consistently with the
    # cruved filter below (was hardcoded to "idSynthese", which could
    # diverge from EXPORT_ID_SYNTHESE_COL)
    id_synthese_col = current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"]
    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns[id_synthese_col].in_(id_list)
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(info_role.id_role, module_code="SYNTHESE")[
        0
    ]
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser and observer in the v_synthese_for_export_view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=id_synthese_col,
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_DIGITISER_COL"
            ],
            with_generic_table=True,
        )
    # cap the export size to avoid unbounded result sets
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])

    # timestamped, sanitized file name for the generated export
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )

    elif export_format == "geojson":
        features = []
        for r in results:
            # the geojson column stores a dict literal; parse it safely
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        try:
            # compute the target directory once, then clean it before writing
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            filemanager.delete_recursively(dir_path, excluded_files=[".gitkeep"])

            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)

        except GeonatureApiError as e:
            message = str(e)

        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
        )
Exemple #4
0
def export_observations_web(info_role):
    """Optimized route for observations web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator.
    Some columns are mandatory: id_synthese, geojson and geojson_local
    to generate the exported files.

    POST parameters: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view

    :query str export_format: str<'csv', 'geojson', 'shapefiles'>
    """
    params = request.args
    # default export format is csv
    export_format = params.get("export_format", "csv")
    export_view = GenericTableGeo(
        tableName="v_synthese_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
        geometry_field=None,
        srid=current_app.config["LOCAL_SRID"],
    )

    # list of id_synthese to export, sent in the POST body
    id_list = request.get_json()

    db_cols_for_shape = []
    columns_to_serialize = []
    # keep only the columns the administrator selected for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)

    # filter on the configured id_synthese column of the export view
    id_synthese_col = current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"]
    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns[id_synthese_col].in_(id_list)
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(info_role.id_role, module_code="SYNTHESE")[
        0
    ]
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser and observer in the v_synthese_for_export_view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=id_synthese_col,
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_DIGITISER_COL"
            ],
            with_generic_table=True,
        )
    # cap the export size to avoid unbounded result sets
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])

    # timestamped, sanitized file name for the generated export
    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )

    elif export_format == "geojson":
        features = []
        for r in results:
            # the geojson column stores a dict literal; parse it safely
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        try:
            # compute the target directory once, then clean it before writing
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            filemanager.delete_recursively(dir_path, excluded_files=[".gitkeep"])

            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)

        except GeonatureApiError as e:
            message = str(e)

        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
        )
Exemple #5
0
def export_taxon_web(info_role):
    """Optimized route for taxon web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator.
    The cd_ref column is mandatory in the export view.

    POST parameters: Use a list of cd_ref (in POST parameters)
         to filter the v_synthese_taxon_for_export_view

    :query str export_format: str<'csv'>
    """

    taxon_view = GenericTable(
        tableName="v_synthese_taxon_for_export_view",
        schemaName="gn_synthese",
        engine=DB.engine
    )
    columns = taxon_view.tableDef.columns
    # Conformity check of v_synthese_taxon_for_export_view.
    # NOTE: an explicit test replaces the previous `assert`, which was
    # silently stripped (and the check skipped) when running under `python -O`.
    if not hasattr(columns, "cd_ref"):
        return {
            "msg": (
                "View v_synthese_taxon_for_export_view "
                "must have a cd_ref column"
            )
        }, 500

    # list of id_synthese to export, sent in the POST body
    id_list = request.get_json()

    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]

    # per-taxon aggregates over the selected observations
    subq = DB.session.query(
        VSyntheseForWebApp.cd_ref,
        func.count(distinct(
            VSyntheseForWebApp.id_synthese
        )).label("nb_obs"),
        func.min(VSyntheseForWebApp.date_min).label("date_min"),
        func.max(VSyntheseForWebApp.date_max).label("date_max")
    ).filter(
        VSyntheseForWebApp.id_synthese.in_(id_list)
    ).group_by(VSyntheseForWebApp.cd_ref)

    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser
        #   and observer in the v_synthese_for_export_view
        subq = synthese_query.filter_query_with_cruved(
            VSyntheseForWebApp,
            subq,
            info_role,
            id_synthese_column="id_synthese",
            id_dataset_column="id_dataset",
            observers_column="observers",
            id_digitiser_column="id_digitiser",
            with_generic_table=False,
        )
    subq = subq.subquery()

    # join the taxon export view with its aggregates on cd_ref
    q = DB.session.query(
        *columns,
        subq.c.nb_obs,
        subq.c.date_min,
        subq.c.date_max
    ).join(
        subq,
        subq.c.cd_ref == columns.cd_ref
    )

    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=serializeQuery(q.all(), q.column_descriptions),
        separator=";",
        columns=[db_col.key for db_col in columns] +
        ["nb_obs", "date_min", "date_max"]
    )
Exemple #6
0
def filter_query_all_filters(model, q, filters, user):
    """Return a query filtered with the cruved and all
    the filters available in the synthese form.

    Parameters:
        - model: the SQLAlchemy model (or mapped view) to filter on
        - q (SQLAlchemyQuery): an SQLAlchemy query
        - filters (dict): a dict of filters (values are lists); consumed
          entries are popped, remaining keys are applied as generic
          column filters
        - user (User): a user object from User

    Returns the filtered query.
    """
    q = filter_query_with_cruved(model, q, user)

    if "observers" in filters:
        q = q.filter(model.observers.ilike("%" + filters.pop("observers")[0] + "%"))

    if "date_min" in filters:
        q = q.filter(model.date_min >= filters.pop("date_min")[0])

    if "date_max" in filters:
        # NOTE(review): compares date_min against the requested date_max —
        # presumably to keep observations *starting* before the end bound.
        q = q.filter(model.date_min <= filters.pop("date_max")[0])

    if "id_acquisition_frameworks" in filters:
        q = q.join(
            TAcquisitionFramework,
            model.id_acquisition_framework
            == TAcquisitionFramework.id_acquisition_framework,
        )
        q = q.filter(
            TAcquisitionFramework.id_acquisition_framework.in_(
                filters.pop("id_acquisition_frameworks")
            )
        )

    if "geoIntersection" in filters:
        # Intersect with the geom(s) sent from the map.
        # Pop "radius" once, before the loop: every geometry is treated as a
        # circle center when a radius is given (the previous code popped it
        # inside the loop, so only the first geometry could be a circle, and
        # referenced `wkt` before assignment — an UnboundLocalError).
        radius = filters.pop("radius")[0] if "radius" in filters else None
        ors = []
        for str_wkt in filters["geoIntersection"]:
            shape = loads(str_wkt)
            if radius is not None:
                # the geom is a circle: buffer the point by the radius
                shape = circle_from_point(shape, float(radius))
            geom_wkb = from_shape(shape, srid=4326)
            ors.append(model.the_geom_4326.ST_Intersects(geom_wkb))

        q = q.filter(or_(*ors))
        filters.pop("geoIntersection")

    if "period_start" in filters and "period_end" in filters:
        period_start = filters.pop("period_start")[0]
        period_end = filters.pop("period_end")[0]
        # match observations whose start OR end date falls in the
        # (day-month) period, regardless of the year
        q = q.filter(
            or_(
                func.gn_commons.is_in_period(
                    func.date(model.date_min),
                    func.to_date(period_start, "DD-MM"),
                    func.to_date(period_end, "DD-MM"),
                ),
                func.gn_commons.is_in_period(
                    func.date(model.date_max),
                    func.to_date(period_start, "DD-MM"),
                    func.to_date(period_end, "DD-MM"),
                ),
            )
        )
    q, filters = filter_taxonomy(model, q, filters)

    # generic filters: remaining keys are treated as column names
    join_on_cor_area = False
    for colname, value in filters.items():
        if colname.startswith("area"):
            # join CorAreaSynthese only once, even with several area filters
            if not join_on_cor_area:
                q = q.join(
                    CorAreaSynthese, CorAreaSynthese.id_synthese == model.id_synthese
                )
                join_on_cor_area = True
            q = q.filter(CorAreaSynthese.id_area.in_(value))
        elif colname.startswith("modif_since_validation"):
            q = q.filter(model.meta_update_date > model.validation_date)
        else:
            col = getattr(model.__table__.columns, colname)
            q = q.filter(col.in_(value))
    return q