Ejemplo n.º 1
0
def export_metadata(info_role):
    """
        Route to export the metadata in CSV
        .. :quickref: Synthese;
        The table synthese is join with gn_synthese.v_metadata_for_export
        The column jdd_id is mandatory in the view gn_synthese.v_metadata_for_export

        POST parameters: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view
    """
    # Build the filter dict from the query string; getlist() keeps every value
    # of multi-valued parameters. Iterating the MultiDict directly yields its
    # keys — the previous .items() value was fetched but never used.
    filters = {key: request.args.getlist(key) for key in request.args}

    metadata_view = GenericTable("v_metadata_for_export", "gn_synthese", None)
    # Join the synthese view with the metadata view on the configured
    # id_dataset column, de-duplicating datasets with distinct().
    q = DB.session.query(
        distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef
    ).join(
        metadata_view.tableDef,
        getattr(
            metadata_view.tableDef.columns,
            current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
        )
        == VSyntheseForWebApp.id_dataset,
    )

    # Apply the user's filters and CRUVED permissions.
    q = synthese_query.filter_query_all_filters(
        VSyntheseForWebApp, q, filters, info_role
    )

    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=[metadata_view.as_dict(d) for d in q.all()],
        separator=";",
        columns=[db_col.key for db_col in metadata_view.tableDef.columns],
    )
Ejemplo n.º 2
0
def export_metadata(info_role):
    """
        Route to export the metadata in CSV
        The table synthese is join with gn_synthese.v_metadata_for_export
        The column jdd_id is mandatory in the view gn_synthese.v_metadata_for_export

        Parameters :
            Via POST: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view
    """
    # Collect every query-string parameter together with all of its values.
    filters = {}
    for key, value in request.args.items():
        filters[key] = request.args.getlist(key)

    metadata_view = GenericTable("v_metadata_for_export", "gn_synthese", None)
    dataset_column = getattr(
        metadata_view.tableDef.columns,
        current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
    )
    # Join synthese with the metadata view on the configured dataset column.
    query = DB.session.query(
        distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef
    ).join(metadata_view.tableDef, dataset_column == VSyntheseForWebApp.id_dataset)

    # Apply user filters and CRUVED restrictions.
    query = synthese_query.filter_query_all_filters(
        VSyntheseForWebApp, query, filters, info_role
    )

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    return to_csv_resp(
        file_name,
        data=[metadata_view.as_dict(row) for row in query.all()],
        separator=";",
        columns=[db_col.key for db_col in metadata_view.tableDef.columns],
    )
Ejemplo n.º 3
0
def export(info_role):
    """
        Export the occtax releves as CSV, GeoJSON or shapefile.

        The view name, geometry column, exported columns and SRID are read
        from the blueprint configuration. The output format is chosen with
        the ``format`` query-string parameter (default: geojson).
    """
    export_view_name = blueprint.config["export_view_name"]
    export_geom_column = blueprint.config["export_geom_columns_name"]
    export_columns = blueprint.config["export_columns"]
    export_srid = blueprint.config["export_srid"]

    export_view = GenericTable(
        export_view_name, "pr_occtax", export_geom_column, export_srid
    )
    # Restrict the query to what the user is allowed to read (CRUVED).
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role, from_generic_table=True)
    q = get_query_occtax_filters(request.args, export_view, q, from_generic_table=True)

    data = q.all()

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    # .get() replaces the membership test + conditional expression.
    export_format = request.args.get("format", "geojson")
    if export_format == "csv":
        # Fall back to every column of the view when none is configured.
        columns = (
            export_columns
            if len(export_columns) > 0
            else [db_col.key for db_col in export_view.db_cols]
        )
        return to_csv_resp(
            file_name, [export_view.as_dict(d) for d in data], columns, ";"
        )
    elif export_format == "geojson":
        results = FeatureCollection(
            [export_view.as_geofeature(d, columns=export_columns) for d in data]
        )
        return to_json_resp(
            results, as_file=True, filename=file_name, indent=4, extension="geojson"
        )
    else:
        # Shapefile export: clear previous files, write the shapes, send the zip.
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"), excluded_files=[".gitkeep"]
            )
            db_cols = [
                db_col for db_col in export_view.db_cols if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols, data=data, dir_path=dir_path, file_name=file_name
            )

            return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)

        except GeonatureApiError as e:
            message = str(e)

        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/occtax",
        )
Ejemplo n.º 4
0
def get_taxon_tree():
    """
    Get taxon tree
    .. :quickref: Synthese;
    """
    # Whole-view read: serialize every row of the taxon tree view.
    tree_view = GenericTable(
        "v_tree_taxons_synthese", "gn_synthese", geometry_field=None
    )
    rows = DB.session.query(tree_view.tableDef).all()
    return [tree_view.as_dict(row) for row in rows]
Ejemplo n.º 5
0
def get_one_synthese(id_synthese):
    """
        Get one synthese record for web app with all decoded nomenclature
        .. :quickref: Synthese;

        :params id_synthese:
        :type id_synthese: int
    """
    metadata_view = GenericTable("v_metadata_for_export", "gn_synthese", None)
    metadata_columns = metadata_view.tableDef.columns
    actor_col = getattr(
        metadata_columns, current_app.config["SYNTHESE"]["EXPORT_METADATA_ACTOR_COL"]
    )
    dataset_col = getattr(
        metadata_columns,
        current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
    )
    # Fetch the record together with the actors of its dataset.
    query = (
        DB.session.query(SyntheseOneRecord, actor_col)
        .filter(SyntheseOneRecord.id_synthese == id_synthese)
        .join(metadata_view.tableDef, dataset_col == SyntheseOneRecord.id_dataset)
    )
    try:
        record, actors = query.one()
        synthese_as_dict = record.as_dict(True)
        synthese_as_dict["actors"] = actors
        return synthese_as_dict
    except exc.NoResultFound:
        # No record (or no metadata join match) for this id.
        return None
Ejemplo n.º 6
0
def get_one_synthese(id_synthese):
    """Get one synthese record for web app with all decoded nomenclature

    .. :quickref: Synthese; Get one synthese

    It returns a dict composed of the following::

        'data' dict: Array of dict (with geojson key)
        'nb_total' int: Number of observations
        'nb_obs_limited' bool: Is number of observations capped

    :param int id_synthese:Synthese to be queried
    :>jsonarr array synthese_as_dict: One synthese with geojson key, see above
    """
    synthese_config = current_app.config["SYNTHESE"]
    metadata_view = GenericTable("v_metadata_for_export", "gn_synthese", None)
    cols = metadata_view.tableDef.columns
    q = (
        DB.session.query(
            SyntheseOneRecord,
            getattr(cols, synthese_config["EXPORT_METADATA_ACTOR_COL"]),
        )
        .filter(SyntheseOneRecord.id_synthese == id_synthese)
        .join(
            metadata_view.tableDef,
            getattr(cols, synthese_config["EXPORT_METADATA_ID_DATASET_COL"])
            == SyntheseOneRecord.id_dataset,
        )
    )
    try:
        record, actors = q.one()
        synthese_as_dict = record.as_dict(True)
        synthese_as_dict["actors"] = actors
        return synthese_as_dict
    except exc.NoResultFound:
        return None
Ejemplo n.º 7
0
def get_one_synthese(id_synthese):
    """
        Return one synthese record with its nomenclatures decoded
        for the web application.
    """
    metadata_view = GenericTable("v_metadata_for_export", "gn_synthese", None)
    config = current_app.config["SYNTHESE"]
    actor_column = getattr(
        metadata_view.tableDef.columns, config["EXPORT_METADATA_ACTOR_COL"]
    )
    dataset_column = getattr(
        metadata_view.tableDef.columns, config["EXPORT_METADATA_ID_DATASET_COL"]
    )
    # One row expected: the record plus the actors of its dataset.
    query = (
        DB.session.query(SyntheseOneRecord, actor_column)
        .filter(SyntheseOneRecord.id_synthese == id_synthese)
        .join(metadata_view.tableDef, dataset_column == SyntheseOneRecord.id_dataset)
    )
    try:
        row = query.one()
        result = row[0].as_dict(True)
        result["actors"] = row[1]
        return result
    except exc.NoResultFound:
        return None
Ejemplo n.º 8
0
def generate_swagger_spec(id_export):
    """
        Dynamically generate the swagger specification of an export.

        :param id_export: primary key of the export to describe
        :raises sqlalchemy.orm.exc.NoResultFound: when no export matches
        :return: list of swagger query-parameter descriptions
    """
    swagger_parameters = []
    # .one() raises NoResultFound / MultipleResultsFound itself; the previous
    # try/except that only re-raised the same exception was a no-op.
    export = Export.query.filter(Export.id == id_export).one()

    export_table = GenericTable(tableName=export.view_name,
                                schemaName=export.schema_name,
                                geometry_field=export.geometry_field,
                                srid=export.geometry_srid)

    for column in export_table.tableDef.columns:
        # Map the SQLAlchemy column type to a swagger type, defaulting to
        # string. Named col_type to avoid shadowing the `type` builtin.
        col_type = SWAGGER_TYPE_COR.get(
            column.type.__class__.__name__, {"type": "string"}
        )
        swagger_parameters.append({
            "in": "query",
            "name": column.name,
            "description": column.comment,
            **col_type
        })
    # Pagination parameters shared by every export.
    general_params = [{
        "in": "query",
        "name": "limit",
        "type": "int",
        "description": "nombre maximum de résultats à retourner"
    }, {
        "in": "query",
        "name": "offset",
        "type": "int",
        "description": "numéro de page"
    }]
    return general_params + swagger_parameters
Ejemplo n.º 9
0
def get_taxon_tree():
    """Return every row of the synthese taxon-tree view as dicts."""
    table = GenericTable(
        "v_tree_taxons_synthese", "gn_synthese", geometry_field=None
    )
    return [table.as_dict(row) for row in DB.session.query(table.tableDef).all()]
Ejemplo n.º 10
0
def export_observations_web(info_role):
    """
        Optimized route for observations web export
        This view is customisable by the administrator
        Some columns are mandatory: id_synthese, geojson and geojson_local to generate the exported files

        Parameters :
            Via POST: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view
            Via GET: 'export_format' str<'csv', 'geojson', 'shapefiles'>

    """
    params = request.args
    export_view = GenericTable(
        "v_synthese_for_export",
        "gn_synthese",
        "the_geom_local",
        current_app.config["LOCAL_SRID"],
    )
    # .get() replaces the membership test + default assignment (default: csv).
    export_format = params.get("export_format", "csv")

    # get list of id synthese from POST
    id_list = request.get_json()

    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)

    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns.idSynthese.in_(id_list)
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(info_role.id_role, module_code="SYNTHESE")[
        0
    ]
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser and observer in the v_synthese_for_export_view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"],
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_DIGITISER_COL"
            ],
            with_generic_table=True,
        )
    # cap the number of exported observations
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )

    elif export_format == "geojson":
        features = []
        for r in results:
            # the view stores the 4326 geometry as a python-literal geojson string
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        # shapefile export: clean the target directory, write, send the zip
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"), excluded_files=[".gitkeep"]
            )

            dir_path = str(ROOT_DIR / "backend/static/shapefiles")

            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)

        except GeonatureApiError as e:
            message = str(e)

        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
        )
Ejemplo n.º 11
0
def export(info_role):
    """
        Export the occtax releves as CSV, GeoJSON or shapefile, depending on
        the ``format`` query-string parameter (default: geojson).
    """
    export_view_name = blueprint.config["export_view_name"]
    export_geom_column = blueprint.config["export_geom_columns_name"]
    export_columns = blueprint.config["export_columns"]
    export_srid = blueprint.config["export_srid"]

    export_view = GenericTable(export_view_name, "pr_occtax",
                               export_geom_column, export_srid)
    # Restrict the query to what the user is allowed to read (CRUVED).
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role,
                                             from_generic_table=True)
    q = get_query_occtax_filters(request.args,
                                 export_view,
                                 q,
                                 from_generic_table=True)

    data = q.all()

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    # .get() replaces the membership test + conditional expression.
    export_format = request.args.get("format", "geojson")
    if export_format == "csv":
        # Fall back to every column of the view when none is configured.
        columns = (export_columns if len(export_columns) > 0 else
                   [db_col.key for db_col in export_view.db_cols])
        return to_csv_resp(file_name, [export_view.as_dict(d) for d in data],
                           columns, ";")
    elif export_format == "geojson":
        results = FeatureCollection([
            export_view.as_geofeature(d, columns=export_columns) for d in data
        ])
        return to_json_resp(results,
                            as_file=True,
                            filename=file_name,
                            indent=4,
                            extension="geojson")
    else:
        # Shapefile export: clean the target directory, write, send the zip.
        try:
            filemanager.delete_recursively(str(ROOT_DIR /
                                               "backend/static/shapefiles"),
                                           excluded_files=[".gitkeep"])
            db_cols = [
                db_col for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(db_cols=db_cols,
                                 data=data,
                                 dir_path=dir_path,
                                 file_name=file_name)

            return send_from_directory(dir_path,
                                       file_name + ".zip",
                                       as_attachment=True)

        except GeonatureApiError as e:
            message = str(e)

        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/occtax",
        )
Ejemplo n.º 12
0
def get_taxon_tree():
    """Fetch and serialize the whole gn_synthese.v_tree_taxons_synthese view."""
    tree = GenericTable('v_tree_taxons_synthese',
                        'gn_synthese',
                        geometry_field=None)
    rows = DB.session.query(tree.tableDef).all()
    return [tree.as_dict(row) for row in rows]
Ejemplo n.º 13
0
def export_all_habitats(info_role, export_format='csv',):
    """
        Download all stations
        The route is in post to avoid a too large query string

        .. :quickref: Occhab;

    """

    # POST body holds the station ids to export (key 'idsStation').
    data = request.get_json()

    export_view = GenericTable(
        tableName="v_export_sinp",
        schemaName="pr_occhab",
        geometry_field=None,
        srid=current_app.config["LOCAL_SRID"],
    )

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    db_cols_for_shape = []
    columns_to_serialize = []
    # Keep only the configured export columns; the geometry column is
    # serialized but excluded from the shapefile attribute table.
    # (NB: 'EXPORT_COLUMS' is the key spelling actually used in the config.)
    for db_col in export_view.db_cols:
        if db_col.key in blueprint.config['EXPORT_COLUMS']:
            if db_col.key != 'geometry':
                db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)
    results = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns.id_station.in_(data['idsStation'])
    ).limit(
        blueprint.config['NB_MAX_EXPORT']
    )
    if export_format == 'csv':
        # Serialize only the configured columns so the rows match the CSV
        # header (previously columns=[] serialized every column while the
        # header listed only columns_to_serialize).
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == 'geojson':
        features = []
        for r in results:
            features.append(
                Feature(
                    geometry=json.loads(r.geojson),
                    properties=export_view.as_dict(
                        r, columns=columns_to_serialize)
                )
            )
        return to_json_resp(
            FeatureCollection(features),
            as_file=True,
            filename=file_name,
            indent=4
        )
    else:
        # Shapefile export: clean the target directory, write, send the zip.
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"), excluded_files=[".gitkeep"]
            )

            dir_path = str(ROOT_DIR / "backend/static/shapefiles")
            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col="geojson",
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)
        except GeonatureApiError as e:
            message = str(e)

        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] +
            "/#/" + blueprint.config['MODULE_URL'],
        )
Ejemplo n.º 14
0
def export(info_role):
    """
        Export occtax releves as CSV, GeoJSON or shapefile.

        The output format is chosen with the 'format' query-string
        parameter (default: geojson).
    """
    export_view_name = blueprint.config['export_view_name']
    export_geom_column = blueprint.config['export_geom_columns_name']
    export_columns = blueprint.config['export_columns']
    export_srid = blueprint.config['export_srid']

    export_view = GenericTable(export_view_name, 'pr_occtax',
                               export_geom_column, export_srid)

    # Restrict the query to what the user is allowed to read (CRUVED).
    releve_repository = ReleveRepository(export_view)
    q = releve_repository.get_filtered_query(info_role,
                                             from_generic_table=True)
    q = get_query_occtax_filters(request.args,
                                 export_view,
                                 q,
                                 from_generic_table=True)

    data = q.all()

    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    # .get() replaces the membership test + conditional expression.
    export_format = request.args.get('format', 'geojson')
    if export_format == 'csv':
        # Fall back to every column of the view when none is configured.
        columns = export_columns if len(export_columns) > 0 else [
            db_col.key for db_col in export_view.db_cols
        ]
        return to_csv_resp(file_name, [export_view.as_dict(d) for d in data],
                           columns, ';')
    elif export_format == 'geojson':
        results = FeatureCollection([
            export_view.as_geofeature(d, columns=export_columns) for d in data
        ])
        return to_json_resp(results,
                            as_file=True,
                            filename=file_name,
                            indent=4)
    else:
        # Shapefile export: clean the target directory, write, send the zip.
        try:
            filemanager.delete_recursively(str(ROOT_DIR /
                                               'backend/static/shapefiles'),
                                           excluded_files=['.gitkeep'])
            db_cols = [
                db_col for db_col in export_view.db_cols
                if db_col.key in export_columns
            ]
            dir_path = str(ROOT_DIR / 'backend/static/shapefiles')
            export_view.as_shape(
                db_cols=db_cols,
                data=data,
                dir_path=dir_path,
                file_name=file_name,
            )

            return send_from_directory(dir_path,
                                       file_name + '.zip',
                                       as_attachment=True)

        except GeonatureApiError as e:
            message = str(e)

        return render_template('error.html',
                               error=message,
                               redirect=current_app.config['URL_APPLICATION'] +
                               "/#/occtax")
Ejemplo n.º 15
0
def export_observations_web(info_role):
    """
        Optimized route for observations web export
        .. :quickref: Synthese;
        This view is customisable by the administrator
        Some columns are mandatory: id_synthese, geojson and geojson_local to generate the exported files

        POST parameters: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view

        :query str export_format: str<'csv', 'geojson', 'shapefiles'>

    """
    params = request.args
    export_view = GenericTable(
        "v_synthese_for_export",
        "gn_synthese",
        "the_geom_local",
        current_app.config["LOCAL_SRID"],
    )
    # .get() replaces the membership test + default assignment (default: csv).
    export_format = params.get("export_format", "csv")

    # get list of id synthese from POST
    id_list = request.get_json()

    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)

    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns.idSynthese.in_(id_list)
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(info_role.id_role, module_code="SYNTHESE")[
        0
    ]
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser and observer in the v_synthese_for_export_view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"],
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_DIGITISER_COL"
            ],
            with_generic_table=True,
        )
    # cap the number of exported observations
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )

    elif export_format == "geojson":
        features = []
        for r in results:
            # the view stores the 4326 geometry as a python-literal geojson string
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        # shapefile export: clean the target directory, write, send the zip
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"), excluded_files=[".gitkeep"]
            )

            dir_path = str(ROOT_DIR / "backend/static/shapefiles")

            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)

        except GeonatureApiError as e:
            message = str(e)

        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
        )
Ejemplo n.º 16
0
def export_taxon_web(info_role):
    """Optimized route for taxon web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator
    Some columns are mandatory: cd_ref

    POST parameters: Use a list of cd_ref (in POST parameters)
         to filter the v_synthese_taxon_for_export_view

    :query str export_format: str<'csv'>

    """

    taxon_view = GenericTable(
        "v_synthese_taxon_for_export_view",
        "gn_synthese",
        None
    )
    columns = taxon_view.tableDef.columns
    # Conformance check of the view: explicit test instead of `assert`,
    # which is silently stripped when Python runs with -O.
    if not hasattr(columns, "cd_ref"):
        return {"msg": """
                        View v_synthese_taxon_for_export_view
                        must have a cd_ref column \n
                        trace: {}
                        """.format("cd_ref column not found")
                }, 500

    # list of cd_ref to export, from the POST body
    id_list = request.get_json()

    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]

    # per-taxon aggregates over the selected observations
    subq = DB.session.query(
        VSyntheseForWebApp.cd_ref,
        func.count(distinct(
            VSyntheseForWebApp.id_synthese
        )).label("nb_obs"),
        func.min(VSyntheseForWebApp.date_min).label("date_min"),
        func.max(VSyntheseForWebApp.date_max).label("date_max")
    ).filter(
        VSyntheseForWebApp.id_synthese.in_(id_list)
    ).group_by(VSyntheseForWebApp.cd_ref)

    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser
        #   and observer in the v_synthese_for_export_view
        subq = synthese_query.filter_query_with_cruved(
            VSyntheseForWebApp,
            subq,
            info_role,
            id_synthese_column="id_synthese",
            id_dataset_column="id_dataset",
            observers_column="observers",
            id_digitiser_column="id_digitiser",
            with_generic_table=False,
        )
    subq = subq.subquery()

    # join the taxon view with the aggregates
    q = DB.session.query(
        *columns,
        subq.c.nb_obs,
        subq.c.date_min,
        subq.c.date_max
    ).join(
        subq,
        subq.c.cd_ref == columns.cd_ref
    )

    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=serializeQuery(q.all(), q.column_descriptions),
        separator=";",
        columns=[db_col.key for db_col in columns] + ["nb_obs", "date_min", "date_max"]
    )
Ejemplo n.º 17
0
def export_sinp(info_role):
    """ Return the data (CSV) at SINP format
        from the pr_occtax.export_occtax_sinp view.
        If no parameter is given, export all the datasets allowed for the user.
        params:
        - id_dataset : integer
        - uuid_dataset: uuid

        NOTE(review): this returns a tuple of to_csv_resp-style arguments,
        not a Flask response — presumably the caller builds the response;
        confirm against the route registration.
    """
    viewSINP = GenericTable('export_occtax_dlb', 'pr_occtax', None)
    q = DB.session.query(viewSINP.tableDef)
    params = request.args
    allowed_datasets = TDatasets.get_user_datasets(info_role)
    # if params in empty and user not admin,
    #    get the data off all dataset allowed
    if not params.get('id_dataset') and not params.get('uuid_dataset'):
        # tag_object_code '3' presumably means full/admin data scope — TODO confirm
        if info_role.tag_object_code != '3':
            allowed_uuid = (str(TDatasets.get_uuid(id_dataset))
                            for id_dataset in allowed_datasets)
            q = q.filter(viewSINP.tableDef.columns.jddId.in_(allowed_uuid))
    # filter by dataset id or uuid
    # NOTE(review): if 'id_dataset' is present but empty (e.g. ?id_dataset=),
    # the branch above is taken; but '?id_dataset=x&uuid_dataset=y' with a
    # non-numeric id raises ValueError here, and if neither key is literally
    # present, uuid_dataset below would be unbound — verify caller input.
    else:
        if 'id_dataset' in params:
            id_dataset = int(params['id_dataset'])
            uuid_dataset = TDatasets.get_uuid(id_dataset)
        elif 'uuid_dataset' in params:
            id_dataset = TDatasets.get_id(params['uuid_dataset'])
            uuid_dataset = params['uuid_dataset']
        # if data_scope 1 or 2, check if the dataset requested is allowed
        if (info_role.tag_object_code == '1'
                or info_role.tag_object_code == '2'):
            if id_dataset not in allowed_datasets:
                raise InsufficientRightsError(
                    ('User "{}" cannot export dataset no "{}').format(
                        info_role.id_role, id_dataset), 403)
            elif info_role.tag_object_code == '1':
                # join on TCounting, TOccurrence, Treleve and corRoleOccurrence
                #   to get users
                q = q.outerjoin(
                    CorCountingOccurrence, viewSINP.tableDef.columns.permId ==
                    CorCountingOccurrence.unique_id_sinp_occtax
                ).join(
                    TOccurrencesOccurrence,
                    CorCountingOccurrence.id_occurrence_occtax ==
                    TOccurrencesOccurrence.id_occurrence_occtax).join(
                        TRelevesOccurrence,
                        TOccurrencesOccurrence.id_releve_occtax ==
                        TRelevesOccurrence.id_releve_occtax).outerjoin(
                            corRoleRelevesOccurrence,
                            TRelevesOccurrence.id_releve_occtax ==
                            corRoleRelevesOccurrence.columns.id_releve_occtax)
                # scope 1: keep only releves observed or digitised by the user
                q = q.filter(
                    or_(
                        corRoleRelevesOccurrence.columns.id_role ==
                        info_role.id_role,
                        TRelevesOccurrence.id_digitiser == info_role.id_role))
        q = q.filter(viewSINP.tableDef.columns.jddId == str(uuid_dataset))
    data = q.all()

    export_columns = blueprint.config['export_columns']

    file_name = datetime.datetime.now().strftime('%Y-%m-%d-%Hh%Mm%S')
    return (filemanager.removeDisallowedFilenameChars(file_name),
            [viewSINP.as_dict(d) for d in data], export_columns, ';')