def get_taxon_tree():
    """Return every row of the synthese taxon tree view as a list of dicts.

    .. :quickref: Synthese;
    """
    tree_view = GenericTable(
        tableName="v_tree_taxons_synthese",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    rows = DB.session.query(tree_view.tableDef).all()
    return [tree_view.as_dict(row) for row in rows]
def export_metadata(info_role):
    """Route to export the metadata in CSV

    .. :quickref: Synthese;

    The table synthese is joined with gn_synthese.v_metadata_for_export.
    The column jdd_id is mandatory in the view
    gn_synthese.v_metadata_for_export.

    POST parameters: Use a list of id_synthese (in POST parameters)
    to filter the v_synthese_for_export_view

    :param info_role: role/permission object forwarded to the CRUVED filters
    :return: a CSV HTTP response built by ``to_csv_resp``
    """
    # Filters may arrive as parsed JSON, as a raw body (python 3.5 compat),
    # or as query-string parameters.
    if request.json:
        filters = request.json
    elif request.data:
        # decode byte to str - compat python 3.5
        filters = json.loads(request.data.decode("utf-8"))
    else:
        filters = {
            key: request.args.getlist(key) for key, value in request.args.items()
        }
    metadata_view = GenericTable(
        tableName="v_metadata_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    # NOTE(review): a DB.session.query(...).join(...) was previously built
    # here and immediately discarded; this select() is the query actually
    # used, with the join added through SyntheseQuery.add_join below.
    q = select([distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef])
    synthese_query_class = SyntheseQuery(VSyntheseForWebApp, q, filters)
    synthese_query_class.add_join(
        metadata_view.tableDef,
        getattr(
            metadata_view.tableDef.columns,
            current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
        ),
        VSyntheseForWebApp.id_dataset,
    )
    synthese_query_class.filter_query_all_filters(info_role)
    data = DB.engine.execute(synthese_query_class.query)
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=[metadata_view.as_dict(d) for d in data],
        separator=";",
        columns=[db_col.key for db_col in metadata_view.tableDef.columns],
    )
def get_attribution_result(params):
    """Count attributions and realisations matching the given filters.

    Queries the view oeasc_chasse.v_custom_result_attribution.

    :param dict params: mapping of column name -> value (scalar or list).
        Keys that are not columns of the view, or whose value is None/[],
        are ignored; list values are applied with an SQL IN clause.
    :return dict: ``nb_attribution`` (total rows), ``nb_realisation``
        (rows with a non-null id_realisation) and ``taux_realisation``
        (rounded percentage, 0 when there is no realisation).
    """
    columns = GenericTable(
        'v_custom_result_attribution', 'oeasc_chasse', DB.engine
    ).tableDef.columns
    query = DB.session.query(
        func.count(columns.id_attribution),
        # realisations: attributions with id_realisation set (IS NOT NULL)
        func.count(columns.id_attribution).filter(
            columns.id_realisation.isnot(None)
        ),
    )
    for filter_key, filter_value in params.items():
        # skip unknown columns and empty filter values
        if not hasattr(columns, filter_key) or filter_value in [None, []]:
            continue
        column = getattr(columns, filter_key)
        if isinstance(filter_value, list):
            query = query.filter(column.in_(filter_value))
        else:
            query = query.filter(column == filter_value)
    res = query.one()
    return {
        "nb_realisation": res[1],
        "nb_attribution": res[0],
        "taux_realisation": 0 if not res[1] else round(res[1] / res[0] * 100),
    }
def result_custom(params):
    """Group and count the rows of an arbitrary view.

    :param dict params: expects the keys
        - ``view``: '<schema>.<table>' identifying the view to query
        - ``field_name``: column to group by
        - ``filters`` (optional): {column: [values]} applied with IN
        - ``sort``: column name, suffixed with '+' for descending order;
          falsy to keep the default ``COUNT(*) DESC`` ordering
    :return list: ``[{'text': grouped_value, 'count': n}, ...]``
    """
    schema_name = params['view'].split('.')[0]
    table_name = params['view'].split('.')[1]
    # Lazily reflect the view and memoize it (reflection hits the DB).
    if not cache_generic_table.get(params['view']):
        cache_generic_table[params['view']] = GenericTable(
            table_name, schema_name, DB.engine
        )
    view = cache_generic_table.get(params['view'])
    query = DB.session.query(
        getattr(view.tableDef.columns, params['field_name']), func.count('*')
    )
    # apply IN filters
    for filter_key, filter_value in params.get('filters', {}).items():
        query = query.filter(
            getattr(view.tableDef.columns, filter_key).in_(filter_value)
        )
    group_bys = [params['field_name']]
    order_by = 'COUNT(*) DESC'
    if params['sort']:
        field_sort = params['sort'].replace('+', '')
        # when sorting on another column it must also appear in GROUP BY
        if field_sort != params['field_name']:
            group_bys.append(field_sort)
        order_by = field_sort
        if '+' == params['sort'][-1]:
            order_by += " DESC"
    # NOTE(review): group_bys / order_by are interpolated into raw SQL via
    # text(); make sure params['field_name'] and params['sort'] can never
    # carry untrusted input (SQL injection risk) — verify against callers.
    query = query.group_by(text(', '.join(group_bys)))
    query = query.order_by(text(order_by))
    return [{'text': r[0], 'count': r[1]} for r in query.all()]
def export_metadata(info_role):
    """Route to export the metadata in CSV

    .. :quickref: Synthese;

    The table synthese is joined with gn_synthese.v_metadata_for_export.
    The column jdd_id is mandatory in the view
    gn_synthese.v_metadata_for_export.

    POST parameters: Use a list of id_synthese (in POST parameters)
    to filter the v_synthese_for_export_view
    """
    # filters come from the query string only in this variant
    filters = {
        key: request.args.getlist(key) for key, value in request.args.items()
    }
    metadata_view = GenericTable(
        tableName="v_metadata_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    # column of the view holding the dataset id, configured by the admin
    dataset_col = getattr(
        metadata_view.tableDef.columns,
        current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"],
    )
    query = DB.session.query(
        distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef
    ).join(metadata_view.tableDef, dataset_col == VSyntheseForWebApp.id_dataset)
    query = synthese_query.filter_query_all_filters(
        VSyntheseForWebApp, query, filters, info_role
    )
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=[metadata_view.as_dict(row) for row in query.all()],
        separator=";",
        columns=[col.key for col in metadata_view.tableDef.columns],
    )
def get_one_synthese(id_synthese):
    """Get one synthese record for web app with all decoded nomenclature

    .. :quickref: Synthese; Get one synthese

    It returns a dict composed of the following::

        'data' dict: Array of dict (with geojson key)
        'nb_total' int: Number of observations
        'nb_obs_limited' bool: Is number of observations capped

    :param int id_synthese: Synthese to be queried
    :>jsonarr array synthese_as_dict: One synthese with geojson key, see above
    """
    metadata_view = GenericTable(
        tableName="v_metadata_for_export",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    meta_columns = metadata_view.tableDef.columns
    synthese_config = current_app.config["SYNTHESE"]
    # admin-configured columns of the metadata view
    actor_col = getattr(meta_columns, synthese_config["EXPORT_METADATA_ACTOR_COL"])
    dataset_col = getattr(
        meta_columns, synthese_config["EXPORT_METADATA_ID_DATASET_COL"]
    )
    query = (
        DB.session.query(SyntheseOneRecord, actor_col)
        .filter(SyntheseOneRecord.id_synthese == id_synthese)
        .outerjoin(
            metadata_view.tableDef,
            dataset_col == SyntheseOneRecord.id_dataset,
        )
    )
    try:
        record, actors = query.one()
    except exc.NoResultFound:
        return None
    synthese_as_dict = record.as_dict(depth=2)
    synthese_as_dict["actors"] = actors
    return jsonify(synthese_as_dict)
def get_chasse_bilan(params):
    """Build a hunting-season summary from oeasc_chasse.v_pre_bilan_pretty.

    The view exposes per-suffix counters ('_zi', '_zc', '_secteur',
    '_espece'); the suffix is picked from the most specific localisation
    present in *params* (zone_indicative > zone_cynegetique > secteur,
    falling back to species-level counters).

    :param dict params: expects keys 'id_espece', and the optional
        localisation filters 'id_zone_indicative', 'id_zone_cynegetique',
        'id_secteur' (each a list of ids, or falsy).
    :return dict or None: per-season series for each counter
        ('nb_realisation', 'nb_attribution_min', ...), a derived
        'taux_realisation' series, the species/season names and the
        aggregated localisation name; None when the query returns no row.
    """
    columns = GenericTable('v_pre_bilan_pretty', 'oeasc_chasse',
                           DB.engine).tableDef.columns
    # most specific localisation wins; "" means species-level only
    localisation = ('zone_indicative' if params['id_zone_indicative'] else
                    'zone_cynegetique' if params['id_zone_cynegetique'] else
                    'secteur' if params['id_secteur'] else "")
    localisation_id_key = 'id_{}'.format(localisation)
    localisation_name_key = 'nom_{}'.format(localisation or 'espece')
    localisation_keys = (params['id_zone_indicative'] or
                         params['id_zone_cynegetique'] or
                         params['id_secteur'] or [])
    # suffix selects the matching counter columns of the view
    suffix = ('_zi' if params['id_zone_indicative'] else
              '_zc' if params['id_zone_cynegetique'] else
              '_secteur' if params['id_secteur'] else '_espece')
    res_keys = [
        'nb_realisation{}'.format(suffix),
        'nb_realisation_avant_11{}'.format(suffix),
        'nb_attribution_min{}'.format(suffix),
        'nb_attribution_max{}'.format(suffix),
    ]
    name_keys = [
        'nom_espece',
        'nom_saison',
    ]
    # NOTE(review): localisation_name_key is always truthy (falls back to
    # 'nom_espece'), so this list is never empty — confirm that is intended.
    localisation_name_keys = ([localisation_name_key]
                              if localisation_name_key else [])
    # first query: raw rows for the species, optionally filtered and
    # grouped by localisation
    scope = (list(
        map(lambda k: (columns[k]),
            res_keys + name_keys + localisation_name_keys)))
    res = (DB.session.query(*scope).filter(
        columns['id_espece'] == params['id_espece']))
    if localisation:
        res = res.filter(columns[localisation_id_key].in_(localisation_keys))
    res = res.order_by(columns['nom_saison'])
    res = res.group_by(*(map(lambda k: columns[k],
                             res_keys + name_keys + localisation_name_keys)))
    res = res.subquery()
    # second query: sum the counters per (species, season), aggregating the
    # localisation names into one comma-separated string
    scope2 = (list(map(lambda k: func.sum(res.columns[k]), res_keys)) +
              list(map(lambda k: res.columns[k], name_keys)))
    if localisation_name_key:
        scope2.append(func.string_agg(res.columns[localisation_name_key], ', '))
    res2 = (DB.session.query(*scope2).group_by(
        *(map(lambda k: res.columns[k], name_keys))).order_by(
            res.columns['nom_saison']))
    res2 = res2.all()
    res = res2
    if not res:
        return None
    out = {}
    # positional layout of each result row: res_keys then name_keys
    # (then the aggregated localisation name last, read via r[-1] below)
    query_keys = res_keys + name_keys
    for index, key in enumerate(res_keys):
        # one [season, value] pair per row; suffix stripped from the out key
        out[key.replace(suffix, '')] = [[
            r[query_keys.index('nom_saison')],
            (int(r[index]) if r[index] is not None else 0)
        ] for r in res]
    # NOTE(review): raises ZeroDivisionError when nb_attribution_max is 0
    # for a season — confirm the view guarantees it is always > 0.
    out['taux_realisation'] = [[
        out['nb_realisation'][i][0],
        out['nb_realisation'][i][1] / out['nb_attribution_max'][i][1]
    ] for i in range(len(out['nb_realisation']))]
    for key in name_keys:
        # NOTE(review): `if query_keys.index(key)` sets None only when the
        # index is 0, which cannot happen for name_keys (they follow the four
        # res_keys) — presumably dead guard; verify original intent.
        try:
            out[key] = res[0][query_keys.index(key)] if query_keys.index(
                key) else None
        except ValueError:
            pass
    # the aggregated localisation name is the last column appended to scope2
    if params['id_zone_indicative']:
        out['nom_zone_indicative'] = res[0][-1]
    elif params['id_zone_cynegetique']:
        out['nom_zone_cynegetique'] = res[0][-1]
    elif params['id_secteur']:
        out['nom_secteur'] = res[0][-1]
    return out
def export_taxon_web(info_role):
    """Optimized route for taxon web export.

    .. :quickref: Synthese;

    This view is customisable by the administrator
    Some columns are mandatory: cd_ref

    POST parameters: Use a list of cd_ref (in POST parameters)
    to filter the v_synthese_taxon_for_export_view

    :query str export_format: str<'csv'>
    :param info_role: role/permission object used for the CRUVED filter
    :return: a CSV HTTP response, or a (dict, 500) tuple when the export
        view is not conform
    """
    taxon_view = GenericTable(
        tableName="v_synthese_taxon_for_export_view",
        schemaName="gn_synthese",
        engine=DB.engine,
    )
    columns = taxon_view.tableDef.columns
    # Conformity check of the view: cd_ref is required for the join below.
    # Previously done with `assert`, which is silently stripped when Python
    # runs with -O — use an explicit check instead.
    if not hasattr(columns, "cd_ref"):
        return {
            "msg": """
        View v_synthese_taxon_for_export_view
        must have a cd_ref column \n
        trace: missing column 'cd_ref'
        """
        }, 500
    id_list = request.get_json()
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(
        info_role.id_role, module_code="SYNTHESE"
    )[0]
    # per-taxon aggregates over the selected observations
    subq = (
        DB.session.query(
            VSyntheseForWebApp.cd_ref,
            func.count(distinct(VSyntheseForWebApp.id_synthese)).label("nb_obs"),
            func.min(VSyntheseForWebApp.date_min).label("date_min"),
            func.max(VSyntheseForWebApp.date_max).label("date_max"),
        )
        .filter(VSyntheseForWebApp.id_synthese.in_(id_list))
        .group_by(VSyntheseForWebApp.cd_ref)
    )
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser
        # and observer in the v_synthese_for_export_view
        subq = synthese_query.filter_query_with_cruved(
            VSyntheseForWebApp,
            subq,
            info_role,
            id_synthese_column="id_synthese",
            id_dataset_column="id_dataset",
            observers_column="observers",
            id_digitiser_column="id_digitiser",
            with_generic_table=False,
        )
    subq = subq.subquery()
    q = DB.session.query(
        *columns, subq.c.nb_obs, subq.c.date_min, subq.c.date_max
    ).join(subq, subq.c.cd_ref == columns.cd_ref)
    return to_csv_resp(
        datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
        data=serializeQuery(q.all(), q.column_descriptions),
        separator=";",
        columns=[db_col.key for db_col in columns]
        + ["nb_obs", "date_min", "date_max"],
    )