Example #1
def detailed_ticket_count(account, max_months_ago=None):
    """ Returns a thing """
    tickets = Session.query(func.date_part('year', Ticket.created_at),
                            func.date_part('month', Ticket.created_at),
                            count(Ticket.id))\
                     .join(TicketOrder, Order).filter(Order.account_id==account.id)\
                     .group_by(func.date_part('year', Ticket.created_at),
                               func.date_part('month', Ticket.created_at))

    if max_months_ago is not None:
        past_date = datetime.datetime.utcnow() - datetime.timedelta(
            days=(max_months_ago * 365) / 12)
        past_date = datetime.date(year=past_date.year,
                                  month=past_date.month,
                                  day=1)  # set to beginning of month
        tickets = tickets.filter(Ticket.created_at >= past_date)

    statistic_tuples = tickets.all()  # [(year, month, amount), ...]
    result = dict()
    for year, month, amount in statistic_tuples:
        year = int(year)
        month = int(month)
        amount = int(amount)
        if year in result:
            if month not in result[year]:
                result[year][month] = amount
            else:
                result[year][month] += amount
        else:
            result[year] = {month: amount}
    return result
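For reference, here is a minimal self-contained sketch (not part of the example above; it assumes SQLAlchemy 1.4+ and uses a hypothetical stand-in Ticket model) that builds the same year/month grouping with func.date_part and prints the SQL it compiles to for PostgreSQL, so no database is needed:

import datetime

from sqlalchemy import Column, DateTime, Integer, func, select
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Ticket(Base):
    # Hypothetical stand-in for the real Ticket model used above.
    __tablename__ = 'ticket'
    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime, default=datetime.datetime.utcnow)


year = func.date_part('year', Ticket.created_at)
month = func.date_part('month', Ticket.created_at)
stmt = select(year, month, func.count(Ticket.id)).group_by(year, month)

# Compile only; date_part is PostgreSQL-specific and nothing is executed here.
print(stmt.compile(dialect=postgresql.dialect()))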
def get_zprospect():
    """
    Retourne toutes les zones de prospection du module
    """

    id_type_commune = blueprint.config["id_type_commune"]
    parameters = request.args
    q = (
        DB.session.query(TZprospect, Taxref,
                         func.string_agg(LAreas.area_name, ", "))
        .outerjoin(Taxref, TZprospect.cd_nom == Taxref.cd_nom)
        .outerjoin(CorZpArea, CorZpArea.indexzp == TZprospect.indexzp)
        .outerjoin(CorZpObs, CorZpObs.indexzp == TZprospect.indexzp)
        .outerjoin(User, User.id_role == CorZpObs.id_role)
        .outerjoin(BibOrganismes,
                   BibOrganismes.id_organisme == User.id_organisme)
        .outerjoin(LAreas,
                   and_(LAreas.id_area == CorZpArea.id_area,
                        LAreas.id_type == id_type_commune))
        .group_by(TZprospect, Taxref))
    if "indexzp" in parameters:
        q = q.filter(TZprospect.indexzp == parameters["indexzp"])

    if "cd_nom" in parameters:
        q = q.filter(Taxref.cd_nom == parameters["cd_nom"])

    if "commune" in parameters:
        q = q.filter(LAreas.area_name == parameters["commune"])

    if "organisme" in parameters:
        q = q.filter(BibOrganismes.nom_organisme == parameters["organisme"])

    if "year" in parameters:
        q = q.filter(
            func.date_part("year", TZprospect.date_min) == parameters["year"])

    data = q.all()
    features = []

    for d in data:
        feature = d[0].get_geofeature(
            recursif=False,
            columns=["indexzp", "date_min", "date_max", "cd_nom"])
        id_zp = feature["properties"]["indexzp"]
        feature["properties"]["taxon"] = d[1].as_dict(["nom_valide"])
        features.append(feature)
    return FeatureCollection(features)
def check_year_visit(id_base_site, new_visit_date):
    """
    Check if there is already a visit of the same year.
    If yes, observer is not allowed to post the new visit
    """
    q_year = DB.session.query(
        func.date_part('year', TBaseVisits.visit_date_min)).filter(
            TBaseVisits.id_base_site == id_base_site)
    tab_old_year = q_year.all()
    year_new_visit = new_visit_date[0:4]

    for y in tab_old_year:
        year_old_visit = str(int(y[0]))
        if year_old_visit == year_new_visit:
            DB.session.rollback()
            raise PostYearError(
                'Maille {} has already been visited in {}'.format(
                    id_base_site, year_old_visit), 403)
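The check above loads every visit year into Python before comparing. A hedged alternative sketch (assuming the same DB session and TBaseVisits model as above; this is not the module's actual code) pushes the comparison into SQL with date_part and only asks whether a matching row exists:

def year_already_visited(id_base_site, new_visit_date):
    """Sketch: True if the site already has a visit in the same year."""
    new_year = int(new_visit_date[0:4])
    first_match = (DB.session.query(TBaseVisits.id_base_visit)
                   .filter(TBaseVisits.id_base_site == id_base_site,
                           func.date_part('year', TBaseVisits.visit_date_min) == new_year)
                   .first())
    return first_match is not None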
Example #4
def export_visit(info_role):
    '''
    Download the data for one visit (or several visits)
    '''

    parameters = request.args
    export_format = parameters.get('export_format', 'shapefile')

    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    q = (DB.session.query(ExportVisits))

    if 'id_base_visit' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.idbvisit == parameters['id_base_visit']))
    elif 'id_releve_plot' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.idreleve == parameters['id_releve_plot']))
    elif 'id_base_site' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.idbsite == parameters['id_base_site']))
    elif 'organisme' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.organisme == parameters['organisme']))
    elif 'year' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            func.date_part('year', ExportVisits.visitdate) ==
            parameters['year']))
    elif 'cd_hab' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.cd_hab == parameters['cd_hab']))

    data = q.all()
    features = []

    # format data
    cor_hab_taxon = []
    flag_cdhab = 0

    strates = []
    tab_header = []
    column_name = get_base_column_name()
    column_name_pro = get_pro_column_name()
    mapping_columns = get_mapping_columns()

    strates_list = get_stratelist_plot()

    tab_visit = []

    for d in data:
        visit = d.as_dict()

        # Get list hab/taxon
        cd_hab = visit['cd_hab']
        if flag_cdhab != cd_hab:
            cor_hab_taxon = get_taxonlist_by_cdhab(cd_hab)
            flag_cdhab = cd_hab

        # remove geom Type
        geom_wkt = to_shape(d.geom)
        geom_array = array(geom_wkt)
        visit['geom_wkt'] = geom_wkt
        if export_format == 'csv' or export_format == 'shapefile':
            visit['geom'] = d.geom
            if geom_wkt.type.lower() == 'linestring':
                visit['geom'] = str(geom_array[0]) + " / " + str(geom_array[1])

        # remove html tag
        visit['lbhab'] = striphtml(visit['lbhab'])

        # Translate label column
        visit = dict((mapping_columns[key], value)
                     for (key, value) in visit.items()
                     if key in mapping_columns)

        # pivot strate
        if visit['covstrate']:
            for strate, cover in visit['covstrate'].items():
                visit[strate] = " % " + str(cover)
        if 'covstrate' in visit:
            visit.pop('covstrate')

        # pivot taxons
        if visit['covtaxons']:
            for taxon, cover in visit['covtaxons'].items():
                visit[taxon] = " % " + str(cover)
        if 'covtaxons' in visit:
            visit.pop('covtaxons')

        tab_visit.append(visit)

    if export_format == 'geojson':

        for d in tab_visit:
            geometry = mapping(d['geom_wkt'])
            d.pop('geom_wkt', None)
            # build a proper GeoJSON Feature instead of appending the geometry
            # and the properties as two separate items
            features.append({
                'type': 'Feature',
                'geometry': geometry,
                'properties': d,
            })
        result = FeatureCollection(features)

        return to_json_resp(result, as_file=True, filename=file_name, indent=4)

    elif export_format == 'csv':

        tab_header = column_name + [clean_string(x) for x in strates_list] + [
            clean_string(x) for x in cor_hab_taxon
        ] + column_name_pro

        return to_csv_resp(file_name, tab_visit, tab_header, ';')

    else:

        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')

        FionaShapeService.create_shapes_struct(
            db_cols=ExportVisits.__mapper__.c,
            srid=4326,
            dir_path=dir_path,
            file_name=file_name,
        )

        for row in data:
            FionaShapeService.create_feature(row.as_dict(), row.geom)

        FionaShapeService.save_and_zip_shapefiles()

        return send_from_directory(dir_path,
                                   file_name + '.zip',
                                   as_attachment=True)
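A small standalone illustration of the Feature assembly used in the 'geojson' branch above, using only shapely and the geojson package (both assumed here; the property values are made up):

from geojson import FeatureCollection
from shapely.geometry import LineString, mapping

geom = LineString([(5.0, 44.0), (5.1, 44.1)])
properties = {'visitdate': '2020-05-12', 'lbhab': 'Pelouse alpine'}  # illustrative only

feature = {'type': 'Feature', 'geometry': mapping(geom), 'properties': properties}
print(FeatureCollection([feature]))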
Example #5
def get_sites_zp():
    '''
    Return the list of ZP (prospection zones)
    '''
    parameters = request.args
    # municipality area type id, taken from the module config file
    id_type_commune = blueprint.config['id_type_commune']
    q = (
        DB.session.query(
            TInfoSite,
            func.max(TBaseVisits.visit_date_min),
            Taxonomie.nom_complet,
            func.count(distinct(TBaseVisits.id_base_visit)),
            func.string_agg(distinct(BibOrganismes.nom_organisme), ', '),
            func.string_agg(LAreas.area_name, ', '))
        .outerjoin(TBaseVisits,
                   TBaseVisits.id_base_site == TInfoSite.id_base_site)
        # get taxonomy nom_complet
        .outerjoin(Taxonomie, TInfoSite.cd_nom == Taxonomie.cd_nom)
        # get organisms of a site
        .outerjoin(corVisitObserver,
                   corVisitObserver.c.id_base_visit == TBaseVisits.id_base_visit)
        .outerjoin(User, User.id_role == corVisitObserver.c.id_role)
        .outerjoin(BibOrganismes,
                   BibOrganismes.id_organisme == User.id_organisme)
        # get municipalities of a site
        .outerjoin(corSiteArea,
                   corSiteArea.c.id_base_site == TInfoSite.id_base_site)
        .outerjoin(LAreas,
                   and_(LAreas.id_area == corSiteArea.c.id_area,
                        LAreas.id_type == id_type_commune))
        .group_by(TInfoSite, Taxonomie.nom_complet))

    if 'id_base_site' in parameters:
        q = q.filter(TInfoSite.id_base_site == parameters['id_base_site'])

    if 'cd_nom' in parameters:
        q = q.filter(TInfoSite.cd_nom == parameters['cd_nom'])
    if 'organisme' in parameters:
        q = q.filter(BibOrganismes.nom_organisme == parameters['organisme'])
    if 'commune' in parameters:
        q = q.filter(LAreas.area_name == parameters['commune'])
    if 'year' in parameters:
        # re-run the query to get the exact date_max when filtering by year
        q_year = (DB.session.query(
            TInfoSite.id_base_site,
            func.max(TBaseVisits.visit_date_min),
        ).outerjoin(
            TBaseVisits,
            TBaseVisits.id_base_site == TInfoSite.id_base_site).group_by(
                TInfoSite.id_base_site))

        data_year = q_year.all()

        q = q.filter(
            func.date_part('year', TBaseVisits.visit_date_min) ==
            parameters['year'])
    data = q.all()

    features = []
    for d in data:
        feature = d[0].get_geofeature()
        id_site = feature['properties']['base_site']['id_base_site']
        if 'year' in parameters:
            for dy in data_year:
                # pick the correct max date for the site when filtering by year
                if id_site == dy[0]:
                    feature['properties']['date_max'] = str(dy[1])
        else:
            feature['properties']['date_max'] = str(d[1])
            if d[1] is None:
                feature['properties']['date_max'] = 'Aucune visite'
        feature['properties']['nom_taxon'] = str(d[2])
        feature['properties']['nb_visit'] = str(d[3])
        feature['properties']['organisme'] = str(d[4])
        feature['properties']['nom_commune'] = str(d[5])
        if d[4] is None:
            feature['properties']['organisme'] = 'Aucun'
        features.append(feature)
    return FeatureCollection(features)
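Inside the year branch above, every feature scans data_year to find its real date_max. A minimal sketch of the same lookup done through a dict built once (the rows below are illustrative stand-ins for what q_year.all() returns):

import datetime

# Illustrative stand-in for q_year.all(): (id_base_site, max(visit_date_min)) rows.
data_year = [(1, datetime.date(2020, 6, 1)),
             (2, datetime.date(2019, 5, 12))]

date_max_by_site = dict(data_year)  # build the lookup once

for id_site in (1, 2, 3):
    print(id_site, date_max_by_site.get(id_site, 'Aucune visite'))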
Example #6
def export_visit():
    '''
    Download the data for one visit (or several visits)
    '''

    parameters = request.args

    export_format = parameters.get('export_format', 'shapefile')

    file_name = datetime.datetime.now().strftime('%Y_%m_%d_%Hh%Mm%S')
    q = (DB.session.query(ExportVisits))

    if 'id_base_visit' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_visit == parameters['id_base_visit']))
    elif 'id_base_site' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.id_base_site == parameters['id_base_site']))
    elif 'organisme' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.organisme == parameters['organisme']))
    elif 'commune' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.area_name == parameters['commune']))
    elif 'year' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            func.date_part('year', ExportVisits.visit_date) ==
            parameters['year']))
    elif 'cd_nom' in parameters:
        q = (DB.session.query(ExportVisits).filter(
            ExportVisits.cd_nom == parameters['cd_nom']))

    data = q.all()
    features = []

    if export_format == 'geojson':
        for d in data:
            feature = d.as_geofeature('geom', 'id_area', False)
            features.append(feature)
        result = FeatureCollection(features)

        return to_json_resp(result, as_file=True, filename=file_name, indent=4)

    elif export_format == 'csv':
        tab_visit = []

        for d in data:
            visit = d.as_dict()
            geom_wkt = to_shape(d.geom)
            visit['geom'] = geom_wkt

            tab_visit.append(visit)

        return to_csv_resp(file_name, tab_visit, tab_visit[0].keys(), ';')

    else:

        dir_path = str(ROOT_DIR / 'backend/static/shapefiles')

        FionaShapeService.create_shapes_struct(
            db_cols=ExportVisits.__mapper__.c,
            srid=2154,
            dir_path=dir_path,
            file_name=file_name,
        )

        for row in data:
            FionaShapeService.create_feature(row.as_dict(), row.geom)

        FionaShapeService.save_and_zip_shapefiles()

        return send_from_directory(dir_path,
                                   file_name + '.zip',
                                   as_attachment=True)
Example #7
            def makeParam(key, value):
                """ Takes a value list as input and concatenates with OR.
                This means that {age: [1, 12, 13]}  will yield a result if
                age == 1 OR age == 12 OR age == 13.
                """
                if not isinstance(value, (list, tuple)):
                    value = [value]

                or_clause = []
                # Ask for the type of the parameter according to the entity
                parameter_class = parameter_for_type(entity.declared_params[key])
                for val in value:
                    if callable(val):
                        # we’ve been given a function
                        or_clause.append(val(parameter_class.value))
                    elif parameter_class == StringParameter:
                        # test string using ‘like’
                        if not options["strict"]:
                            val = "%" + val + "%"

                        or_clause.append(parameter_class.value.like(val))
                    else:
                        if options["convert_string"]:
                            try:
                                val = parameter_class.from_string(val)
                            except StringConversionError:
                                if parameter_class == DateParameter:
                                    # Here, we want to match a certain YEAR, a certain
                                    # combination of YEAR-MONTH or a certain combination
                                    # YEAR-MONTH-DAY from a date.
                                    # Therefore, we need to extract YEAR, MONTH and DAY
                                    # from a date and match those separately.
                                    # Unfortunately, there is no common SQL function for
                                    # this task, so we're left with ``date_part('year', date)``
                                    # for Postgres and ``strftime('%Y', date)`` for Sqlite.
                                    # We check the `engine_name` and generate the respective
                                    # methods.

                                    # get year month day
                                    ymd = val.split('-')

                                    clauses = []

                                    from sqlalchemy.sql.expression import func
                                    if self.connection.engine_name == "postgresql":
                                        year_part = lambda value: func.date_part('year', value)
                                        month_part = lambda value: func.date_part('month', value)
                                        day_part = lambda value: func.date_part('day', value)
                                    elif self.connection.engine_name == "sqlite":
                                        year_part = lambda value: func.strftime('%Y', value)
                                        month_part = lambda value: func.strftime('%m', value)
                                        day_part = lambda value: func.strftime('%d', value)
                                    else:
                                        raise ValueError("Unsupported operation: Unknown engine name %r." %
                                                          self.connection.engine_name)

                                    if len(ymd) > 0:
                                        clauses.append(year_part(parameter_class.value) == ymd[0])
                                    if len(ymd) > 1:
                                        clauses.append(month_part(parameter_class.value) == ymd[1])
                                    if len(ymd) > 2:
                                        clauses.append(day_part(parameter_class.value) == ymd[2])

                                    clause = and_(*clauses)
                                    or_clause.append(clause)
                                else:
                                    raise
                        else:
                            or_clause.append(parameter_class.value == val)
                # FIXME
                return entity._params.of_type(parameter_class).any(or_(*or_clause))
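The comments above note that extracting date parts is dialect-specific: date_part() on PostgreSQL, strftime() on SQLite. A self-contained sketch of that switch, compiled against both dialects without executing anything (the Event model is a hypothetical stand-in, not part of the example above):

from sqlalchemy import Column, Date, Integer, func
from sqlalchemy.dialects import postgresql, sqlite
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Event(Base):
    # Hypothetical table with a date column, only used to build expressions.
    __tablename__ = 'event'
    id = Column(Integer, primary_key=True)
    happened_on = Column(Date)


def year_part_for(engine_name, column):
    """Return a 'year of <column>' SQL expression for the given engine."""
    if engine_name == 'postgresql':
        return func.date_part('year', column)
    if engine_name == 'sqlite':
        return func.strftime('%Y', column)
    raise ValueError("Unsupported operation: Unknown engine name %r." % engine_name)


print(year_part_for('postgresql', Event.happened_on).compile(dialect=postgresql.dialect()))
print(year_part_for('sqlite', Event.happened_on).compile(dialect=sqlite.dialect()))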
def export_ap():
    """
    Télécharge les données d'une aire de présence
    """

    parameters = request.args

    export_format = parameters.get("export_format", "shapefile")

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    q = DB.session.query(ExportAp)

    if "indexap" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.indexap == parameters["indexap"])
    elif "indexzp" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.id_base_site == parameters["indexzp"])
    elif "organisme" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.organisme == parameters["organisme"])
    elif "commune" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.area_name == parameters["commune"])
    elif "year" in parameters:
        q = DB.session.query(ExportAp).filter(
            func.date_part("year", ExportAp.visit_date) == parameters["year"])
    elif "cd_nom" in parameters:
        q = DB.session.query(ExportAp).filter(
            ExportAp.cd_nom == parameters["cd_nom"])

    data = q.all()
    features = []

    if export_format == "geojson":

        for d in data:
            feature = d.as_geofeature("geom_local", "indexap", False)
            features.append(feature)
        result = FeatureCollection(features)

        return to_json_resp(result, as_file=True, filename=file_name, indent=4)

    elif export_format == "csv":
        tab_ap = []

        for d in data:
            ap = d.as_dict()
            geom_wkt = to_shape(d.geom_local)
            ap["geom_local"] = geom_wkt

            tab_ap.append(ap)

        return to_csv_resp(file_name, tab_ap, tab_ap[0].keys(), ";")

    else:

        dir_path = str(ROOT_DIR / "backend/static/shapefiles")

        FionaShapeService.create_shapes_struct(
            db_cols=ExportAp.__mapper__.c,
            srid=2154,
            dir_path=dir_path,
            file_name=file_name,
        )

        for row in data:
            FionaShapeService.create_feature(row.as_dict(), row.geom_local)

        FionaShapeService.save_and_zip_shapefiles()

        return send_from_directory(dir_path,
                                   file_name + ".zip",
                                   as_attachment=True)
Example #9
def get_all_sites(info_role):
    '''
    Return all sites
    '''
    parameters = request.args

    id_type_commune = blueprint.config['id_type_commune']


    q = (
        DB.session.query(
            TInfosSite,
            func.max(TBaseVisits.visit_date_min),
            Habref.lb_hab_fr_complet,
            func.count(distinct(TBaseVisits.id_base_visit)),
            func.string_agg(distinct(BibOrganismes.nom_organisme), ', '),
            func.string_agg(distinct(LAreas.area_name), ', ')
            ).outerjoin(
            TBaseVisits, TBaseVisits.id_base_site == TInfosSite.id_base_site
            # get habitat cd_hab
            ).outerjoin(
                Habref, TInfosSite.cd_hab == Habref.cd_hab
            # get organisms of a site
            ).outerjoin(
                corVisitObserver, corVisitObserver.c.id_base_visit == TBaseVisits.id_base_visit
            ).outerjoin(
                User, User.id_role == corVisitObserver.c.id_role
            ).outerjoin(
                BibOrganismes, BibOrganismes.id_organisme == User.id_organisme
            )
            # get municipalities of a site
            .outerjoin(
                corSiteArea, corSiteArea.c.id_base_site == TInfosSite.id_base_site
            ).outerjoin(
                LAreas, and_(LAreas.id_area == corSiteArea.c.id_area, LAreas.id_type == id_type_commune)
            )
            .group_by(
                TInfosSite, Habref.lb_hab_fr_complet
            )
        )


    if 'cd_hab' in parameters:
        q = q.filter(TInfosSite.cd_hab == parameters['cd_hab'])
    
    if 'id_base_site' in parameters:
        q = q.filter(TInfosSite.id_base_site == parameters['id_base_site'])

    if 'organisme' in parameters:
        q = q.filter(BibOrganismes.id_organisme == parameters['organisme'])

    if 'commune' in parameters:
        q = q.filter(LAreas.area_name == parameters['commune'])

    if 'year' in parameters:
        # re-run the query to get the exact date_max when filtering by year
        q_year = (
            DB.session.query(
                TInfosSite.id_base_site,
                func.max(TBaseVisits.visit_date_min),
            ).outerjoin(
                TBaseVisits, TBaseVisits.id_base_site == TInfosSite.id_base_site
            ).group_by(TInfosSite.id_base_site)
        )

        data_year = q_year.all()

        q = q.filter(func.date_part('year', TBaseVisits.visit_date_min) == parameters['year'])
    
    page = request.args.get('page', 1, type=int)
    items_per_page = blueprint.config['items_per_page']
    pagination_serverside = blueprint.config['pagination_serverside']

    if pagination_serverside:
        pagination = q.paginate(page, items_per_page, False)
        data = pagination.items
        total_items = pagination.total
    else:
        total_items = 0
        data = q.all()

    pageInfo = {
        'totalItems': total_items,
        'items_per_page': items_per_page,
    }
    features = []

    if data:
        for d in data:
            feature = d[0].get_geofeature()
            id_site = feature['properties']['id_base_site']
            base_site_code = feature['properties']['t_base_site']['base_site_code']
            base_site_description = feature['properties']['t_base_site']['base_site_description'] or 'Aucune description'
            base_site_name = feature['properties']['t_base_site']['base_site_name']
            if feature['properties']['t_base_site']:
                del feature['properties']['t_base_site']
            if 'year' in parameters:
                for dy in data_year:
                    # pick the correct max date for the site when filtering by year
                    if id_site == dy[0]:
                        feature['properties']['date_max'] = str(dy[1])
            else:
                feature['properties']['date_max'] = str(d[1])
                if d[1] is None:
                    feature['properties']['date_max'] = 'Aucune visite'
            feature['properties']['nom_habitat'] = str(d[2])
            feature['properties']['nb_visit'] = str(d[3])
            feature['properties']['organisme'] = str(d[4])
            feature['properties']['nom_commune'] = str(d[5])
            if d[4] is None:
                feature['properties']['organisme'] = 'Aucun'
            feature['properties']['base_site_code'] = base_site_code
            feature['properties']['base_site_description'] = base_site_description
            feature['properties']['base_site_name'] = base_site_name
            features.append(feature)

        return [pageInfo, FeatureCollection(features)]
    return None