Example #1
    def status():

        record = inventory.query.add_columns(
            func.SUM(inventory.in_stock).label('in_stock'),
            func.SUM(inventory.given).label('given'),
            func.SUM(inventory.expired).label('expired')).all()

        return record
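Since there is no group_by, the query collapses to a single row of totals, and the labels are exposed as row attributes. A minimal usage sketch, assuming the same inventory model as above (hypothetical call site):

    # hypothetical usage of status() above
    totals = status()[0]
    print(totals.in_stock, totals.given, totals.expired)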
Example #2
    def type():

        record = inventory.query.add_columns(
            inventory.type_id,
            func.SUM(inventory.in_stock).label('in_stock'),
            func.SUM(inventory.given).label('given'),
            func.SUM(inventory.expired).label('expired')).group_by(
                inventory.type_id).all()

        return record
Example #3
    def relatValorDia(self):

        try:

            # Opening the session
            conecta = Conexao()
            sessao = conecta.Session()

            # Query
            row = (sessao.query(
                func.COALESCE(func.SUM(Venda.valor_recebido),
                              0).label('vendido'),
                func.COUNT(distinct(
                    Venda.id_cliente)).label('cliente')).filter(
                        Venda.data_emissao.between(self.dataEmissao,
                                                   self.dataFim)))
            # Saving the result (iterating the query executes it)
            for query in row:
                self.valorRecebido = str(query.vendido).replace('.', ',')
                self.idCliente = query.cliente

            # Closing the connection
            sessao.close()

        except IntegrityError as err:
            print(err)
    def detalheEntrada(self):

        try:

            # Opening the session
            conecta = Conexao()
            sessao = conecta.Session()

            # Query
            self.query = (sessao.query(
                func.SUM(ContaAReceber.valor_recebido).label('entrada'),
                CatAReceber.categoria_a_receber,
                FormaPagamento.forma_pagamento).join(CatAReceber).join(
                    FormaPagamento).filter(
                        ContaAReceber.data_recebimento.between(
                            self.dataRecebimento, self.dataFim)).group_by(
                                ContaAReceber.forma_pagamento,
                                ContaAReceber.categoria))

            # Converting variables into lists
            self.valorRecebido = []
            self.categoria = []
            self.formaPagamento = []

            # Saving the results into their lists
            for row in self.query:
                self.categoria.append(row.categoria_a_receber)
                self.valorRecebido.append(row.entrada)
                self.formaPagamento.append(row.forma_pagamento)

            # Closing the connection
            sessao.close()

        except IntegrityError as err:
            print(err)
    def __call__(self, user_ids, session):
        """
        Parameters:
            user_ids    : list of mediawiki user ids to restrict computation to
            session     : sqlalchemy session open on a mediawiki database

        Returns:
            {
                user id: 1 if they're a rolling new active editor, 0 otherwise
                for all cohort users, or all users that have edits in the time period
            }
        """
        number_of_edits = int(self.number_of_edits.data)
        rolling_days = int(self.rolling_days.data)
        end_date = self.end_date.data
        start_date = end_date - timedelta(days=rolling_days)

        newly_registered = session.query(Logging.log_user) \
            .filter(Logging.log_type == 'newusers') \
            .filter(Logging.log_action == 'create') \
            .filter(between(Logging.log_timestamp, start_date, end_date))

        filtered_new = self.filter(newly_registered,
                                   user_ids,
                                   column=Logging.log_user).subquery()

        rev_user = label('user_id', Revision.rev_user)
        ar_user = label('user_id', Archive.ar_user)
        count = label('count', func.count())

        revisions = session.query(rev_user, count)\
            .filter(between(Revision.rev_timestamp, start_date, end_date))\
            .filter(Revision.rev_user.in_(filtered_new))\
            .group_by(Revision.rev_user)

        archived = session.query(ar_user, count)\
            .filter(between(Archive.ar_timestamp, start_date, end_date))\
            .filter(Archive.ar_user.in_(filtered_new))\
            .group_by(Archive.ar_user)

        bot_user_ids = session.query(MediawikiUserGroups.ug_user)\
            .filter(MediawikiUserGroups.ug_group == 'bot')\
            .subquery()

        new_edits = revisions.union_all(archived).subquery()
        new_edits_by_user = session.query(new_edits.c.user_id)\
            .filter(new_edits.c.user_id.notin_(bot_user_ids))\
            .group_by(new_edits.c.user_id)\
            .having(func.SUM(new_edits.c.count) >= number_of_edits)

        metric_results = {r[0]: {self.id: 1} for r in new_edits_by_user.all()}

        if user_ids is None:
            return metric_results
        else:
            return {
                uid: metric_results.get(uid, self.default_result)
                for uid in user_ids
            }
Example #6
 def get_total_amount_staked(self):
     try:
         query_response = self.session.query(
             func.SUM(StakeHolderDBModel.amount_approved).label(
                 "total_amount_approved"),
             func.SUM(StakeHolderDBModel.amount_pending_for_approval).label(
                 "total_amount_pending_for_approval")).one()
         self.session.commit()
     except Exception as e:
         self.session.rollback()
         raise e
     # SUM() returns NULL when no rows match, so guard each total separately
     total_amount_approved = 0
     total_amount_pending_for_approval = 0
     if query_response.total_amount_approved is not None:
         total_amount_approved = int(query_response.total_amount_approved)
     if query_response.total_amount_pending_for_approval is not None:
         total_amount_pending_for_approval = int(
             query_response.total_amount_pending_for_approval)
     total_amount_staked = total_amount_approved + total_amount_pending_for_approval
     return total_amount_staked
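The Python-side NULL check above can also be pushed into SQL with func.coalesce, as several other examples in this collection do. A minimal sketch under that assumption, reusing the StakeHolderDBModel from above (function name is hypothetical):

    from sqlalchemy import func

    def get_total_amount_staked_coalesce(session):
        # hypothetical variant: COALESCE makes the database return 0 instead of NULL
        # when the table is empty, so no None check is needed afterwards
        row = session.query(
            func.coalesce(func.SUM(StakeHolderDBModel.amount_approved),
                          0).label("approved"),
            func.coalesce(func.SUM(StakeHolderDBModel.amount_pending_for_approval),
                          0).label("pending")).one()
        return int(row.approved) + int(row.pending)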
def get_aum_from_user_sta_table(hold_date):
    """
    Get the total assets under management on a given day from the user statistics table
    :return: list
    """
    t = Table('user_statistics', metadata, autoload=True)
    # Session = sessionmaker(bind=db)
    # session = Session()
    rst = session.query(func.SUM(t.c.us_holding_amount)).filter( \
                                    t.c.us_date == hold_date)
    return rst.all()
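The reflected-table helpers here and below rely on module-level metadata and session objects; the commented-out lines hint at the intended setup. A minimal sketch of that setup, assuming a database.connection helper that returns an engine, as in those comments:

    from sqlalchemy import MetaData
    from sqlalchemy.orm import sessionmaker

    # `database` is this project's own connection helper (assumed from the comments)
    db = database.connection('portfolio_sta')
    metadata = MetaData(bind=db)
    Session = sessionmaker(bind=db)
    session = Session()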
Example #8
    def movDespesa(self):

        try:

            # Opening the session
            conecta = Conexao()
            sessao = conecta.Session()

            # Query
            row = (sessao.query(func.COALESCE(
                func.SUM(ContaAPagar.valor_pago), 0
            ).label('valorPago'))

                .filter(ContaAPagar.data_pagamento.between(
                    self.dataPagamento, self.dataFim))
            )
            # Saving the result (iterating the query executes it)
            for r in row:
                self.valorPago = r.valorPago

            # Query
            row = (sessao.query(func.COALESCE(
                func.SUM(ContaAPagar.valor), 0
            ).label('valorAPagar'))

                .filter(ContaAPagar.data_vencimento.between(
                    self.dataPagamento, self.dataFim))
            )
            # Saving the result
            for r in row:
                self.valorAPagar = r.valorAPagar

            # Closing the connection
            sessao.close()

        except IntegrityError as err:
            print(err)
def get_specific_month_amount(hold_date):
    """
    Get the total holding amount of all users on a given day
    :return: list
    """
    # db = database.connection('portfolio_sta')
    # metadata = MetaData(bind=db)
    t = Table('ds_share', metadata, autoload=True)
    # Session = sessionmaker(bind=db)
    # session = Session()
    rst = session.query(func.SUM(t.c.ds_amount)).filter( \
                                    t.c.ds_date == hold_date)
    return rst.all()
Example #10
    def movEntrada(self):

        try:

            # Opening the session
            conecta = Conexao()
            sessao = conecta.Session()

            # Query
            row = (sessao.query(
                func.COALESCE(func.SUM(ContaAReceber.valor_recebido),
                              0).label('valorRecebido')).filter(
                                  ContaAReceber.data_recebimento.between(
                                      self.dataRecebimento, self.dataFim)))
            # Saving the result (iterating the query executes it)
            for r in row:
                self.valorRecebido = r.valorRecebido

            # Query
            row = (sessao.query(
                func.COALESCE(func.SUM(ContaAReceber.valor),
                              0).label('valorAReceber')).filter(
                                  ContaAReceber.data_vencimento.between(
                                      self.dataRecebimento, self.dataFim)))
            # Saving the result
            for r in row:
                self.valorAReceber = r.valorAReceber

            # Closing the connection
            sessao.close()

        except IntegrityError as err:
            print(err)

def get_specific_date_amount(hold_date, uids):
    """
    Get the holding amount on a given day for users with ds_uid in uids
    :param hold_date: holding date
    :param uids: list of user ids
    :return: list
    """
    # db = database.connection('portfolio_sta')
    # metadata = MetaData(bind=db)
    t = Table('ds_share', metadata, autoload=True)
    # Session = sessionmaker(bind=db)
    # session = Session()
    rst = session.query(func.SUM(t.c.ds_amount)).filter(t.c.ds_date == hold_date, \
                                            t.c.ds_uid.in_(uids))

    return rst.all()
Example #12
    def __call__(self, user_ids, session):
        """
        Parameters:
            user_ids    : list of mediawiki user ids to restrict computation to
            session     : sqlalchemy session open on a mediawiki database

        Returns:
            dictionary from user ids to: 1 if they're a rolling active editor, 0 if not
        """
        number_of_edits = int(self.number_of_edits.data)
        rolling_days = int(self.rolling_days.data)
        end_date = self.end_date.data
        start_date = end_date - timedelta(days=rolling_days)

        rev_user = label('user_id', Revision.rev_user)
        ar_user = label('user_id', Archive.ar_user)
        count = label('count', func.count())

        revisions = session.query(rev_user, count)\
            .filter(between(Revision.rev_timestamp, start_date, end_date))\
            .group_by(Revision.rev_user)
        revisions = self.filter(revisions, user_ids, column=Revision.rev_user)

        archived = session.query(ar_user, count)\
            .filter(between(Archive.ar_timestamp, start_date, end_date))\
            .group_by(Archive.ar_user)
        archived = self.filter(archived, user_ids, column=Archive.ar_user)

        bot_user_ids = session.query(MediawikiUserGroups.ug_user)\
            .filter(MediawikiUserGroups.ug_group == 'bot')\
            .subquery()

        edits = revisions.union_all(archived).subquery()
        edits_by_user = session.query(edits.c.user_id)\
            .filter(edits.c.user_id.notin_(bot_user_ids))\
            .group_by(edits.c.user_id)\
            .having(func.SUM(edits.c.count) >= number_of_edits)

        metric_results = {r[0]: {self.id: 1} for r in edits_by_user.all()}

        if user_ids is None:
            return metric_results
        else:
            return {
                uid: metric_results.get(uid, self.default_result)
                for uid in user_ids
            }
def get_specific_day_amount(date, t_type):
    """
    Get the total amount with ds_trade_type in t_type on a given date
    :param date: string, date
    :param t_type: list, trade types
    :return: list
    """
    # db = database.connection('portfolio_sta')
    # metadata = MetaData(bind=db)
    t = Table('ds_order_pdate', metadata, autoload=True)
    # Session = sessionmaker(bind=db)
    # session = Session()
    rst = session.query(func.SUM(t.c.ds_amount)).filter( \
                                        t.c.ds_placed_date == date, \
                                        t.c.ds_trade_type.in_(t_type))
    # session.close()
    return rst.all()
def get_specific_month_amount(s_date, e_date, t_type, uids):
    """
    Get the total repurchase amount with ds_trade_type in t_type within a given period
    :param s_date: string, start date
    :param e_date: string, end date
    :param t_type: list, trade types
    :param uids: list of user ids
    :return: list
    """
    # db = database.connection('portfolio_sta')
    # metadata = MetaData(bind=db)
    t = Table('ds_order', metadata, autoload=True)
    # Session = sessionmaker(bind=db)
    # session = Session()
    rst = session.query(func.SUM(t.c.ds_amount)).filter( \
                                        t.c.ds_trade_date >= s_date, \
                                        t.c.ds_trade_date <= e_date, \
                                        t.c.ds_trade_type.in_(t_type), \
                                        t.c.ds_uid.in_(uids))
    # session.close()
    return rst.all()
def get_workinhand_summary():

    filter_data = {}
    if 'filter' in request.args:
        filter_data = json.loads(request.args.get('filter'))

    query = g.s\
        .query(
             DO.OrgId
            ,DO.OrgName
            ,DPT.ProjectTypeDescription
            ,DPST.ProjectSubTypeDescription
            ,DED.EmployeeName.label("Director")
            ,func.SUM(FPF.AmountYetToInvoice).label("AmountYetToInvoice")
         )\
        .outerjoin(DD , FPF.DimDateKey == DD.DimDateKey)\
        .outerjoin(DO , FPF.DimOrganisationKey == DO.DimOrganisationKey)\
        .outerjoin(DED , FPF.DimEmployeeDirectorKey == DED.DimEmployeeKey)\
        .outerjoin(DPT , FPF.DimProjectTypeKey == DPT.DimProjectTypeKey)\
        .outerjoin(DPST , FPF.DimProjectSubTypeKey == DPST.DimProjectSubTypeKey)

    if 'DimDateKey' in filter_data:
        query = query.filter(DD.DK.in_(filter_data['DimDateKey']))

    queryResult = query.group_by(DO.OrgId, DO.OrgName,
                                 DPT.ProjectTypeDescription,
                                 DPST.ProjectSubTypeDescription,
                                 DED.EmployeeName).all()

    summarys = []
    for row in queryResult:
        item = {}
        item["OrgId"] = row[0]
        item["OrgName"] = row[1]
        item["Project Type"] = row[2]
        item["Project Sub Type"] = row[3]
        item["Director"] = row[4]
        item["AmountYetToInvoice"] = row[5]
        summarys.append(item)

    return summarys
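Since the rows are keyed tuples, the manual per-field copy above could also use _asdict(), as the station and district examples below do; note the keys then follow the column labels rather than the display names used here:

    # hypothetical alternative to the per-field copy above
    summarys = [row._asdict() for row in queryResult]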
Example #16
    def aPagarHoje(self):

        try:

            # Opening the session
            conecta = Conexao()
            sessao = conecta.Session()

            # Query
            row = (sessao.query(func.COALESCE(
                func.SUM(ContaAPagar.valor), 0).label('total'))
                .filter(ContaAPagar.data_vencimento == date.today(), ContaAPagar.pagamento == 2))

            # Saving the result
            for row in row:
                self.valorAPagar = row.total

        except IntegrityError as err:
            print(err)

        return self.valorAPagar
def _get_count(text, if_clause):
    """helper to create query below"""
    return label(text, func.SUM(func.IF(if_clause, 1, 0)))
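This helper relies on MySQL's IF(): each row contributes 1 when the clause holds and 0 otherwise, so the SUM becomes a conditional count. A short sketch of how it is used further below, assuming the Revision model, session, and mid_date from that example:

    # count a user's revisions made on or before mid_date
    rev_count_one = _get_count('count_one', Revision.rev_timestamp <= mid_date)
    per_user = session.query(Revision.rev_user, rev_count_one)\
        .group_by(Revision.rev_user)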
Example #18
    def get_hosps_status_counts(self, start_dt, end_dt, history, **kwargs):
        self.start_dt = start_dt
        self.end_dt = end_dt
        self.history = history
        self.hosp_status = safe_int(kwargs.get('hosp_status'))
        self.flt_org_struct_id = safe_int(kwargs.get('org_struct_id'))
        self.flt_client_id = safe_int(kwargs.get('client_id'))
        self.flt_exec_person_id = safe_int(kwargs.get('exec_person_id'))
        self.flt_external_id = kwargs.get('external_id')

        statuses = set(kwargs.get('statuses') or HospStateStatus.get_values())

        self.set_base_query()
        self._join_latest_location()
        self._join_location_org_structure()
        self._join_moving_os_transfer()
        self._join_movings_transfered_through()
        self._join_leaved()

        self.query = self.query.filter(
            or_(self.MovingAction.id.isnot(None),
                self.ReceivedAction.id.isnot(None)), ).with_entities()
        if HospStateStatus.current[0] in statuses:
            # number of currently hospitalized
            self.query = self.query.add_column(
                func.SUM(
                    func.IF(
                        func.IF(
                            self.MovingAction.id.isnot(None),
                            and_(
                                self.MovingAction_begDate < self.end_dt,
                                or_(self.MovingAction_endDate.is_(None),
                                    self.end_dt <= self.MovingAction_endDate),
                                func.IF(
                                    self.flt_org_struct_id is not None,
                                    self.LocationOSfromMoving.id ==
                                    self.flt_org_struct_id, 1)),
                            and_(
                                self.ReceivedAction_begDate < self.end_dt,
                                or_(self.ReceivedAction_endDate.is_(None),
                                    self.end_dt <=
                                    self.ReceivedAction_endDate),
                                func.IF(
                                    self.flt_org_struct_id is not None,
                                    self.LocationOSfromReceived.id ==
                                    self.flt_org_struct_id, 1))), 1,
                        0)).label('count_current'))

        if HospStateStatus.received[0] in statuses:
            # number of admitted
            self.query = self.query.add_column(
                func.SUM(
                    func.IF(
                        func.IF(
                            self.MovingAction.id.isnot(None),
                            and_(
                                self.MovingAction_begDate < self.end_dt,
                                self.start_dt <= self.MovingAction_begDate,
                                or_(self.MovingAction_endDate.is_(None),
                                    self.start_dt <=
                                    self.MovingAction_endDate),
                                func.IF(
                                    self.flt_org_struct_id is not None,
                                    self.LocationOSfromMoving.id ==
                                    self.flt_org_struct_id, 1)),
                            and_(
                                self.ReceivedAction_begDate < self.end_dt,
                                self.start_dt <= self.ReceivedAction_begDate,
                                or_(
                                    self.ReceivedAction_endDate.is_(None),
                                    self.start_dt <=
                                    self.ReceivedAction_endDate),
                                func.IF(
                                    self.flt_org_struct_id is not None,
                                    self.LocationOSfromReceived.id ==
                                    self.flt_org_struct_id, 1))), 1,
                        0)).label('count_received'))

        if HospStateStatus.transferred[0] in statuses:
            # number of transferred
            self.query = self.query.add_column(
                func.SUM(
                    func.IF(
                        self.q_movings_transfered_through.c.event_id.isnot(
                            None), 1, 0)).label('count_transferred'))

        if HospStateStatus.leaved[0] in statuses:
            # number of discharged
            self.query = self.query.add_column(
                func.SUM(
                    func.IF(
                        and_(self.LeavedAction.id.isnot(None),
                             self.MovingOrgStructTransfer.id.is_(None),
                             self.MovingAction_begDate < self.end_dt,
                             self.MovingAction_endDate >= self.start_dt,
                             self.MovingAction_endDate < self.end_dt), 1,
                        0)).label('count_leaved'))

        return self.get_one()
Example #19
def observation(request_type):
    request_datetime = datetime.strptime(
        request.args.get("datetime", "0000000000"), "%Y%m%d%H")  # default must be a string for strptime

    if request_type == 'station':
        # TODO: filter germany
        stations = db.session.query(Station.dwd_id, Station.name, Station.altitude,
                                    Station.region.ST_AsGeoJSON().label('geometry'), Observation.date,
                                    Observation.rainfall, Observation.temperature) \
            .filter(Observation.station_id == Station.id, Observation.date == request_datetime) \
            .all()

        stations = [station._asdict() for station in stations]
        feature_collection = to_feature_collection(stations)

        return json.jsonify(feature_collection)

    elif request_type == "district":
        q = db.session.query(District.id,
                             District.name,
                             District.admin_level,
                             District.geometry.ST_AsGeoJSON().label("geometry"),
                             (func1.SUM(Observation.temperature * ContribDistrict.area)
                                    / func1.SUM(ContribDistrict.area))
                                .label("temperature"),
                             (func1.SUM(Observation.rainfall * ContribDistrict.area)
                                    / func1.SUM(ContribDistrict.area))
                                .label("rainfall"),
                             )\
            .filter(Observation.date == request_datetime,
                    #District.geometry.ST_Intersects('SRID=4326;POINT(%s %s)' % (lon, lat)),
                    #State.geometry.ST_Intersects(Station.geometry),
                    District.id == ContribDistrict.district_id,
                    Station.id == ContribDistrict.station_id,
                    Station.id == Observation.station_id,
                    Observation.rainfall != -999,
                    Observation.temperature != -999
                    )\
            .group_by(District.id)

        districts = q.all()
        districts = [district._asdict() for district in districts]
        feature_collection = to_feature_collection(districts)

        return json.jsonify(feature_collection)

    elif request_type == "state":
        q = db.session.query(State.id,
                         State.name,
                         State.admin_level,
                         State.geometry.ST_AsGeoJSON().label("geometry"),
                         (func1.SUM(Observation.temperature
                                   * ContribState.area)
                                / func1.SUM(ContribState.area))
                            .label("temperature"),
                         (func1.SUM(Observation.rainfall
                                   * ContribState.area)
                                / func1.SUM(ContribState.area))
                            .label("rainfall"),
                         )\
            .filter(Observation.date == request_datetime,
                    #State.geometry.ST_Intersects('SRID=4326;POINT(%s %s)' % (lon, lat)),
                    #State.geometry.ST_Intersects(Station.geometry),
                    State.id == ContribState.state_id,
                    Station.id == ContribState.station_id,
                    Station.id == Observation.station_id,
                    Observation.rainfall != -999,
                    Observation.temperature != -999
                    )\
            .group_by(State.id)

        states = q.all()
        states = [state._asdict() for state in states]
        feature_collection = to_feature_collection(states)

        return json.jsonify(feature_collection)

    abort(404)
Example #20
def info(request_type):
    """ example queries:
        http://127.0.0.1:5000/info/station.json?lon=8.237&lat=52.9335&forecast=False&datetime=2014120100
        http://127.0.0.1:5000/info/district.json?lon=10.237&lat=52.9335&forecast=False&datetime=2014120100
        http://127.0.0.1:5000/info/district.json?lon=9.237&lat=52.9335&forecast=False&datetime=2014120100 (example for no stations in district)
        http://127.0.0.1:5000/info/state.json?lon=13.237&lat=52.435&forecast=False&datetime=201420100
    """
    # todo handle dates without observations properly
    lat = float(request.args.get("lat"))
    lon = float(request.args.get("lon"))
    forecast = request.args.get("forecast") in ['true', 'True']
    # datetime format YYYYMMDDHH (observations use YYYYMMDD00)
    request_datetime = datetime.strptime(
        request.args.get("datetime", "0000000000"), "%Y%m%d%H")  # default must be a string for strptime
    hours = request.args.get(
        "hours",
        None)  # the number of hours into the future a forecast was made

    # If the data isn't a forecast it should ignore the hours of the datetime (set to 00).
    # TODO: discuss if this should handled at the client
    if not forecast:
        request_datetime = request_datetime.replace(hour=0, minute=0)

    if request_type == 'station' and not forecast:
        station = db.session.query(Station.dwd_id,
                                   Station.name,
                                   Station.altitude,
                                   Station.geometry.ST_AsGeoJSON().label('geometry'),
                                   Observation.date,
                                   Observation.rainfall,
                                   Observation.temperature) \
            .filter(Observation.station_id == Station.id,
                    Observation.date == request_datetime,
                    func.ST_Intersects(Station.region, 'SRID=4326;POINT(%s %s)' % (lon, lat))) \
            .first()
        if station is None:
            abort(404)

        feature = to_feature(station._asdict())
        return json.jsonify(feature)

    elif request_type == "district":
        q = db.session.query(District.id,
                             District.name,
                             District.admin_level,
                             District.geometry.ST_AsGeoJSON().label("geometry"),
                             (func1.SUM(Observation.temperature * ContribDistrict.area)
                                    / func1.SUM(ContribDistrict.area))
                                .label("mean_temperature"),
                             (func1.SUM(Observation.rainfall * ContribDistrict.area)
                                    / func1.SUM(ContribDistrict.area))
                                .label("mean_rainfall"),
                             )\
            .filter(Observation.date == request_datetime,
                    District.geometry.ST_Intersects('SRID=4326;POINT(%s %s)' % (lon, lat)),
                    #State.geometry.ST_Intersects(Station.geometry),
                    District.id == ContribDistrict.district_id,
                    Station.id == ContribDistrict.station_id,
                    Station.id == Observation.station_id,
                    Observation.rainfall != -999,
                    Observation.temperature != -999
                    )\
            .group_by(District.id)

        response = q.first()
        response = to_feature(response._asdict())
        return json.jsonify(response)

    elif request_type == "state":
        q = db.session.query(State.id,
                             State.name,
                             State.admin_level,
                             State.geometry.ST_AsGeoJSON().label("geometry"),
                             (func1.SUM(Observation.temperature
                                       * ContribState.area)
                                    / func1.SUM(ContribState.area))
                                .label("mean_temperature"),
                             (func1.SUM(Observation.rainfall
                                       * ContribState.area)
                                    / func1.SUM(ContribState.area))
                                .label("mean_rainfall"),
                             )\
            .filter(Observation.date == request_datetime,
                    State.geometry.ST_Intersects('SRID=4326;POINT(%s %s)' % (lon, lat)),
                    #State.geometry.ST_Intersects(Station.geometry),
                    State.id == ContribState.state_id,
                    Station.id == ContribState.station_id,
                    Station.id == Observation.station_id,
                    Observation.rainfall != -999,
                    Observation.temperature != -999
                    )\
            .group_by(State.id)

        response = q.first()
        response = to_feature(response._asdict())
        return json.jsonify(response)

    else:
        abort(404)
def compute_features_from_osm(config):

    osm_tables = config['OSM']
    bounding_box = WKTElement(config['BOUNDING_BOX'], srid=4326)
    grid_obj = config['GRID_OBJ']
    geo_feature_obj = config['GEO_FEATURE_OBJ']

    try:
        for feature_name, osm_table in osm_tables.items():
            geo_feature_type = osm_table.wkb_geometry.type.geometry_type
            cropped_osm = crop_osm(
                osm_table,
                bounding_box)  # crop the OSM data with a bounding box

            sub_query = session.query(grid_obj.gid, cropped_osm.c.fclass,
                                      func.ST_GeogFromWKB(
                                          func.ST_Intersection(grid_obj.geom, cropped_osm.c.wkb_geometry))
                                      .label('intersection')) \
                .filter(func.ST_Intersects(grid_obj.geom, cropped_osm.c.wkb_geometry)).subquery()

            results = []
            if geo_feature_type == 'MULTIPOLYGON':
                results = session.query(sub_query.c.gid.label('gid'),
                                        sub_query.c.fclass.label('feature_type'),
                                        literal(feature_name).label('geo_feature'),
                                        func.SUM(func.ST_AREA(sub_query.c.intersection)).label('value'),
                                        literal('area').label('measurement')) \
                    .group_by(sub_query.c.gid, sub_query.c.fclass).all()

            elif geo_feature_type == 'MULTILINESTRING':
                results = session.query(sub_query.c.gid.label('gid'),
                                        sub_query.c.fclass.label('feature_type'),
                                        literal(feature_name).label('geo_feature'),
                                        func.SUM(func.ST_LENGTH(sub_query.c.intersection)).label('value'),
                                        literal('length').label('measurement')) \
                    .group_by(sub_query.c.gid, sub_query.c.fclass).all()

            elif geo_feature_type == 'POINT':
                results = session.query(sub_query.c.gid.label('gid'),
                                        sub_query.c.fclass.label('feature_type'),
                                        literal(feature_name).label('geo_feature'),
                                        func.COUNT(sub_query.c.intersection).label('value'),
                                        literal('count').label('measurement')) \
                    .group_by(sub_query.c.gid, sub_query.c.fclass).all()

            else:
                pass

            obj_results = []
            for res in results:
                obj_results.append(
                    geo_feature_obj(gid=res[0],
                                    feature_type=res[1],
                                    geo_feature=res[2],
                                    value=res[3],
                                    measurement=res[4]))
            # session.add_all(obj_results)
            # session.commit()
            print('{} has finished'.format(feature_name))

        return

    except Exception as e:
        print(e)
        exit(-1)
    def __call__(self, user_ids, session):
        """
        Parameters:
            user_ids    : list of mediawiki user ids to restrict computation to
            session     : sqlalchemy session open on a mediawiki database

        Returns:
            {
                user id: 1 if they're a rolling surviving new active editor, 0 otherwise
                for all cohort users, or all users that have edits in the time period
            }
        """
        number_of_edits = int(self.number_of_edits.data)
        rolling_days = int(self.rolling_days.data)
        end_date = self.end_date.data
        mid_date = end_date - timedelta(days=rolling_days)
        start_date = end_date - timedelta(days=rolling_days * 2)

        newly_registered = session.query(Logging.log_user) \
            .filter(Logging.log_type == 'newusers') \
            .filter(Logging.log_action == 'create') \
            .filter(between(Logging.log_timestamp, start_date, mid_date))

        # subquery to select only the users registered between start and mid date
        filtered_new = self.filter(newly_registered,
                                   user_ids,
                                   column=Logging.log_user).subquery()

        rev_user = label('user_id', Revision.rev_user)
        ar_user = label('user_id', Archive.ar_user)
        # count edits between start and mid date, for Revision
        rev_count_one = _get_count('count_one',
                                   Revision.rev_timestamp <= mid_date)
        # count edits between start and mid date, for Archive
        ar_count_one = _get_count('count_one',
                                  Archive.ar_timestamp <= mid_date)
        # count edits between mid and end date, for Revision
        rev_count_two = _get_count('count_two',
                                   Revision.rev_timestamp > mid_date)
        # count edits between mid and end date, for Archive
        ar_count_two = _get_count('count_two', Archive.ar_timestamp > mid_date)

        # get both counts by user for Revision
        revisions = session.query(rev_user, rev_count_one, rev_count_two)\
            .filter(between(Revision.rev_timestamp, start_date, end_date))\
            .filter(Revision.rev_user.in_(filtered_new))\
            .group_by(Revision.rev_user)

        # get both counts by user for Archive
        archived = session.query(ar_user, ar_count_one, ar_count_two)\
            .filter(between(Archive.ar_timestamp, start_date, end_date))\
            .filter(Archive.ar_user.in_(filtered_new))\
            .group_by(Archive.ar_user)

        bot_user_ids = session.query(MediawikiUserGroups.ug_user)\
            .filter(MediawikiUserGroups.ug_group == 'bot')\
            .subquery()

        # For each user, with both counts from both tables,
        #   sum the count_one values together, check it's >= number_of_edits
        #   sum the count_two values together, check it's >= number_of_edits
        new_edits = revisions.union_all(archived).subquery()
        new_edits_by_user = session.query(new_edits.c.user_id)\
            .filter(new_edits.c.user_id.notin_(bot_user_ids))\
            .group_by(new_edits.c.user_id)\
            .having(and_(
                func.SUM(new_edits.c.count_one) >= number_of_edits,
                func.SUM(new_edits.c.count_two) >= number_of_edits,
            ))

        metric_results = {r[0]: {self.id: 1} for r in new_edits_by_user.all()}

        if user_ids is None:
            return metric_results
        else:
            return {
                uid: metric_results.get(uid, self.default_result)
                for uid in user_ids
            }