def getCountForCashConversionChart(self):
		session = loadSession()
		count = session.query(
							   PaymVoucFroz.c.revision,
							   PaymVoucFroz.c.charged_by_time,
							   PaymVoucFroz.c.announced_by_time,
							   PaymVoucFroz.c.payed_by_time,
							   PaymVoucFroz.c.announced_by_doc_time,
							   PaymVoucFroz.c.payed_by_doc_time,
							   PaymVoucFroz.c.booked_charged_by_time,
							   PaymVoucFroz.c.booked_payed_by_time,
							   PaymVoucFroz.c.charged_by_netto_sums,
							   PaymVoucFroz.c.frozen_val,
							   PaymVouc.c.belegdatum,
							   PaymVouc.c.belegid,
							   PaymVouc.c.valid_until,
							   PaymVouc.c.primanotaid,
							   PaymVouc.c.vunr,
							   PaymVouc.c.suppliersettled_at,
							   PaymVouc.c.charged_by,
							   ShopData.c.vunr,
							   ShopData.c.objectname)\
			.filter(PaymVouc.c.belegid == PaymVoucFroz.c.belegid,
					PaymVoucFroz.c.revision == 0,
					ShopData.c.vunr == PaymVouc.c.vunr,
					and_(
						func.to_date(PaymVouc.c.belegdatum, 'YYMMDD') >= self.__st_date,
						func.to_date(PaymVouc.c.belegdatum, 'YYMMDD') <= self.__end_date),
					# assumes `from sqlalchemy import true`; fall back to a no-op condition when no id is supplied
					PaymVouc.c.corporateno == self.__corporateId if self.__corporateId else true(),
					PaymVouc.c.vunr == self.__supplierId if self.__supplierId else true())\
			.distinct()\
			.count()
		session.close()
		return count
	def getDataForDeadLine(self):
		session = loadSession()
		select = session.query(PaymVouc.c.corporateno,
							   PaymVouc.c.belegdatum,
							   PaymVouc.c.belegid,
							   PaymVouc.c.vunr,
							   PaymVouc.c.valid_until,
							   PaymVouc.c.suppliersettled_at,
							   PaymVoucFroz.c.revision,
							   PaymVoucFroz.c.payed_by_time,
							   PaymVoucFroz.c.payed_by_doc_time,
							   PaymVoucFroz.c.frozen_val,
							   PaymVoucFroz.c.announced_by_doc_time,
							   PaymVoucFroz.c.booked_payed_by_time,
							   PaymVoucFroz.c.booked_charged_by_time,
							   PaymVoucFroz.c.charged_by_doc,
							   ShopData.c.vunr,
							   ShopData.c.objectname)\
			.filter(PaymVouc.c.belegid == PaymVoucFroz.c.belegid,
					PaymVoucFroz.c.revision == 0,
					ShopData.c.vunr == PaymVouc.c.vunr,
					and_(
						func.to_date(PaymVouc.c.belegdatum, 'YYMMDD') >= self.__st_date,
						func.to_date(PaymVouc.c.belegdatum, 'YYMMDD') <= self.__end_date),
					# assumes `from sqlalchemy import true`; fall back to a no-op condition when no id is supplied
					PaymVouc.c.corporateno == self.__corporateId if self.__corporateId else true(),
					PaymVouc.c.vunr == self.__supplierId if self.__supplierId else true())\
			.distinct()
		session.close()
		return select.all()
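Both methods above filter `belegdatum`, a 'YYMMDD' string column, by converting it with TO_DATE before comparing it against the report period. A minimal standalone sketch of that predicate (the bounds here are hypothetical; the real code takes them from `self.__st_date` / `self.__end_date`):

from datetime import date

from sqlalchemy import and_, func

st_date = date(2020, 1, 1)      # hypothetical report period
end_date = date(2020, 12, 31)

# belegdatum is stored as a 'YYMMDD' string, so it is converted with TO_DATE
# before being compared against the Python date bounds
date_range = and_(
    func.to_date(PaymVouc.c.belegdatum, 'YYMMDD') >= st_date,
    func.to_date(PaymVouc.c.belegdatum, 'YYMMDD') <= end_date,
)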
def check_value_type(value):
    if isinstance(value, datetime.datetime):
        return func.to_date(value, "yyyy-mm-dd hh24:mi:ss")
    elif isinstance(value, datetime.date):
        return func.to_date(value, "yyyy-mm-dd")
    else:
        return value
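A minimal usage sketch for `check_value_type`, against a hypothetical `orders` table (the table, column names and cutoff date are assumptions, not part of the original code):

import datetime

from sqlalchemy import Column, Date, MetaData, String, Table, select

# hypothetical table, for illustration only
metadata = MetaData()
orders = Table(
    'orders', metadata,
    Column('order_no', String(20)),
    Column('created_at', Date),
)

# the helper above wraps Python date/datetime values in TO_DATE(...) so the
# comparison happens on database DATE values; any other value passes through
cutoff = check_value_type(datetime.date(2021, 6, 30))
stmt = select([orders.c.order_no]).where(orders.c.created_at <= cutoff)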
Example #4
    def get_ttd_restitusi_kompensasi(cls, kantor):
        sekarang = datetime.now().strftime("%Y-%m-%d")
        return Pegawai.query().\
                    filter(cls.nip == Pegawai.nip).\
                    filter(cls.kd_kanwil == kantor['kd_kanwil']).\
                    filter(cls.kd_kantor == kantor['kd_kantor']).\
                    filter(cls.kd_jabatan == '10', cls.kd_wewenang == '10').\
                    filter(cls.tgl_awal_berlaku <= func.to_date(sekarang, 'YYYY-MM-DD'),
                        cls.tgl_akhir_berlaku >= func.to_date(sekarang, 'YYYY-MM-DD')).\
                    first()


#END OF SCRIPT
def AddNewClass(batchcode,spycode,segmentcode,learningcentercode):
        i=1
        for tcprow in session.query(tcp).filter_by(batchcode=batchcode).\
                                     filter_by(spycode = spycode):
                  scode = batch[2:4]+str(learningcentercode)+tcprow.studenttype+tcprow.professionallevel
                  retclass= session.query(classinfo).filter_by(spycode=tcprow.spycode).\
                                                           filter_by(studentcategory=str(tcprow.studenttype)+str(tcprow.professionallevel)).\
                                                           filter_by(learningcentercode=learningcentercode).\
                                                           filter_by(batchcode=batchcode).first()
                  classname='分部'+str(segmentcode)+':'+batch[2:6]
                  if retclass is None:
                       newclasscode = scode+getMaxClass(scode)
                       print('add new class %s'%(newclasscode,))

                       retspy = session.query(spyinfo).filter_by(spycode=spycode).first()
                       if retspy is not None:
                          classname=classname+retspy.spyname
                          seq=Sequence('org_class_seq')
                          nextid = engine.execute(seq)
                          # datetime.datetime.now must be called and formatted as a string so Oracle TO_DATE can parse it
                          dt = func.to_date(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                                            'yyyy-mm-dd hh24:mi:ss')
                          #print(nextid)
                          newclass = classinfo(classid=nextid,batchcode=batchcode,learningcentercode=learningcentercode,classcode=newclasscode,classname=classname,studentcategory=str(tcprow.studenttype)+str(tcprow.professionallevel),spycode=spycode,professionallevel=tcprow.professionallevel,examsitecode=learningcentercode,classteacher='')
                          session.add(newclass)
                          #session.flush()
                          session.commit()
                          
                  else :
                       classname=retclass.classname
                                            
                  print('classname : %s  '  % (classname,))
Example #6
def AddNewClass(batchcode,spycode,segmentcode,learningcentercode):
        i=1
        for tcprow in session.query(tcp).filter_by(batchcode=batchcode).\
                                     filter_by(spycode = spycode):
                  scode = batch[2:4]+str(learningcentercode)+tcprow.studenttype+tcprow.professionallevel
                  retclass= session.query(classinfo).filter_by(spycode=tcprow.spycode).\
                                                           filter_by(studentcategory=str(tcprow.studenttype)+str(tcprow.professionallevel)).\
                                                           filter_by(learningcentercode=learningcentercode).\
                                                           filter_by(batchcode=batchcode).first()
                  classname='分部'+str(segmentcode)+':'+batch[2:6]
                  if retclass is None:
                       newclasscode = scode+getMaxClass(scode)
                       print('add new class %s'%(newclasscode,))

                       retspy = session.query(spyinfo).filter_by(spycode=spycode).first()
                       if retspy is not None:
                          classname=classname+retspy.spyname
                          seq=Sequence('org_class_seq')
                          nextid = engine.execute(seq)
                          # datetime.datetime.now must be called and formatted as a string so Oracle TO_DATE can parse it
                          dt = func.to_date(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                                            'yyyy-mm-dd hh24:mi:ss')
                          dt2 ='20'+ time.strftime('%y/%m/%d',time.localtime())
                          #print(nextid)
                          newclass = classinfo(classid=nextid,batchcode=batchcode,learningcentercode=learningcentercode,classcode=newclasscode,classname=classname,studentcategory=str(tcprow.studenttype)+str(tcprow.professionallevel),spycode=spycode,professionallevel=tcprow.professionallevel,examsitecode=learningcentercode,classteacher='',createtime=func.now())
                          session.add(newclass)
                          #session.flush()
                          session.commit()
                          
                  else :
                       classname=retclass.classname
                                            
                  print('classname : %s  '  % (classname,))
    def get_suppliers_with_expiring_documents(self, days):
        today = datetime.now(pytz.timezone('Australia/Sydney'))

        # Find out which of the supplier's documents have expired or are expiring soon
        liability = (select([Supplier.code, Supplier.name, literal('liability').label('type'),
                             Supplier.data['documents']['liability']['expiry'].astext.label('expiry')])
                     .where(and_(Supplier.data['documents']['liability']['expiry'].isnot(None),
                                 func.to_date(Supplier.data['documents']['liability']['expiry'].astext, 'YYYY-MM-DD') ==
                                 (today.date() + timedelta(days=days)))))
        workers = (select([Supplier.code, Supplier.name, literal('workers').label('type'),
                           Supplier.data['documents']['workers']['expiry'].astext.label('expiry')])
                   .where(and_(Supplier.data['documents']['workers']['expiry'].isnot(None),
                               func.to_date(Supplier.data['documents']['workers']['expiry'].astext, 'YYYY-MM-DD') ==
                               (today.date() + timedelta(days=days)))))

        expiry_dates = union(liability, workers).alias('expiry_dates')

        # Aggregate the document details so they can be returned with the results
        documents = (db.session.query(expiry_dates.columns.code, expiry_dates.columns.name,
                                      func.json_agg(
                                          func.json_build_object(
                                              'type', expiry_dates.columns.type,
                                              'expiry', expiry_dates.columns.expiry)).label('documents'))
                     .group_by(expiry_dates.columns.code, expiry_dates.columns.name)
                     .subquery('expired_documents'))

        # Find email addresses associated with the supplier
        email_addresses = self.get_supplier_contacts_union()

        # Aggregate the email addresses so they can be returned with the results
        aggregated_emails = (db.session.query(email_addresses.columns.code,
                                              func.json_agg(
                                                  email_addresses.columns.email_address
                                              ).label('email_addresses'))
                             .group_by(email_addresses.columns.code)
                             .subquery())

        # Combine the list of email addresses and documents
        results = (db.session.query(documents.columns.code, documents.columns.name, documents.columns.documents,
                                    aggregated_emails.columns.email_addresses)
                   .join(aggregated_emails,
                         documents.columns.code == aggregated_emails.columns.code)
                   .order_by(documents.columns.code)
                   .all())

        return [r._asdict() for r in results]
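The core of the query above is the comparison of a JSONB expiry string against a single target date. A cut-down sketch of that predicate alone, assuming the same `Supplier` model and `db` session and a hypothetical 28-day notice period:

from datetime import date, timedelta

from sqlalchemy import and_, func

notice_days = 28  # hypothetical; the method receives this as `days`

liability_expiring = (
    db.session.query(Supplier.code, Supplier.name)
    .filter(
        and_(
            Supplier.data['documents']['liability']['expiry'].isnot(None),
            # the expiry is stored as a 'YYYY-MM-DD' string inside the JSONB
            # document, so it is cast with TO_DATE before the comparison
            func.to_date(
                Supplier.data['documents']['liability']['expiry'].astext,
                'YYYY-MM-DD',
            ) == date.today() + timedelta(days=notice_days),
        )
    )
    .all()
)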
Example #9
def filter_query_all_filters(model, q, filters, user):
    """
    Return a query filtered with the CRUVED and all
    the filters available in the synthese form
    parameters:
        - q (SQLAlchemy Query): an SQLAlchemy query
        - filters (dict): a dict of filters
        - user (User): a user object from User
        - allowed datasets (List<int>): a list of dataset IDs the user is authorized for

    """
    q = filter_query_with_cruved(model, q, user)

    if "observers" in filters:
        q = q.filter(model.observers.ilike("%" + filters.pop("observers")[0] + "%"))

    if "date_min" in filters:
        q = q.filter(model.date_min >= filters.pop("date_min")[0])

    if "date_max" in filters:
        q = q.filter(model.date_min <= filters.pop("date_max")[0])

    if "id_acquisition_frameworks" in filters:
        q = q.join(
            TAcquisitionFramework,
            model.id_acquisition_framework
            == TAcquisitionFramework.id_acquisition_framework,
        )
        q = q.filter(
            TAcquisitionFramework.id_acquisition_framework.in_(
                filters.pop("id_acquisition_frameworks")
            )
        )

    if "geoIntersection" in filters:
        # Intersect with the geom sent from the map
        ors = []
        for str_wkt in filters["geoIntersection"]:
            # if the geom is a circle
            if "radius" in filters:
                radius = filters.pop("radius")[0]
                wkt = circle_from_point(wkt, float(radius))
            else:
                wkt = loads(str_wkt)
            geom_wkb = from_shape(wkt, srid=4326)
            ors.append(model.the_geom_4326.ST_Intersects(geom_wkb))

        q = q.filter(or_(*ors))
        filters.pop("geoIntersection")

    if "period_start" in filters and "period_end" in filters:
        period_start = filters.pop("period_start")[0]
        period_end = filters.pop("period_end")[0]
        q = q.filter(
            or_(
                func.gn_commons.is_in_period(
                    func.date(model.date_min),
                    func.to_date(period_start, "DD-MM"),
                    func.to_date(period_end, "DD-MM"),
                ),
                func.gn_commons.is_in_period(
                    func.date(model.date_max),
                    func.to_date(period_start, "DD-MM"),
                    func.to_date(period_end, "DD-MM"),
                ),
            )
        )
    q, filters = filter_taxonomy(model, q, filters)

    # generic filters
    join_on_cor_area = False
    for colname, value in filters.items():
        if colname.startswith("area"):
            if not join_on_cor_area:
                q = q.join(
                    CorAreaSynthese, CorAreaSynthese.id_synthese == model.id_synthese
                )
            q = q.filter(CorAreaSynthese.id_area.in_(value))
            join_on_cor_area = True
        elif colname.startswith("modif_since_validation"):
            q = q.filter(model.meta_update_date > model.validation_date)
        else:
            col = getattr(model.__table__.columns, colname)
            q = q.filter(col.in_(value))
    return q
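A hedged sketch of how such a filter function might be invoked; the filter keys and value shapes ('DD-MM' period bounds, lists of strings) follow the branches above, while `Synthese`, `DB` and `current_user` are stand-ins for the real objects:

# hypothetical caller: every value is a list, as the pops above expect
filters = {
    "observers": ["Dupont"],
    "date_min": ["2021-01-01"],
    "date_max": ["2021-12-31"],
    # recurring period compared with TO_DATE(..., 'DD-MM'), i.e. ignoring the year
    "period_start": ["01-03"],
    "period_end": ["30-06"],
}
q = DB.session.query(Synthese)
q = filter_query_all_filters(Synthese, q, filters, current_user)
results = q.limit(100).all()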
Example #10
def filter_query_all_filters(model, q, filters, user):
    """
    Return a query filtered with the CRUVED and all
    the filters available in the synthese form
    parameters:
        - q (SQLAlchemy Query): an SQLAlchemy query
        - filters (dict): a dict of filters
        - user (User): a user object from User
        - allowed datasets (List<int>): a list of dataset IDs the user is authorized for

    """
    q = filter_query_with_cruved(model, q, user)

    if "observers" in filters:
        q = q.filter(
            model.observers.ilike("%" + filters.pop("observers")[0] + "%"))

    if "id_organism" in filters:
        id_datasets = (DB.session.query(CorDatasetActor.id_dataset).filter(
            CorDatasetActor.id_organism.in_(filters.pop("id_organism"))).all())
        formated_datasets = [d[0] for d in id_datasets]
        q = q.filter(model.id_dataset.in_(formated_datasets))

    if "date_min" in filters:
        q = q.filter(model.date_min >= filters.pop("date_min")[0])

    if "date_max" in filters:
        # set date_max to 23:59 because an hour can be set in the timestamp
        date_max = datetime.datetime.strptime(
            filters.pop("date_max")[0], '%Y-%m-%d')
        date_max = date_max.replace(hour=23, minute=59, second=59)
        q = q.filter(model.date_max <= date_max)

    if "id_acquisition_framework" in filters:
        q = q.join(
            TAcquisitionFramework,
            model.id_acquisition_framework ==
            TAcquisitionFramework.id_acquisition_framework,
        )
        q = q.filter(
            TAcquisitionFramework.id_acquisition_framework.in_(
                filters.pop("id_acquisition_framework")))

    if "geoIntersection" in filters:
        # Intersect with the geom sent from the map
        ors = []
        for str_wkt in filters["geoIntersection"]:
            # if the geom is a circle
            if "radius" in filters:
                radius = filters.pop("radius")[0]
                wkt = loads(str_wkt)
                wkt = circle_from_point(wkt, float(radius))
            else:
                wkt = loads(str_wkt)
            geom_wkb = from_shape(wkt, srid=4326)
            ors.append(model.the_geom_4326.ST_Intersects(geom_wkb))

        q = q.filter(or_(*ors))
        filters.pop("geoIntersection")

    if "period_start" in filters and "period_end" in filters:
        period_start = filters.pop("period_start")[0]
        period_end = filters.pop("period_end")[0]
        q = q.filter(
            or_(
                func.gn_commons.is_in_period(
                    func.date(model.date_min),
                    func.to_date(period_start, "DD-MM"),
                    func.to_date(period_end, "DD-MM"),
                ),
                func.gn_commons.is_in_period(
                    func.date(model.date_max),
                    func.to_date(period_start, "DD-MM"),
                    func.to_date(period_end, "DD-MM"),
                ),
            ))
    q, filters = filter_taxonomy(model, q, filters)

    # generic filters
    join_on_cor_area = False
    for colname, value in filters.items():
        if colname.startswith("area"):
            if not join_on_cor_area:
                q = q.join(CorAreaSynthese,
                           CorAreaSynthese.id_synthese == model.id_synthese)
            q = q.filter(CorAreaSynthese.id_area.in_(value))
            join_on_cor_area = True
        else:
            col = getattr(model.__table__.columns, colname)
            q = q.filter(col.in_(value))
    return q
Example #11
    def filter_other_filters(self):
        """
            Other filters
        """
        if "id_dataset" in self.filters:
            self.query = self.query.where(
                self.model.id_dataset.in_(self.filters.pop("id_dataset")))
        if "observers" in self.filters:
            # split the input elements on whitespace
            observers = (self.filters.pop("observers")[0]).split()
            self.query = self.query.where(
                and_(*[
                    self.model.observers.ilike("%" + observer + "%")
                    for observer in observers
                ]))

        if "observers_list" in self.filters:
            self.query = self.query.where(
                and_(*[
                    self.model.observers.ilike("%" +
                                               observer.get('nom_complet') +
                                               "%")
                    for observer in self.filters.pop("observers_list")
                ]))

        if "id_organism" in self.filters:
            datasets = (DB.session.query(CorDatasetActor.id_dataset).filter(
                CorDatasetActor.id_organism.in_(
                    self.filters.pop("id_organism"))).all())
            formated_datasets = [d[0] for d in datasets]
            self.query = self.query.where(
                self.model.id_dataset.in_(formated_datasets))
        if "date_min" in self.filters:
            self.query = self.query.where(
                self.model.date_min >= self.filters.pop("date_min")[0])

        if "date_max" in self.filters:
            # set date_max to 23:59 because an hour can be set in the timestamp
            date_max = datetime.datetime.strptime(
                self.filters.pop("date_max")[0], '%Y-%m-%d')
            date_max = date_max.replace(hour=23, minute=59, second=59)
            self.query = self.query.where(self.model.date_max <= date_max)

        if "id_acquisition_framework" in self.filters:
            self.query = self.query.where(
                self.model.id_acquisition_framework.in_(
                    self.filters.pop("id_acquisition_framework")))

        if "geoIntersection" in self.filters:
            # Intersect with the geom sent from the map
            ors = []
            for str_wkt in self.filters["geoIntersection"]:
                # if the geom is a circle
                if "radius" in self.filters:
                    radius = self.filters.pop("radius")[0]
                    wkt = loads(str_wkt)
                    wkt = circle_from_point(wkt, float(radius))
                else:
                    wkt = loads(str_wkt)
                geom_wkb = from_shape(wkt, srid=4326)
                ors.append(self.model.the_geom_4326.ST_Intersects(geom_wkb))

            self.query = self.query.where(or_(*ors))
            self.filters.pop("geoIntersection")

        if "period_start" in self.filters and "period_end" in self.filters:
            period_start = self.filters.pop("period_start")[0]
            period_end = self.filters.pop("period_end")[0]
            self.query = self.query.where(
                or_(
                    func.gn_commons.is_in_period(
                        func.date(self.model.date_min),
                        func.to_date(period_start, "DD-MM"),
                        func.to_date(period_end, "DD-MM"),
                    ),
                    func.gn_commons.is_in_period(
                        func.date(self.model.date_max),
                        func.to_date(period_start, "DD-MM"),
                        func.to_date(period_end, "DD-MM"),
                    ),
                ))
        # used by the validation module since the class is factorized
        if "modif_since_validation" in self.filters:
            self.query = self.query.where(
                self.model.meta_update_date > self.model.validation_date)
            self.filters.pop("modif_since_validation")

        # generic filters
        for colname, value in self.filters.items():
            if colname.startswith("area"):
                self.add_join(CorAreaSynthese, CorAreaSynthese.id_synthese,
                              self.model.id_synthese)
                self.query = self.query.where(
                    CorAreaSynthese.id_area.in_(value))
            elif colname.startswith("id_"):
                col = getattr(self.model.__table__.columns, colname)
                self.query = self.query.where(col.in_(value))
            else:
                col = getattr(self.model.__table__.columns, colname)
                self.query = self.query.where(
                    col.ilike("%{}%".format(value[0])))
Example #12
def filter_query_all_filters(model, q, filters, user):
    """
    Return a query filtered with the CRUVED and all
    the filters available in the synthese form
    parameters:
        - q (SQLAlchemy Query): an SQLAlchemy query
        - filters (dict): a dict of filters
        - user (User): a user object from User
        - allowed datasets (List<int>): a list of dataset IDs the user is authorized for

    """
    q = filter_query_with_cruved(model, q, user)

    if "observers" in filters:
        q = q.filter(model.observers.ilike("%" + filters.pop("observers")[0] + "%"))

    if "id_organism" in filters:
        id_datasets = (
            DB.session.query(CorDatasetActor.id_dataset)
            .filter(CorDatasetActor.id_organism.in_(filters.pop("id_organism")))
            .all()
        )
        formated_datasets = [d[0] for d in id_datasets]
        q = q.filter(model.id_dataset.in_(formated_datasets))

    if "date_min" in filters:
        q = q.filter(model.date_min >= filters.pop("date_min")[0])

    if "date_max" in filters:
        q = q.filter(model.date_min <= filters.pop("date_max")[0])

    if "id_acquisition_framework" in filters:
        q = q.join(
            TAcquisitionFramework,
            model.id_acquisition_framework
            == TAcquisitionFramework.id_acquisition_framework,
        )
        q = q.filter(
            TAcquisitionFramework.id_acquisition_framework.in_(
                filters.pop("id_acquisition_frameworks")
            )
        )

    if "geoIntersection" in filters:
        # Intersect with the geom sent from the map
        ors = []
        for str_wkt in filters["geoIntersection"]:
            # if the geom is a circle
            if "radius" in filters:
                radius = filters.pop("radius")[0]
                wkt = loads(str_wkt)
                wkt = circle_from_point(wkt, float(radius))
            else:
                wkt = loads(str_wkt)
            geom_wkb = from_shape(wkt, srid=4326)
            ors.append(model.the_geom_4326.ST_Intersects(geom_wkb))

        q = q.filter(or_(*ors))
        filters.pop("geoIntersection")

    if "period_start" in filters and "period_end" in filters:
        period_start = filters.pop("period_start")[0]
        period_end = filters.pop("period_end")[0]
        q = q.filter(
            or_(
                func.gn_commons.is_in_period(
                    func.date(model.date_min),
                    func.to_date(period_start, "DD-MM"),
                    func.to_date(period_end, "DD-MM"),
                ),
                func.gn_commons.is_in_period(
                    func.date(model.date_max),
                    func.to_date(period_start, "DD-MM"),
                    func.to_date(period_end, "DD-MM"),
                ),
            )
        )
    q, filters = filter_taxonomy(model, q, filters)

    # generic filters
    join_on_cor_area = False
    for colname, value in filters.items():
        if colname.startswith("area"):
            if not join_on_cor_area:
                q = q.join(
                    CorAreaSynthese, CorAreaSynthese.id_synthese == model.id_synthese
                )
            q = q.filter(CorAreaSynthese.id_area.in_(value))
            join_on_cor_area = True
        else:
            col = getattr(model.__table__.columns, colname)
            q = q.filter(col.in_(value))
    return q
Example #13
def latest_query(db,
                 var_id,
                 identifier_id,
                 start_date,
                 end_date,
                 location,
                 allowed_location=1,
                 level=None,
                 weeks=False,
                 date_variable=None,
                 week_offset=0):
    """
    To query register-like data where we want to get the latest value.

    E.g. if the number of beds is updated each week and we want the latest
    number, we take the latest value per clinic.

    Args:
        var_id: Variable id to get last of
        identifier_id: Id to identify which records we should use
        start_date: Start date
        end_date: End date
        location: Location to restrict to
        date_variable: if None we use the date from Data, otherwise the indicated variable
        weeks: True if we want a breakdown by weeks.
    Returns:
       result(dict): Dictionary with results. Always has total key, and if
                     level was given there is a level key with the data
                     breakdown
    """
    if allowed_location == 1:
        if g:
            allowed_location = g.allowed_location
    if not is_allowed_location(location, allowed_location):
        return {}
    location_condtion = [
        or_(loc == location for loc in (Data.country, Data.zone, Data.region,
                                        Data.district, Data.clinic))
    ]
    if date_variable:
        date_conditions = [
            func.to_date(Data.variables[date_variable].astext,
                         "YYYY-MM-DDTHH-MI-SS") >= start_date,
            func.to_date(Data.variables[date_variable].astext,
                         "YYYY-MM-DDTHH-MI-SS") < end_date
        ]
    else:
        date_conditions = [Data.date >= start_date, Data.date < end_date]
    conditions = location_condtion + date_conditions + [
        Data.variables.has_key(identifier_id)
    ]

    if weeks:
        epi_year_start = meerkat_abacus.util.epi_week.epi_year_start_date(
            start_date)
        if date_variable:
            c = func.floor(
                extract(
                    'days',
                    func.to_date(Data.variables[date_variable].astext,
                                 "YYYY-MM-DDTHH-MI-SS") - epi_year_start) / 7 +
                1).label("week")
        else:
            c = func.floor(
                extract('days', Data.date - epi_year_start) / 7 +
                1).label("week")
        # This query selects the latest record for each clinic for each week
        # that has the variable identifier_id
        query = db.session.query(
            Data.clinic, c, Data.date,
            Data.region, Data.district, Data.variables).distinct(
                Data.clinic, c).filter(*conditions).order_by(
                    Data.clinic).order_by(c).order_by(Data.date.desc())
        ret = {
            "total": 0,
            "weeks": {},
            "district": {},
            "clinic": {},
            "region": {}
        }

        for r in query:
            val = r.variables.get(var_id, 0)
            ret["total"] += val
            week = int(r.week) - week_offset
            ret["weeks"].setdefault(week, 0)
            ret["weeks"][week] += val

            ret["clinic"].setdefault(r.clinic, {"total": 0, "weeks": {}})
            ret["clinic"][r.clinic]["total"] += val
            ret["clinic"][r.clinic]["weeks"][week] = val
            ret["district"].setdefault(r.district, {"total": 0, "weeks": {}})
            ret["district"][r.district]["total"] += val
            ret["district"][r.district]["weeks"][week] = +val
            ret["region"].setdefault(r.region, {"total": 0, "weeks": {}})
            ret["region"][r.region]["total"] += val
            ret["region"][r.region]["weeks"][week] = +val
        return ret
    else:
        # This query selects the latest record for each clinic
        # that has the variable identifier_id
        query = db.session.query(
            Data.clinic, Data.date, Data.region, Data.district,
            Data.variables).distinct(Data.clinic).filter(*conditions).order_by(
                Data.clinic).order_by(Data.date.desc())

        ret = {"total": 0, "clinic": {}, "district": {}, "region": {}}
        for r in query:
            val = r.variables.get(var_id, 0)
            ret["total"] += val
            ret["clinic"][r.clinic] = val
            ret["district"].setdefault(r.district, 0)
            ret["district"][r.district] += val
            ret["region"].setdefault(r.region, 0)
            ret["region"][r.region] += val
        return ret
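The non-weekly branch of `latest_query` relies on PostgreSQL's DISTINCT ON to keep a single newest row per clinic. A stripped-down sketch of that pattern, assuming the same `Data` model and a hypothetical identifier variable `'tot_beds'`:

# newest row per clinic that carries the identifier variable
latest_per_clinic = (
    db.session.query(Data.clinic, Data.date, Data.variables)
    .distinct(Data.clinic)                          # DISTINCT ON (clinic)
    .filter(Data.variables.has_key("tot_beds"))     # hypothetical identifier_id
    .order_by(Data.clinic, Data.date.desc())        # newest row wins per clinic
    .all()
)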
Example #14
def populateHourlyUtil():
    try:
        # Getting current date and current hour
        currDT = str(dt.now())
        currDate = currDT[0:currDT.find(' ')]
        currHour = currDT[currDT.find(' '):currDT.find(':')].strip()
        currHourInt = int(currHour)

        # Getting capacity of the current parking lot
        activePL = ParkingLot.query.filter_by(pl_active='t').first()
        parkingLotCapacity = int(activePL.pl_capacity)

        # Getting number of current occupied slots and utilization percentage
        currActiveTokens = Token.query.filter_by(exit_date=None).all()
        currUtil = parkingLotCapacity - len(currActiveTokens)
        currUtilPercent = (currUtil * 100) / parkingLotCapacity

        if (currHourInt == 0):
            prevDT = str(dt.now() - timedelta(days=1))
            prevDate = prevDT[0:prevDT.find(' ')]

            # Getting exit transactions of the last hour of the previous day
            lastHourTransactions = Token.query.filter(
                between(
                    Token.exit_date,
                    func.to_date(prevDate + " 23:00", "YYYY-MM-DD HH24:MI"),
                    func.to_date(prevDate + " 23:59",
                                 "YYYY-MM-DD HH24:MI"))).all()

            # Getting revenue collected till start of the last hour
            prevRev = 0.0

            lastlastHourUtil = HourlyUtil.query.filter(
                and_(
                    HourlyUtil.util_date == func.to_date(
                        prevDate, "YYYY-MM-DD"),
                    HourlyUtil.util_hour == 22)).first()
            if (lastlastHourUtil is None):
                prevRev = 0.0
            else:
                prevRev = float(lastlastHourUtil.rev)

            # Getting revenue collected in the last hour
            currRev = 0.0
            for lastHourTransaction in lastHourTransactions:
                currRev = currRev + float(lastHourTransaction.computed_charge)

            # Computing cumulative revenue and storing as a new row in the Hourly Util table
            currCumuRev = currRev + prevRev
            hourlyUtilEntry = HourlyUtil(prevDate, 23, currUtilPercent,
                                         currCumuRev)
            db.session.add(hourlyUtilEntry)
            db.session.commit()
        else:

            # Getting exit transactions of the last hour
            lastHourTransactions = Token.query.filter(
                between(
                    Token.exit_date,
                    func.to_date(currDate + " " + str(currHourInt - 1) + ":00",
                                 "YYYY-MM-DD HH24:MI"),
                    func.to_date(currDate + " " + str(currHourInt - 1) + ":59",
                                 "YYYY-MM-DD HH24:MI"))).all()

            # Getting revenue collected till start of the last hour
            prevRev = 0.0
            if (currHourInt >= 2):
                lastlastHourUtil = HourlyUtil.query.filter(
                    and_(
                        HourlyUtil.util_date == func.to_date(
                            currDate, "YYYY-MM-DD"),
                        HourlyUtil.util_hour == (currHourInt - 2))).first()
                if (lastlastHourUtil is None):
                    prevRev = 0.0
                else:
                    prevRev = float(lastlastHourUtil.rev)

            # Getting revenue collected in the last hour
            currRev = 0.0
            for lastHourTransaction in lastHourTransactions:
                currRev = currRev + float(lastHourTransaction.computed_charge)

            # Computing cumulative revenue and storing as a new row in the Hourly Util table
            currCumuRev = currRev + prevRev
            hourlyUtilEntry = HourlyUtil(currDate, currHour, currUtilPercent,
                                         currCumuRev)
            db.session.add(hourlyUtilEntry)
            db.session.commit()

    except Exception as e:
        print(e)
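The hourly window above is built by formatting a date-and-hour string and converting it with TO_DATE. A small standalone sketch of that predicate, assuming the same `Token` model and a hypothetical 14:00-14:59 window:

from sqlalchemy import between, func

# exit transactions between 14:00 and 14:59 on a fixed (hypothetical) day
window = between(
    Token.exit_date,
    func.to_date("2021-06-30 14:00", "YYYY-MM-DD HH24:MI"),
    func.to_date("2021-06-30 14:59", "YYYY-MM-DD HH24:MI"),
)
last_hour_exits = Token.query.filter(window).all()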
Example #15
def filter_query_all_filters(model, q, filters, user, allowed_datasets):
    """
    Return a query filtered with the CRUVED and all
    the filters available in the synthese form
    parameters:
        - q (SQLAlchemy Query): an SQLAlchemy query
        - filters (dict): a dict of filters
        - user (TRoles): a user object from TRoles
        - allowed datasets (List<int>): a list of dataset IDs the user is authorized for

    """

    # from geonature.core.users.models import UserRigth

    # user = UserRigth(
    #     id_role=user.id_role,
    #     tag_object_code='3',
    #     tag_action_code="R",
    #     id_organisme=user.id_organisme,
    #     nom_role='Administrateur',
    #     prenom_role='test'
    # )
    q = filter_query_with_cruved(model, q, user, allowed_datasets)

    if 'observers' in filters:
        q = q.filter(
            model.observers.ilike('%' + filters.pop('observers')[0] + '%'))

    if 'date_min' in filters:
        q = q.filter(model.date_min >= filters.pop('date_min')[0])

    if 'date_max' in filters:
        q = q.filter(model.date_min <= filters.pop('date_max')[0])

    if 'id_acquisition_frameworks' in filters:
        q = q.join(
            TAcquisitionFramework, model.id_acquisition_framework ==
            TAcquisitionFramework.id_acquisition_framework)
        q = q.filter(
            TAcquisitionFramework.id_acquisition_framework.in_(
                filters.pop('id_acquisition_frameworks')))

    if 'municipalities' in filters:
        q = q.filter(
            model.id_municipality.in_(
                [com for com in filters['municipalities']]))
        filters.pop('municipalities')

    if 'geoIntersection' in filters:
        # Intersect with the geom sent from the map
        geom_wkt = loads(request.args['geoIntersection'])
        # if the geom is a circle
        if 'radius' in filters:
            radius = filters.pop('radius')[0]
            geom_wkt = circle_from_point(geom_wkt, float(radius))
        geom_wkb = from_shape(geom_wkt, srid=4326)
        q = q.filter(model.the_geom_4326.ST_Intersects(geom_wkb))
        filters.pop('geoIntersection')

    if 'period_start' in filters and 'period_end' in filters:
        period_start = filters.pop('period_start')[0]
        period_end = filters.pop('period_end')[0]
        q = q.filter(
            or_(
                func.gn_commons.is_in_period(
                    func.date(model.date_min),
                    func.to_date(period_start, 'DD-MM'),
                    func.to_date(period_end, 'DD-MM')),
                func.gn_commons.is_in_period(
                    func.date(model.date_max),
                    func.to_date(period_start, 'DD-MM'),
                    func.to_date(period_end, 'DD-MM'))))
    q, filters = filter_taxonomy(model, q, filters)

    # generic filters
    join_on_cor_area = False
    for colname, value in filters.items():
        if colname.startswith('area'):
            if not join_on_cor_area:
                q = q.join(CorAreaSynthese,
                           CorAreaSynthese.id_synthese == model.id_synthese)
            q = q.filter(CorAreaSynthese.id_area.in_(value))
            join_on_cor_area = True
        else:
            col = getattr(model.__table__.columns, colname)
            q = q.filter(col.in_(value))
    return q
Example #16
    def get(self,
            variable,
            group_by,
            start_date=None,
            end_date=None,
            only_loc=None,
            use_ids=None,
            date_variable=None,
            additional_variables=None,
            group_by_variables=None):

        variable = str(variable)
        if not only_loc:
            if "only_loc" in request.args:
                only_loc = request.args["only_loc"]
            else:
                only_loc = g.allowed_location
        if not is_allowed_location(only_loc, g.allowed_location):
            return {}

        start_date, end_date = fix_dates(start_date, end_date)
        if "use_ids" in request.args.keys() or use_ids:
            use_ids = True
        else:
            use_ids = False

        if date_variable:
            date_conditions = [
                func.to_date(Data.variables[date_variable].astext,
                             "YYYY-MM-DDTHH-MI-SS") >= start_date,
                func.to_date(Data.variables[date_variable].astext,
                             "YYYY-MM-DDTHH-MI-SS") < end_date
            ]
        else:
            date_conditions = [Data.date >= start_date, Data.date < end_date]

        if "location" in variable:
            location_id = variable.split(":")[1]
            conditions = date_conditions + [
                or_(loc == location_id
                    for loc in (Data.country, Data.zone, Data.region,
                                Data.district, Data.clinic))
            ]
        else:
            conditions = [Data.variables.has_key(variable)] + date_conditions
            if additional_variables:
                # add additional variable filters if there are any
                for i in additional_variables:
                    conditions.append(Data.variables.has_key(i))

            if only_loc:
                conditions += [
                    or_(loc == only_loc
                        for loc in (Data.country, Data.zone, Data.region,
                                    Data.district, Data.clinic))
                ]
        epi_year_start = meerkat_abacus.util.epi_week.epi_year_start_date(
            start_date)
        # Determine which columns we want to extract from the Data table
        columns_to_extract = [func.count(Data.id).label('value')]
        if date_variable:
            columns_to_extract.append(
                func.floor(
                    extract(
                        'days',
                        func.to_date(Data.variables[date_variable].astext,
                                     "YYYY-MM-DDTHH-MI-SS") - epi_year_start) /
                    7 + 1).label("week"))
        else:
            columns_to_extract.append(
                func.floor(
                    extract('days', Data.date - epi_year_start) / 7 +
                    1).label("week"))
        # We want to add the columns to extract based on the group_by value
        # in addition we create a names dict that translates ids to names

        if "locations" in group_by:
            # If we have locations in group_by we also specify the level at
            #  which we want to group the locations, clinic, district or region
            if ":" in group_by:
                level = group_by.split(":")[1]
            else:
                level = "clinic"

            locations = abacus_util.get_locations(db.session)
            ids = locations.keys()
            names = get_locations_by_level(level, only_loc)

            columns_to_extract += [getattr(Data, level, None)]
            group_by_query = level
        else:
            if not group_by_variables:
                names = get_variables(group_by)
            else:
                names = group_by_variables
            if len(names) == 0:
                return {}
            ids = names.keys()
            for i in ids:
                columns_to_extract.append(
                    Data.variables.has_key(str(i)).label("id" + str(i)))
            group_by_query = ",".join(["id" + str(i) for i in ids])
        if use_ids:
            names = {vid: vid for vid in names.keys()}
        start_epi_week = abacus_util.epi_week.epi_week_for_date(start_date)[1]
        end_epi_week = abacus_util.epi_week.epi_week_for_date(end_date)[1]

        # How we deal with start and end dates in different years
        if start_date.year != end_date.year:
            end_epi_week += 53 * (end_date.year - start_date.year)

        # DB Query
        results = db.session.query(*tuple(columns_to_extract)).filter(
            *conditions).group_by("week," + group_by_query)
        # Assemble return dict
        ret = {}
        for n in names.values():
            ret[n] = {
                "total": 0,
                "weeks":
                {i: 0
                 for i in range(start_epi_week, end_epi_week + 1)}
            }

        for r in results:
            # r = (number, week, other_columns_to_extract)
            if "locations" in group_by:
                # r[2] = location
                if r[2]:
                    ret[names[r[2]]]["total"] += r[0]
                    ret[names[r[2]]]["weeks"][int(r[1])] = int(r[0])
            else:
                # r[2:] are the ids that the record has
                for i, i_d in enumerate(ids):
                    if r[i + 2]:
                        ret[names[i_d]]["total"] += r[0]
                        ret[names[i_d]]["weeks"][int(r[1])] = int(r[0])
        return ret
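The week bucket used throughout this method is simply the number of days since the epidemiological year start, floored into 7-day blocks. As a standalone expression (assuming `epi_year_start` is a Python date, as returned by `epi_year_start_date`):

from sqlalchemy import extract, func

# week 1 starts at epi_year_start; every further 7 days opens a new week
week_col = func.floor(
    extract('days', Data.date - epi_year_start) / 7 + 1
).label('week')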
Example #17
    def filter_other_filters(self):
        """
            Other filters
        """

        if "has_medias" in self.filters:
            self.query = self.query.where(
                self.model.has_medias
            )

        if "id_dataset" in self.filters:
            self.query = self.query.where(
                self.model.id_dataset.in_(self.filters.pop("id_dataset"))
            )
        if "observers" in self.filters:
            # split the input elements on whitespace
            observers = (self.filters.pop("observers")[0]).split()
            self.query = self.query.where(
                and_(*[self.model.observers.ilike("%" + observer + "%") for observer in observers])
            )

        if "observers_list" in self.filters:
            self.query = self.query.where(
                and_(
                    *[
                        self.model.observers.ilike("%" + observer.get("nom_complet") + "%")
                        for observer in self.filters.pop("observers_list")
                    ]
                )
            )

        if "id_organism" in self.filters:
            datasets = (
                DB.session.query(CorDatasetActor.id_dataset)
                .filter(CorDatasetActor.id_organism.in_(self.filters.pop("id_organism")))
                .all()
            )
            formated_datasets = [d[0] for d in datasets]
            self.query = self.query.where(self.model.id_dataset.in_(formated_datasets))
        if "date_min" in self.filters:
            self.query = self.query.where(self.model.date_min >= self.filters.pop("date_min")[0])

        if "date_max" in self.filters:
            # set date_max to 23:59 because an hour can be set in the timestamp
            date_max = datetime.datetime.strptime(self.filters.pop("date_max")[0], "%Y-%m-%d")
            date_max = date_max.replace(hour=23, minute=59, second=59)
            self.query = self.query.where(self.model.date_max <= date_max)

        if "id_acquisition_framework" in self.filters:
            if hasattr(self.model, 'id_acquisition_framework'):
                self.query = self.query.where(
                    self.model.id_acquisition_framework.in_(
                        self.filters.pop("id_acquisition_framework")
                    )
                )
            else:
                self.add_join(TDatasets, self.model.id_dataset, TDatasets.id_dataset)
                self.query = self.query.where(
                    TDatasets.id_acquisition_framework.in_(
                        self.filters.pop("id_acquisition_framework")
                    )
                )

        if "geoIntersection" in self.filters:
            # Intersect with the geom sent from the map
            ors = []

            for str_wkt in self.filters["geoIntersection"]:
                # if the geom is a circle
                if "radius" in self.filters:
                    radius = self.filters.pop("radius")[0]
                    wkt = loads(str_wkt)
                    wkt = circle_from_point(wkt, float(radius))
                else:
                    wkt = loads(str_wkt)
                geom_wkb = from_shape(wkt, srid=4326)
                ors.append(self.model.the_geom_4326.ST_Intersects(geom_wkb))

            self.query = self.query.where(or_(*ors))
            self.filters.pop("geoIntersection")

        if "period_start" in self.filters and "period_end" in self.filters:
            period_start = self.filters.pop("period_start")[0]
            period_end = self.filters.pop("period_end")[0]
            self.query = self.query.where(
                or_(
                    func.gn_commons.is_in_period(
                        func.date(self.model.date_min),
                        func.to_date(period_start, "DD-MM"),
                        func.to_date(period_end, "DD-MM"),
                    ),
                    func.gn_commons.is_in_period(
                        func.date(self.model.date_max),
                        func.to_date(period_start, "DD-MM"),
                        func.to_date(period_end, "DD-MM"),
                    ),
                )
            )
        if "unique_id_sinp" in self.filters:
            try:
                uuid_filter = uuid.UUID(self.filters.pop("unique_id_sinp")[0])
            except ValueError as e:
                raise BadRequest(str(e))
            self.query = self.query.where(self.model.unique_id_sinp == uuid_filter)
        # generic filters
        for colname, value in self.filters.items():
            if colname.startswith("area"):
                self.add_join(CorAreaSynthese, CorAreaSynthese.id_synthese, self.model.id_synthese)
                self.query = self.query.where(CorAreaSynthese.id_area.in_(value))
            elif colname.startswith("id_"):
                col = getattr(self.model.__table__.columns, colname)
                self.query = self.query.where(col.in_(value))
            elif hasattr(self.model.__table__.columns, colname):
                col = getattr(self.model.__table__.columns, colname)
                if str(col.type) == "INTEGER":
                    if colname in ["precision"]:
                        self.query = self.query.where(col <= value[0])
                    else:
                        self.query = self.query.where(col == value[0])
                else:
                    self.query = self.query.where(col.ilike("%{}%".format(value[0])))
    def filter_other_filters(self):
        """
            Other filters
        """
        if "id_dataset" in self.filters:
            self.query = self.query.where(
                self.model.id_dataset.in_(self.filters.pop("id_dataset"))
            )
        if "observers" in self.filters:
            self.query = self.query.where(
                self.model.observers.ilike("%" + self.filters.pop("observers")[0] + "%")
            )

        if "id_organism" in self.filters:
            datasets = (
                DB.session.query(CorDatasetActor.id_dataset)
                .filter(
                    CorDatasetActor.id_organism.in_(self.filters.pop("id_organism"))
                )
                .all()
            )
            formated_datasets = [d[0] for d in datasets]
            self.query = self.query.where(self.model.id_dataset.in_(formated_datasets))

        if "date_min" in self.filters:
            self.query = self.query.where(
                self.model.date_min >= self.filters.pop("date_min")[0]
            )

        if "date_max" in self.filters:
            self.query = self.query.where(
                self.model.date_min <= self.filters.pop("date_max")[0]
            )

        if "id_acquisition_framework" in self.filters:
            self.query = self.query.where(
                self.model.id_acquisition_framework.in_(
                    self.filters.pop("id_acquisition_framework")
                )
            )

        if "geoIntersection" in self.filters:
            # Intersect with the geom sent from the map
            ors = []
            for str_wkt in self.filters["geoIntersection"]:
                # if the geom is a circle
                if "radius" in self.filters:
                    radius = self.filters.pop("radius")[0]
                    wkt = loads(str_wkt)
                    wkt = circle_from_point(wkt, float(radius))
                else:
                    wkt = loads(str_wkt)
                geom_wkb = from_shape(wkt, srid=4326)
                ors.append(self.model.the_geom_4326.ST_Intersects(geom_wkb))

            self.query = self.query.where(or_(*ors))
            self.filters.pop("geoIntersection")

        if "period_start" in self.filters and "period_end" in self.filters:
            period_start = self.filters.pop("period_start")[0]
            period_end = self.filters.pop("period_end")[0]
            self.query = self.query.where(
                or_(
                    func.gn_commons.is_in_period(
                        func.date(self.model.date_min),
                        func.to_date(period_start, "DD-MM"),
                        func.to_date(period_end, "DD-MM"),
                    ),
                    func.gn_commons.is_in_period(
                        func.date(self.model.date_max),
                        func.to_date(period_start, "DD-MM"),
                        func.to_date(period_end, "DD-MM"),
                    ),
                )
            )

        # generic filters
        for colname, value in self.filters.items():
            if colname.startswith("area"):
                self.add_join(
                    CorAreaSynthese, CorAreaSynthese.id_synthese, self.model.id_synthese
                )
                self.query = self.query.where(CorAreaSynthese.id_area.in_(value))
            else:
                col = getattr(self.model.__table__.columns, colname)
                self.query = self.query.where(col.in_(value))
Example #19
def _01():
    search_form = SearchForm(prefix='search')
    g1 = aliased(OusiGuest)
    g2 = aliased(OusiGuest)
    page = request.args.get('page', 1, type=int)
    if current_user.role == 'admin':
        database = db.session.query(
            OusiStaff.department, OusiStaff.name.label('staff_name'),
            OusiStaff.phone, OusiStaff.role, g1.name.label('guest_name'),
            g1.month, g1.balance,
            func.nvl(
                db.session.query(g2.balance).filter(
                    g1.name == g2.name,
                    func.to_date(g2.month, 'yyyy-mm') == func.add_months(
                        func.to_date(g1.month, 'yyyy-mm'), -1)),
                0).label('last_balance')).filter(
                    OusiStaff.phone == g1.staff_phone,
                    current_user.department == OusiStaff.department,
                    g1.month == date.today().strftime('%Y-%m')).order_by(
                        g1.name).group_by(OusiStaff.department,
                                          OusiStaff.name.label('staff_name'),
                                          OusiStaff.phone, OusiStaff.role,
                                          g1.name.label('guest_name'),
                                          g1.month, g1.balance)
        if search_form.validate_on_submit():
            database = db.session.query(
                OusiStaff.department, OusiStaff.name.label('staff_name'),
                OusiStaff.phone, OusiStaff.role, g1.name.label('guest_name'),
                g1.month, g1.balance,
                func.nvl(
                    db.session.query(g2.balance).filter(
                        g1.name == g2.name,
                        func.to_date(g2.month, 'yyyy-mm') == func.add_months(
                            func.to_date(g1.month, 'yyyy-mm'), -1)),
                    0).label('last_balance')).filter(
                        OusiStaff.phone == g1.staff_phone,
                        current_user.department == OusiStaff.department,
                        and_(
                            g1.month.between(
                                search_form.start_time.data.strip(),
                                search_form.end_time.data.strip()),
                            OusiStaff.name.like('%{}%'.format(
                                search_form.name.data.strip())),
                            OusiStaff.phone.like('%{}%'.format(
                                search_form.phone.data.strip())))).order_by(
                                    g1.name).group_by(
                                        OusiStaff.department,
                                        OusiStaff.name.label('staff_name'),
                                        OusiStaff.phone, OusiStaff.role,
                                        g1.name.label('guest_name'), g1.month,
                                        g1.balance)
    else:
        database = db.session.query(
            OusiStaff.department, OusiStaff.name.label('staff_name'),
            OusiStaff.phone, OusiStaff.role, g1.name.label('guest_name'),
            g1.month, g1.balance,
            func.nvl(
                db.session.query(g2.balance).filter(
                    g1.name == g2.name,
                    func.to_date(g2.month, 'yyyy-mm') == func.add_months(
                        func.to_date(g1.month, 'yyyy-mm'), -1)),
                0).label('last_balance')).filter(
                    OusiStaff.phone == g1.staff_phone,
                    current_user.phone == g1.staff_phone,
                    g1.month == date.today().strftime('%Y-%m')).order_by(
                        g1.name).group_by(OusiStaff.department,
                                          OusiStaff.name.label('staff_name'),
                                          OusiStaff.phone, OusiStaff.role,
                                          g1.name.label('guest_name'),
                                          g1.month, g1.balance)
        if search_form.validate_on_submit():
            database = db.session.query(
                OusiStaff.department, OusiStaff.name.label('staff_name'),
                OusiStaff.phone, OusiStaff.role, g1.name.label('guest_name'),
                g1.month, g1.balance,
                func.nvl(
                    db.session.query(g2.balance).filter(
                        g1.name == g2.name,
                        func.to_date(g2.month, 'yyyy-mm') == func.add_months(
                            func.to_date(g1.month, 'yyyy-mm'), -1)),
                    0).label('last_balance')).filter(
                        OusiStaff.phone == g1.staff_phone,
                        current_user.phone == OusiStaff.phone,
                        g1.month.between(
                            search_form.start_time.data.strip(),
                            search_form.end_time.data.strip())).order_by(
                                g1.name).group_by(
                                    OusiStaff.department,
                                    OusiStaff.name.label('staff_name'),
                                    OusiStaff.phone, OusiStaff.role,
                                    g1.name.label('guest_name'), g1.month,
                                    g1.balance)
    data = database.paginate(
        page,
        per_page=current_app.config['OUSI_POSTS_PER_PAGE'],
        error_out=False)

    return render_template('show/01.html',
                           data=data,
                           searchForm=search_form,
                           database=json.dumps(database,
                                               cls=AlchemyJsonEncoder))