Example #1
0
    def send(start, stop):
        """Send expiring-access chat reminders via Rocket.Chat.

        Posts one message per Access row whose stop time falls strictly
        between *start* and *stop*. No-op unless ROCKET_ENABLED == 1.
        """
        if current_app.config['ROCKET_ENABLED'] != 1:
            print("rocket sending is disabled %s" %
                  current_app.config['ROCKET_ENABLED'])
            return

        print("start: %s stop: %s" % (start, stop))

        # datetime() cast so string-stored stop times compare correctly
        # against the window bounds (presumably SQLite — TODO confirm).
        access = Access.query.filter(
            func.datetime(Access.stop) > start,
            func.datetime(Access.stop) < stop).all()

        rocket = RocketChat(current_app.config['ROCKET_USER'],
                            current_app.config['ROCKET_PASS'],
                            server_url=current_app.config['ROCKET_URL'])

        for a in access:
            stopdate = a.stop.strftime("%Y-%m-%d")
            msg = f'''Access for: @{a.user.username} \
                    to: {a.role.name} \
                    expires at: {stopdate} \
                    mgr: @{a.role.service.manager.username} \
                    please act on this as needed'''

            pprint(
                rocket.chat_post_message(
                    msg, channel=current_app.config['ROCKET_CHANNEL']).json())
            time.sleep(1)  # throttle so the chat API isn't hammered
Example #2
0
class MockData(ModelBase):
    """One mock endpoint definition: HTTP method + URL mapped to a canned response."""
    __tablename__ = 'mock_data'

    id = db.Column(db.Integer, primary_key=True)
    # Owning project id (no FK constraint declared here).
    project_id = db.Column(db.Integer, nullable=False)

    method = db.Column(db.String(10), nullable=False, server_default='GET')
    url = db.Column(db.String(2048), index=True, nullable=False)

    # Request-match list stored as JSON text (per the column comment).
    request = db.Column(db.Text, comment='请求匹配列表,type: json')  # newly added

    response = db.Column(db.Text)
    # HTTP response status code, 200 by default (per the column comment).
    code = db.Column(db.Integer,
                     server_default='200',
                     comment='http 响应状态码:200(默认)')
    content_type = db.Column(db.String(30), server_default='application/json')
    headers = db.Column(db.Text)
    # URL match mode: 0 = plain (default), 1 = regex (per the column comment).
    match_type = db.Column(db.Integer,
                           server_default='0',
                           comment='URL 匹配模式:0普通模式(默认),1 正则模式')
    # Record state: 0 = off (default), 1 = on, -1 = deleted (per the column comment).
    status = db.Column(db.Integer,
                       server_default='0',
                       comment='Mock 数据状态:0关闭(默认),1 打开,-1 删除')
    description = db.Column(db.String(512))
    # SQLite-side local-time timestamps; update_time refreshed on UPDATE.
    create_time = db.Column(db.DateTime,
                            server_default=func.datetime('now', 'localtime'))
    update_time = db.Column(db.DateTime,
                            server_default=func.datetime('now', 'localtime'),
                            server_onupdate=func.datetime('now', 'localtime'))

    def __repr__(self):
        return '<MockData: id={} method={} url={}>'.format(
            self.id, self.method, self.url)
Example #3
0
class MockData(Base):
    """Mock endpoint definition (plain declarative-Base variant)."""
    __tablename__ = 'mock_data'

    id = Column(Integer, primary_key=True)
    project_id = Column(Integer, nullable=False)
    method = Column(String(10), nullable=False, server_default='GET')
    url = Column(String(2048), index=True, nullable=False)
    response = Column(Text)
    # HTTP response status code, 200 by default (per the column comment).
    code = Column(Integer, server_default='200', comment='http 响应状态码:200(默认)')
    content_type = Column(String(30), server_default='application/json')
    headers = Column(Text)
    # URL match mode: 0 = plain (default), 1 = regex (per the column comment).
    match_type = Column(Integer,
                        server_default='0',
                        comment='URL 匹配模式:0普通模式(默认),1 正则模式')
    # Record state: 0 = off (default), 1 = on, -1 = deleted (per the column comment).
    status = Column(Integer,
                    server_default='0',
                    comment='Mock 数据状态:0关闭(默认),1 打开,-1 删除')
    description = Column(String(512))
    create_time = Column(DateTime,
                         server_default=func.datetime('now', 'localtime'))
    update_time = Column(DateTime,
                         server_default=func.datetime('now', 'localtime'),
                         server_onupdate=func.datetime('now', 'localtime'))

    # Default ordering: newest first.
    # NOTE(review): the 'order_by' mapper arg was deprecated/removed in
    # modern SQLAlchemy (>= 1.1) — confirm the pinned version supports it.
    __mapper_args__ = {'order_by': create_time.desc()}

    def __repr__(self):
        return '<MockData: id={} method={} url={}>'.format(
            self.id, self.method, self.url)
Example #4
0
 def query_mealsignin(self, start_date, stop_date):
     """Build a meal sign-in report DataFrame for [start_date, stop_date].

     Joins sign-in logs to clients on fr_id and decodes coded columns via
     self.datadict (keys '3.05.1'/'3.06.1' look like HMIS data-standard
     codes — TODO confirm).
     """
     # Sqlite stores dates as strings, use sqlalchemy func to convert
     query_statement = (self.session.query(
         SignLog, Clients).filter(SignLog.fr_id == Clients.fr_id).filter(
             SignLog.time >= func.datetime(start_date)).filter(
                 SignLog.time <= func.datetime(stop_date)).statement)
     df_signlog = pd.read_sql(query_statement, self.db_engine)
     df_meallog = pd.DataFrame({
         'Client Name':
         df_signlog['first_name'] + ' ' + df_signlog['middle_name'] + ' ' +
         df_signlog['last_name'],
         # Race flag columns are truthy/falsy; nonzerocols collapses the
         # set of flags into a label (helper defined elsewhere).
         'Race':
         self.nonzerocols(df_signlog[[
             'am_ind_ak_native', 'asian', 'black_af_american',
             'native_hi_other_pacific', 'white'
         ]].astype(bool)),
         'Ethnicity':
         df_signlog['ethnicity'].apply(
             lambda x: self.datadict['3.05.1']['data'][str(x)]),
         'DOB':
         df_signlog['dob'],
         'Gender':
         df_signlog['gender'].apply(
             lambda x: self.datadict['3.06.1']['data'][str(x)]),
         # first_time is stored as 0/1; anything else raises KeyError.
         'First Time':
         df_signlog['first_time'].apply(lambda x: {
             0: 'No',
             1: 'Yes'
         }[x])
     })
     # Collapse multiple spaces left by empty middle names.
     df_meallog['Client Name'] = df_meallog['Client Name'].str.replace(
         ' +', ' ', regex=True)
     return df_meallog
Example #5
0
class MockProject(ModelBase):
    """A mock project grouping mock_data entries."""
    __tablename__ = 'mock_project'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False)
    description = db.Column(db.String(512))
    # Project theme color (per the column comment).
    color = db.Column(db.String(50), comment='项目主题色')
    # Status: 1 = normal (default), -1 = deleted (per the column comment).
    status = db.Column(db.Integer, server_default='1', comment='状态:1 正常,-1 删除')
    # SQLite-side local-time timestamps; update_time refreshed on UPDATE.
    create_time = db.Column(db.DateTime, server_default=func.datetime('now', 'localtime'))
    update_time = db.Column(db.DateTime, server_default=func.datetime('now', 'localtime'),
                            server_onupdate=func.datetime('now', 'localtime'))

    def __repr__(self):
        return '<MockProject: id={} name={}>'.format(self.id, self.name)
Example #6
0
def process_vouchers():
    """Advance voucher lifecycle: end, expire, then archive as appropriate."""
    # End any active vouchers whose time is up.
    for voucher in Voucher.query.filter(Voucher.status == 'active').all():
        if voucher.should_end():
            voucher.end()

    # Expire unused 'new' vouchers.
    max_age = app.config.get('VOUCHER_MAXAGE', 120)
    for voucher in Voucher.query.filter(Voucher.status == 'new').all():
        if voucher.should_expire():
            voucher.expire()

    # Archive blocked/ended/expired vouchers untouched for max_age minutes.
    age_cutoff = func.datetime(Voucher.updated_at, str(max_age) + ' minutes')
    archivable = Voucher.query \
        .filter(age_cutoff < func.current_timestamp()) \
        .filter(Voucher.status.in_(['blocked', 'ended', 'expired'])) \
        .all()
    for voucher in archivable:
        voucher.archive()

    db.session.commit()
Example #7
0
 def datetime_end_access(cls):
     """Rebuild the DD-MM-YYYY… string in dt_end_access as a SQL
     datetime expression (YYYY-MM-DD plus the trailing time part)."""
     year = func.substr(cls.dt_end_access, 7, 4)
     month = func.substr(cls.dt_end_access, 4, 2)
     day = func.substr(cls.dt_end_access, 1, 2)
     time_part = func.substr(cls.dt_end_access, 12)
     return func.datetime(year + "-" + month + "-" + day + ' ' + time_part)
Example #8
0
    def get_aggregated(
        cls,
        signal_type: str,
        *,
        aggregate_function: typing.Callable = sa_func.avg,
        date_range: typing.Tuple[datetime.datetime, datetime.datetime]
    ) -> typing.List['Signal']:
        """Return signals of *signal_type* aggregated into time buckets.

        Bucket boundaries come from strftime(time_tpl) applied to
        received_at (time_tpl supplied by _get_query_data), so bucket
        width is decided there. Values within a bucket are combined with
        *aggregate_function* (average by default). Returns [] when
        _get_query_data yields nothing for the range.
        """
        query_data = cls._get_query_data(
            signal_type=signal_type,
            date_range=date_range,
        )

        if not query_data:
            return []

        signals = db.db_session().query(
            aggregate_function(cls.value).label('value'),
            cls.received_at.label('received_at'),
            # Truncate received_at to its bucket start and coerce the
            # string result back to a DateTime.
            sa_func.datetime(
                sa_func.strftime(query_data['time_tpl'], cls.received_at),
                type_=DateTime,
            ).label('aggregated_time'),
        ).filter(
            cls.received_at >= query_data['start_time'],
            cls.received_at <= query_data['end_time'],
            cls.type == signal_type,
            cls.value.isnot(None),  # NULL readings would poison aggregates
        ).group_by('aggregated_time', ).order_by(cls.received_at, ).all()

        return signals
Example #9
0
def query_tobs():
    """Return JSON observations (date/station/temperature) for the 12
    months preceding the most recent measurement date.

    Fixes: the last-year start date was computed twice (identical
    duplicated statement removed); index-based result loop replaced with
    row unpacking.
    """
    # Most recent date in the data set; dates are stored as 'YYYY-MM-DD' text.
    last_date = session.query(Measurement.date).order_by(desc(
        Measurement.date)).first()
    year, month, day = str(last_date[0]).split('-')
    last_12_month_date = dt.date(int(year), int(month),
                                 int(day)) - dt.timedelta(365)

    tobs_lastyear_query = session.query(Measurement.date, Station.station, Measurement.tobs).\
                        filter(func.datetime(Measurement.date) >= last_12_month_date).\
                        order_by(Measurement.date).\
                        all()

    tobs_lastyear_list = [{
        'date': date,
        'station': station,
        'temperature': tobs,
    } for date, station, tobs in tobs_lastyear_query]

    return jsonify(tobs_lastyear_list)
Example #10
0
def get_tobs_most_active():
    """JSON list of last-12-month temperature observations for the
    station with the most measurements."""
    # Create our session (link) from Python to the DB
    session = Session(engine)

    # Stations ranked by measurement count, busiest first.
    measurement_stn_grp = session.query(Measurement.station, func.count(Measurement.station)).\
        group_by(Measurement.station).\
        order_by(func.count(Measurement.station).desc()).all()

    latest_measure = session.query(Measurement).order_by(Measurement.date.desc()).limit(1).all()
    year, month, day = latest_measure[0].date.split('-')
    last12_start = dt.date(int(year), int(month), int(day)) - dt.timedelta(365)

    tob_last12_measure = session.query(Measurement.date, Measurement.station, Measurement.tobs).\
                filter(func.datetime(Measurement.date) > last12_start).\
                filter(Measurement.station == measurement_stn_grp[0][0]).\
                order_by(Measurement.date).all()

    session.close()

    # One dict per observation row.
    tobs_list = [{'date': row[0], 'station': row[1], 'temperature': row[2]}
                 for row in tob_last12_measure]

    return jsonify(tobs_list)
Example #11
0
 def get_export_data(self, pagesize, pageindex):
     """Return one page (*pagesize* rows, *pageindex* 1-based) of phone
     records with create_time rendered in local time."""
     offset = (pageindex - 1) * pagesize
     # create_time is stored as a unix epoch; render it as local time.
     columns = (Phones.phone, Phones.keyword, Phones.source_url,
                func.datetime(Phones.create_time, 'unixepoch',
                              'localtime').label('create_time_zh'))
     export_phones = self.session.query(Phones).with_entities(*columns)
     return export_phones.order_by(Phones.id).limit(pagesize).offset(offset).all()
Example #12
0
def predictions_get():
    """Return JSON predictions for active tips whose kick-off is still ahead."""
    upcoming_tips = db.session.query(Tip) \
        .filter(Tip.Status == 1) \
        .filter(func.datetime(Tip.KickOff) >= datetime.utcnow()) \
        .all()

    result = []
    for tip in upcoming_tips:
        result.append({col: getattr(tip, col) for col in Tip.json_attributes})
    return jsonify(predictions=result)
Example #13
0
def get_temperature(startDate, endDate):
    """Return [{'TMIN','TAVG','TMAX'}] aggregates for measurements between
    the two 'YYYY-MM-DD' date strings, inclusive."""
    startD = dt.datetime.strptime(startDate, '%Y-%m-%d')
    endD = dt.datetime.strptime(endDate, '%Y-%m-%d')
    session = Session(engine)
    aggregates = [func.min(Measurement.tobs),
                  func.avg(Measurement.tobs),
                  func.max(Measurement.tobs)]
    results = session.query(*aggregates).\
        filter(func.datetime(Measurement.date) >= startD).\
        filter(func.datetime(Measurement.date) <= endD).\
        order_by(Measurement.date).all()
    return [{'TMIN': tmin, 'TAVG': tavg, 'TMAX': tmax}
            for tmin, tavg, tmax in results]
Example #14
0
 def record_query(self, limitIndex):
     """Return page *limitIndex* (1-based, 100 rows per page) of phone records."""
     pagecount = 100
     limitnum = (limitIndex - 1) * pagecount
     # create_time is stored as a unix epoch; render it as local time.
     columns = (Phones.phone, Phones.keyword, Phones.source_url,
                func.datetime(Phones.create_time, 'unixepoch',
                              'localtime').label('create_time_zh'))
     query = self.session.query(Phones).with_entities(*columns)
     return query.order_by(Phones.id).limit(pagecount).offset(limitnum).all()
Example #15
0
 def __fetch(self,
             session,
             instrument,
             to_date=datetime.datetime(2019, 1, 1, 0, 0, 0),
             from_date=datetime.datetime(2012, 1, 1, 0, 0, 0)):
     """
         Select trade records within a time span and return them.
         from_date: start of the span
         to_date: end of the span
         instrument: instrument/type to match (compared as a string)
     """
     # Cast both sides to String so type comparison is consistent;
     # datetime() normalizes the bounds for the string-date comparison.
     cursor = session.query(self.model).filter(
         and_(
             cast(self.model.type, String) == cast(instrument, String),
             self.model.start_date >= func.datetime(from_date),
             self.model.end_date <=
             func.datetime(to_date))).order_by("start_date")
     result = cursor.all()
     return result
Example #16
0
 def get_export_phone_sms_logs_data(self, pagesize, pageindex):
     """Return one page (*pagesize* rows, *pageindex* 1-based) of SMS log
     rows with create_time rendered in local time."""
     offset = (pageindex - 1) * pagesize
     # create_time is stored as a unix epoch; render it as local time.
     columns = (PhoneSmsLogs.phone, PhoneSmsLogs.keyword,
                PhoneSmsLogs.content, PhoneSmsLogs.respcode,
                PhoneSmsLogs.respdesc,
                func.datetime(PhoneSmsLogs.create_time, 'unixepoch',
                              'localtime').label('create_time_zh'))
     query = self.session.query(PhoneSmsLogs).with_entities(*columns)
     return query.order_by(PhoneSmsLogs.id).limit(pagesize).offset(offset).all()
Example #17
0
 def record_sms_log_query(self, limitIndex):
     """Return page *limitIndex* (1-based, 500 rows per page) of SMS logs."""
     pagecount = 500
     limitnum = (limitIndex - 1) * pagecount
     # create_time is stored as a unix epoch; render it as local time.
     columns = (PhoneSmsLogs.phone, PhoneSmsLogs.keyword,
                PhoneSmsLogs.content, PhoneSmsLogs.respcode,
                PhoneSmsLogs.respdesc,
                func.datetime(PhoneSmsLogs.create_time, 'unixepoch',
                              'localtime').label('create_time_zh'))
     query = self.session.query(PhoneSmsLogs).with_entities(*columns)
     return query.order_by(PhoneSmsLogs.id).limit(pagecount).offset(limitnum).all()
Example #18
0
class BaseModel(Base):
    """Abstract declarative base: integer PK plus self-maintaining timestamps."""
    __abstract__ = True

    id = Column(Integer,
                nullable=False,
                unique=True,
                primary_key=True,
                autoincrement=True)
    # Both a Python-side default and a server default, so rows inserted
    # outside the ORM are stamped too.
    created_at = Column(TIMESTAMP,
                        nullable=False,
                        default=datetime.datetime.utcnow,
                        server_default=func.datetime('now'))
    updated_at = Column(TIMESTAMP,
                        nullable=False,
                        default=datetime.datetime.utcnow,
                        server_default=func.datetime('now'),
                        onupdate=func.datetime('now'))

    def __repr__(self):
        # Fix: instances have no __name__ attribute (the original
        # f'{self.__name__}' raised AttributeError); use the class name.
        return f'{type(self).__name__}'
Example #19
0
def sync_shelves(sync_token, sync_results):
    """Append shelf (Kobo "tag") changes since the last sync to *sync_results*.

    Archived shelves become DeletedTag entries; live shelves become
    NewTag or ChangedTag depending on creation time. Advances
    sync_token.tags_last_modified to the newest shelf seen.
    """
    new_tags_last_modified = sync_token.tags_last_modified

    # Shelves archived since the last sync -> tell the device to delete them.
    # datetime() cast so string-stored timestamps compare correctly.
    for shelf in ub.session.query(ub.ShelfArchive).filter(
            func.datetime(
                ub.ShelfArchive.last_modified) > sync_token.tags_last_modified,
            ub.ShelfArchive.user_id == current_user.id):
        new_tags_last_modified = max(shelf.last_modified,
                                     new_tags_last_modified)

        sync_results.append({
            "DeletedTag": {
                "Tag": {
                    "Id":
                    shelf.uuid,
                    "LastModified":
                    convert_to_kobo_timestamp_string(shelf.last_modified)
                }
            }
        })

    # Live shelves modified since the last sync.
    for shelf in ub.session.query(ub.Shelf).filter(
            func.datetime(
                ub.Shelf.last_modified) > sync_token.tags_last_modified,
            ub.Shelf.user_id == current_user.id):
        if not shelf_lib.check_shelf_view_permissions(shelf):
            continue

        new_tags_last_modified = max(shelf.last_modified,
                                     new_tags_last_modified)

        tag = create_kobo_tag(shelf)
        if not tag:
            continue

        # Created after last sync -> brand new to the device.
        if shelf.created > sync_token.tags_last_modified:
            sync_results.append({"NewTag": tag})
        else:
            sync_results.append({"ChangedTag": tag})
    sync_token.tags_last_modified = new_tags_last_modified
    ub.session.commit()
Example #20
0
    def main(self):
        """Ad-hoc diagnostic: build two queries over the image table and
        time how long the lag query takes to count.

        Hard-coded to camera 1 / 2022-02-01 / night images.
        """
        camera_id = 1
        timespec = '20220201'

        d_dayDate = datetime.strptime(timespec, '%Y%m%d').date()
        night = True

        # createDate rendered in local time by SQLite's datetime().
        createDate_local = func.datetime(
            IndiAllSkyDbImageTable.createDate, 'localtime',
            type_=DateTime).label('createDate_local')

        #timelapse_files_entries = db.session.query(
        #    IndiAllSkyDbImageTable,
        #    createDate_local,
        #)\
        #    .join(IndiAllSkyDbImageTable.camera)\
        #    .filter(IndiAllSkyDbCameraTable.id == camera_id)\
        #    .filter(IndiAllSkyDbImageTable.dayDate == d_dayDate)\
        #    .filter(IndiAllSkyDbImageTable.night == night)\
        #    .order_by(IndiAllSkyDbImageTable.createDate.asc())

        # All images for the chosen camera/day/night window, oldest first.
        timelapse_files_entries = IndiAllSkyDbImageTable.query\
            .add_columns(createDate_local)\
            .join(IndiAllSkyDbCameraTable)\
            .filter(IndiAllSkyDbCameraTable.id == camera_id)\
            .filter(IndiAllSkyDbImageTable.dayDate == d_dayDate)\
            .filter(IndiAllSkyDbImageTable.night == night)\
            .order_by(IndiAllSkyDbImageTable.createDate.asc())

        now_minus_3h = datetime.now() - timedelta(hours=3)

        # createDate as epoch seconds so adjacent rows can be subtracted.
        createDate_s = func.strftime('%s',
                                     IndiAllSkyDbImageTable.createDate,
                                     type_=Integer)
        # Seconds between consecutive images (window lag) over the last
        # 3 hours, newest 50 rows.
        image_lag_list = IndiAllSkyDbImageTable.query\
            .add_columns(
                IndiAllSkyDbImageTable.id,
                (createDate_s - func.lag(createDate_s).over(order_by=IndiAllSkyDbImageTable.createDate)).label('lag_diff'),
            )\
            .filter(IndiAllSkyDbImageTable.createDate > now_minus_3h)\
            .order_by(IndiAllSkyDbImageTable.createDate.desc())\
            .limit(50)

        start = time.time()

        #logger.warning('Entries: %d', timelapse_files_entries.count())
        logger.warning('Entries: %d', image_lag_list.count())

        elapsed_s = time.time() - start
        logger.info('SQL executed in %0.4f s', elapsed_s)

        #logger.info('SQL: %s', timelapse_files_entries)
        logger.info('SQL: %s', image_lag_list)
Example #21
0
def tobs():
    """JSON (date, tobs) pairs for the year preceding the newest data point."""
    session = Session(engine)
    # Newest 'YYYY-MM-DD' date string present in the measurements.
    last_date = session.query(Measurement.date).order_by(desc(
        Measurement.date)).first()[0]
    end_date = dt.datetime.strptime(last_date, "%Y-%m-%d")
    start_date = end_date - dt.timedelta(365)
    observations = session.query(Measurement.date, Measurement.tobs) \
        .filter(func.datetime(Measurement.date) >= start_date) \
        .order_by(Measurement.date) \
        .all()
    return jsonify(observations)
Example #22
0
def background_job(session_mgr):
    """Periodically purge DataEntry/WeatherHistory rows older than 30 days.

    Runs forever, waking every *rf_refresh* seconds. Purge errors are
    logged and the loop continues.

    Fix: the bare ``except:`` clauses (inner and outer) also swallowed
    KeyboardInterrupt/SystemExit, making the worker unstoppable; narrowed
    to ``except Exception`` and the redundant outer catch-all removed.
    """
    rf_refresh = 600  # seconds between purge passes
    while True:
        logger.debug("background_job(): wake up")
        try:
            session = session_mgr()
            session.query(HALDB.DataEntry).filter(
                HALDB.DataEntry.created_on < func.datetime('now', '-30 day')).delete(
                synchronize_session=False)
            session.query(HALDB.WeatherHistory).filter(
                HALDB.WeatherHistory.created_on < func.datetime('now', '-30 day')).delete(
                synchronize_session=False)
            session.expire_all()
            session.commit()
            session_mgr.remove()
        except Exception:
            logger.error(str(traceback.format_exc()))
        logger.debug("background_job(): done")
        time.sleep(rf_refresh)
Example #23
0
def HandleSyncRequest():
    """Answer a Kobo device library-sync request.

    Collects books (in KOBO_FORMATS) modified since the sync token's
    watermark as New/Changed entitlements, advances the token, and
    returns either a proxied sync response or plain JSON.
    """
    sync_token = SyncToken.SyncToken.from_headers(request.headers)
    log.info("Kobo library sync request received.")
    if not current_app.wsgi_app.is_proxied:
        log.debug('Kobo: Received unproxied request, changed request port to server port')

    # TODO: Limit the number of books return per sync call, and rely on the sync-continuatation header
    # instead so that the device triggers another sync.

    new_books_last_modified = sync_token.books_last_modified
    new_books_last_created = sync_token.books_last_created
    entitlements = []

    # We reload the book database so that the user get's a fresh view of the library
    # in case of external changes (e.g: adding a book through Calibre).
    db.reconnect_db(config)

    # sqlite gives unexpected results when performing the last_modified comparison without the datetime cast.
    # It looks like it's treating the db.Books.last_modified field as a string and may fail
    # the comparison because of the +00:00 suffix.
    changed_entries = (
        db.session.query(db.Books)
        .join(db.Data)
        .filter(func.datetime(db.Books.last_modified) > sync_token.books_last_modified)
        .filter(db.Data.format.in_(KOBO_FORMATS))
        .all()
    )

    for book in changed_entries:
        entitlement = {
            "BookEntitlement": create_book_entitlement(book),
            "BookMetadata": get_metadata(book),
            "ReadingState": reading_state(book),
        }
        # Created after the last sync -> new to the device; else changed.
        if book.timestamp > sync_token.books_last_created:
            entitlements.append({"NewEntitlement": entitlement})
        else:
            entitlements.append({"ChangedEntitlement": entitlement})

        new_books_last_modified = max(
            book.last_modified, sync_token.books_last_modified
        )
        new_books_last_created = max(book.timestamp, sync_token.books_last_created)

    # Advance the watermarks so the next sync only sees newer changes.
    sync_token.books_last_created = new_books_last_created
    sync_token.books_last_modified = new_books_last_modified

    if config.config_kobo_proxy:
        return generate_sync_response(request, sync_token, entitlements)

    return make_response(jsonify(entitlements))
Example #24
0
def expire_vouchers():
    """End overdue active vouchers and expire stale unused ones."""
    # Active vouchers that should end
    # (started_at + per-voucher 'minutes' lifetime is already in the past).
    vouchers = Voucher.query \
                .filter(func.datetime(Voucher.started_at, Voucher.minutes + ' minutes') < func.current_timestamp()) \
                .filter(Voucher.status == 'active') \
                .all()

    for voucher in vouchers:
        if voucher.should_end():
            voucher.end()

    # New vouchers that are unused and should expire
    # (older than the configured VOUCHER_MAXAGE, default 120 minutes).
    max_age = app.config.get('VOUCHER_MAXAGE', 120)
    vouchers = Voucher.query \
                .filter(func.datetime(Voucher.created_at, str(max_age) + ' minutes') < func.current_timestamp()) \
                .filter(Voucher.status == 'new') \
                .all()

    for voucher in vouchers:
        if voucher.should_expire():
            voucher.expire()

    db.session.commit()
Example #25
0
    def mailsend(start, stop):
        """Email each user whose access expires between *start* and *stop*.

        No-op when MAIL_ENABLED is False.
        """
        print("start: %s stop: %s" % (start, stop))
        if current_app.config['MAIL_ENABLED'] is False:
            print("mail sending is disabled")
            return

        # datetime() cast so string-stored stop times compare correctly.
        access = Access.query.filter(
            func.datetime(Access.stop) > start,
            func.datetime(Access.stop) < stop).all()

        # Imported here, presumably to avoid a circular import — TODO confirm.
        from app.email import send_email

        for a in access:
            stopdate = a.stop.strftime("%Y-%m-%d")

            send_email(f'[PYRA] Role: {a.role.name} is expiring at {stopdate}',
                       sender=current_app.config['MAIL_DEFAULT_SENDER'],
                       recipients=[a.user.email],
                       text_body=render_template('email/expiring_access.txt',
                                                 access=a),
                       html_body=render_template('email/expiring_access.html',
                                                 access=a))
Example #26
0
    def test_render_server_default_expr_needs_parens(self):
        """A function-expression server default must render wrapped in parens."""
        c = Column(
            "date_value",
            DateTime(),
            server_default=func.datetime("now", "localtime"),
        )

        result = autogenerate.render._render_column(c, self.autogen_context)
        # NOTE(review): the `!U` prefix appears to be this harness's marker
        # for unicode-literal rendering — confirm against eq_ignore_whitespace.
        eq_ignore_whitespace(
            result,
            "sa.Column('date_value', sa.DateTime(), "
            "server_default=sa.text(!U\"(datetime('now', 'localtime'))\"), "
            "nullable=True)",
        )
Example #27
0
def expire_vouchers():
    """End overdue active vouchers, then expire stale unused ones."""
    # Active vouchers whose per-voucher lifetime (minutes) has elapsed.
    end_cutoff = func.datetime(Voucher.started_at, Voucher.minutes + ' minutes')
    overdue = Voucher.query \
        .filter(end_cutoff < func.current_timestamp()) \
        .filter(Voucher.status == 'active') \
        .all()
    for voucher in overdue:
        if voucher.should_end():
            voucher.end()

    # Unused 'new' vouchers older than the configured maximum age.
    max_age = app.config.get('VOUCHER_MAXAGE', 120)
    stale_cutoff = func.datetime(Voucher.created_at, str(max_age) + ' minutes')
    stale = Voucher.query \
        .filter(stale_cutoff < func.current_timestamp()) \
        .filter(Voucher.status == 'new') \
        .all()
    for voucher in stale:
        if voucher.should_expire():
            voucher.expire()

    db.session.commit()
Example #28
0
def test_calculating_age_on_the_fly(session):
    """Test if the result's have a custom field.

    The fourth column is a computed expression (now - birthday), not a
    mapped attribute; the DataTables output should still include it.
    """
    query = session.query().filter(User.id > 5)

    columns = [
        ColumnDT(User.id, search_method='numeric'),
        ColumnDT(User.name, search_method='string_contains'),
        ColumnDT(User.birthday, search_method='date'),
        # Computed on the fly by the database, hence 'numeric' search.
        ColumnDT(func.datetime('now') - User.birthday, search_method='numeric')
    ]

    params = create_dt_params(columns)
    rowTable = DataTables(params, query, columns)
    res = rowTable.output_result()

    # Fixture presumably has 15 users, so id > 5 leaves 10 — TODO confirm.
    assert len(res['data']) == 10
    def test_calculating_age_on_the_fly(self):
        """Computed (now - birthday) column still appears in the output."""
        self.populate(9)
        query = self.session.query().filter(User.id > 3)

        columns = [
            ColumnDT(User.id, search_method='numeric'),
            ColumnDT(User.name, search_method='string_contains'),
            ColumnDT(User.birthday, search_method='date'),
            # Computed on the fly by the database, hence 'numeric' search.
            ColumnDT(func.datetime('now') - User.birthday,
                     search_method='numeric'),
        ]

        req = self.create_dt_params()
        rowTable = DataTables(req, query, columns)
        res = rowTable.output_result()
        # 9 populated users minus ids 1-3 filtered out -> 6 rows.
        assert len(res['data']) == 6
Example #30
0
File: db.py Project: gh2o/ScanInOut
def epoch_to_utc_timestamp (ts):
	"""SQL expression converting a unix epoch value to a UTC timestamp."""
	return func.datetime (ts, 'unixepoch')
Example #31
0
 def test_compare_current_timestamp_func(self):
     """A datetime('now','localtime') server default round-trips unchanged."""
     self._compare_default_roundtrip(DateTime(),
                                     func.datetime("now", "localtime"))
Example #32
0
File: db.py Project: gh2o/ScanInOut
def utc_timestamp_to_local_timestamp (ts):
	"""SQL expression converting a UTC timestamp to local time."""
	return func.datetime (ts, 'localtime')
Example #33
0
File: db.py Project: gh2o/ScanInOut
def local_timestamp_to_utc_timestamp (ts):
	"""SQL expression converting a local timestamp to UTC."""
	return func.datetime (ts, 'utc')
Example #34
0
def HandleSyncRequest():
    """Answer a Kobo device sync: entitlements, reading states, shelves.

    Collects books changed since the sync token's watermarks (plus books
    whose Archived bit recently flipped), attaches changed reading
    states, syncs shelves, advances all token watermarks, and returns
    the device response.
    """
    sync_token = SyncToken.SyncToken.from_headers(request.headers)
    log.info("Kobo library sync request received.")
    if not current_app.wsgi_app.is_proxied:
        log.debug(
            'Kobo: Received unproxied request, changed request port to server port'
        )

    # TODO: Limit the number of books return per sync call, and rely on the sync-continuatation header
    # instead so that the device triggers another sync.

    new_books_last_modified = sync_token.books_last_modified
    new_books_last_created = sync_token.books_last_created
    new_reading_state_last_modified = sync_token.reading_state_last_modified
    sync_results = []

    # We reload the book database so that the user get's a fresh view of the library
    # in case of external changes (e.g: adding a book through Calibre).
    calibre_db.reconnect_db(config, ub.app_DB_path)

    archived_books = (ub.session.query(ub.ArchivedBook).filter(
        ub.ArchivedBook.user_id == int(current_user.id)).all())

    # We join-in books that have had their Archived bit recently modified in order to either:
    #   * Restore them to the user's device.
    #   * Delete them from the user's device.
    # (Ideally we would use a join for this logic, however cross-database joins don't look trivial in SqlAlchemy.)
    recently_restored_or_archived_books = []
    archived_book_ids = {}
    new_archived_last_modified = datetime.datetime.min
    for archived_book in archived_books:
        if archived_book.last_modified > sync_token.archive_last_modified:
            recently_restored_or_archived_books.append(archived_book.book_id)
        if archived_book.is_archived:
            archived_book_ids[archived_book.book_id] = True
        new_archived_last_modified = max(new_archived_last_modified,
                                         archived_book.last_modified)

    # sqlite gives unexpected results when performing the last_modified comparison without the datetime cast.
    # It looks like it's treating the db.Books.last_modified field as a string and may fail
    # the comparison because of the +00:00 suffix.
    changed_entries = (calibre_db.session.query(db.Books).join(db.Data).filter(
        or_(
            func.datetime(db.Books.last_modified) >
            sync_token.books_last_modified,
            db.Books.id.in_(recently_restored_or_archived_books))).filter(
                db.Data.format.in_(KOBO_FORMATS)).all())

    reading_states_in_new_entitlements = []
    for book in changed_entries:
        kobo_reading_state = get_or_create_reading_state(book.id)
        entitlement = {
            "BookEntitlement":
            create_book_entitlement(book,
                                    archived=(book.id in archived_book_ids)),
            "BookMetadata":
            get_metadata(book),
        }

        # Attach the reading state only when it changed since the last sync;
        # remember the book so the second query below can skip it.
        if kobo_reading_state.last_modified > sync_token.reading_state_last_modified:
            entitlement["ReadingState"] = get_kobo_reading_state_response(
                book, kobo_reading_state)
            new_reading_state_last_modified = max(
                new_reading_state_last_modified,
                kobo_reading_state.last_modified)
            reading_states_in_new_entitlements.append(book.id)

        # Created after the last sync -> new to the device; else changed.
        if book.timestamp > sync_token.books_last_created:
            sync_results.append({"NewEntitlement": entitlement})
        else:
            sync_results.append({"ChangedEntitlement": entitlement})

        new_books_last_modified = max(book.last_modified,
                                      new_books_last_modified)
        new_books_last_created = max(book.timestamp, new_books_last_created)

    # Reading states that changed for books NOT already covered above.
    changed_reading_states = (ub.session.query(ub.KoboReadingState).filter(
        and_(
            func.datetime(ub.KoboReadingState.last_modified) >
            sync_token.reading_state_last_modified,
            ub.KoboReadingState.user_id == current_user.id,
            ub.KoboReadingState.book_id.notin_(
                reading_states_in_new_entitlements))))
    for kobo_reading_state in changed_reading_states.all():
        book = calibre_db.session.query(db.Books).filter(
            db.Books.id == kobo_reading_state.book_id).one_or_none()
        if book:
            sync_results.append({
                "ChangedReadingState": {
                    "ReadingState":
                    get_kobo_reading_state_response(book, kobo_reading_state)
                }
            })
            new_reading_state_last_modified = max(
                new_reading_state_last_modified,
                kobo_reading_state.last_modified)

    sync_shelves(sync_token, sync_results)

    # Advance all watermarks so the next sync only sees newer changes.
    sync_token.books_last_created = new_books_last_created
    sync_token.books_last_modified = new_books_last_modified
    sync_token.archive_last_modified = new_archived_last_modified
    sync_token.reading_state_last_modified = new_reading_state_last_modified

    return generate_sync_response(sync_token, sync_results)
Example #35
0
 def now_sql(self):
     """Return the SQL "current time" expression appropriate to the driver."""
     is_postgres = self._engine.driver == "psycopg2"
     if not is_postgres:
         # 1 Minute modifier to fix strange unit test race
         return func.datetime("now", "localtime", "+1 minutes")
     return func.now()
Example #36
0
File: db.py Project: eklitzke/graff
import datetime
import hashlib
import os
from sqlalchemy import create_engine, func, Column, ForeignKey
from sqlalchemy.types import Integer, String, Float, DateTime, Boolean
from sqlalchemy.orm import sessionmaker, relationship, backref
from sqlalchemy.ext.declarative import declarative_base
import warnings

from graff import config
from graff import crypto
from graff import geo

# Choose the backend from config: in-memory SQLite (default) or MySQL.
# SQLite has no NOW(); its "now" expression is datetime().
if config.get('memory', True):
    engine = create_engine('sqlite:///:memory:')
    now = func.datetime()
else:
    engine = create_engine('mysql+mysqldb://' +
                           config.get('db_user', 'graff') + ':' +
                           config.get('db_pass', 'gr4ff') + '@' +
                           config.get('db_host', '127.0.0.1') + '/' +
                           config.get('db_schema', 'graff'), pool_recycle=3600)
    now = func.now()

# Session factory bound to whichever engine was selected above.
Session = sessionmaker(bind=engine)

class _Base(object):

    @property
    def encid(self):
        if hasattr(self, 'secret_key'):
Example #37
0
File: db.py Project: gh2o/ScanInOut
def epoch_to_local_timestamp (ts):
	"""SQL expression converting a unix epoch value to a local timestamp."""
	return func.datetime (ts, 'unixepoch', 'localtime')
Example #38
0
 def now_sql(self):
     """Return the SQL "current time" expression appropriate to the driver."""
     if self._engine.driver == "psycopg2":
         return func.now()
     else:
         # 1 Minute modifier to fix strange unit test race
         return func.datetime("now", "localtime", "+1 minutes")
Example #39
0
def tobs():
    one_year_ago = dt.date(2017,8,23) - dt.timedelta(days=365)
        last_year_data = session.query(Measurement.tobs).\
        filter(func.datetime(Measurement.date) >= one_year_ago ).\
        filter(Measurement.tobs != 'None').\
        order_by(func.datetime(Measurement.date)).all()