def test_join_arithmetic_field(self):
    """A JOIN ... ON condition may compare a column against date arithmetic."""
    query = (
        Query.from_(self.table0)
        .join(self.table1)
        .on(self.table0.dt == self.table1.dt - Interval(weeks=1))
        .select('*')
    )

    expected = (
        'SELECT * FROM "abc" '
        'JOIN "efg" ON "abc"."dt"="efg"."dt"-INTERVAL \'1 WEEK\''
    )
    self.assertEqual(expected, str(query))
def test_join_with_arithmetic_function_in_select(self):
    """Arithmetic expressions are allowed in the SELECT list of a join query."""
    query = (
        Query.from_(self.table0)
        .join(self.table1)
        .on(self.table0.dt == self.table1.dt - Interval(weeks=1))
        .select(self.table0.fiz - self.table0.buz, self.table1.star)
    )

    expected = (
        'SELECT "abc"."fiz"-"abc"."buz","efg".* FROM "abc" '
        'JOIN "efg" ON "abc"."dt"="efg"."dt"-INTERVAL \'1 WEEK\''
    )
    self.assertEqual(expected, str(query))
def bonus_filters(query, add_param, endpoint_users, **kwargs):
    # Narrows `query` to endpoint_users rows whose endpoint is in its final
    # sunset window. `add_param` and **kwargs are accepted for interface
    # compatibility with sibling filter hooks but are unused here.
    endpoints = Table('endpoints')
    # EXISTS subquery conditions, in order:
    #   1. join endpoint to the outer endpoint_users row;
    #   2. sunsets_on within the last 27 days (not yet long past);
    #   3. sunsets_on already in the past;
    #   4. days since sunset must exceed a per-user threshold derived from
    #      how long before the sunset the user signed up:
    #      30 - floor(days(created_at - sunsets_on) / 3) * 3.
    #      NOTE(review): the 27/30/3 constants and this bucketing formula are
    #      presumably a staged grace-period schedule — confirm against the
    #      product spec before changing.
    return (query.where(
        Exists(
            Query.from_(endpoints)
            .where(endpoints.id == endpoint_users.endpoint_id)
            .where(endpoints.sunsets_on > Now() - Interval(days=27))
            .where(endpoints.sunsets_on < Now())
            .where(
                DatePart('day', endpoints.sunsets_on - Now()) <
                (30 - Floor(
                    DatePart(
                        'day',
                        endpoint_users.created_at - endpoints.sunsets_on) / 3)
                 * 3)))))
def test_authtoken_to_users_me(self):
    """End-to-end check of GET /users/{id}/me with bearer-token auth.

    Seeds a user and a matching authtoken directly in the database, then
    verifies the endpoint returns exactly the username plus the expected
    cache-control directives.
    """
    with helper.clear_tables(self.conn, self.cursor, ['users']):
        # Insert a user and capture the generated id.
        users = Table('users')
        self.cursor.execute(
            Query.into(users).columns(users.username).insert(
                Parameter('%s')).returning(users.id).get_sql(),
            ('testuser', ))
        (user_id, ) = self.cursor.fetchone()

        # Insert a token for that user; expires_at is computed server-side
        # (Now() + 1 hour), so only 4 of the 5 columns are parameterized.
        authtokens = Table('authtokens')
        self.cursor.execute(
            Query.into(authtokens).columns(
                authtokens.user_id, authtokens.token,
                authtokens.expires_at, authtokens.source_type,
                authtokens.source_id).insert(
                    Parameter('%s'), Parameter('%s'),
                    Now() + Interval(hours=1), Parameter('%s'),
                    Parameter('%s')).get_sql(),
            (user_id, 'testtoken', 'other', 1))
        self.conn.commit()

        # Hit the live endpoint with the seeded token.
        r = requests.get(
            f'{HOST}/users/{user_id}/me',
            headers={'Authorization': 'bearer testtoken'})
        r.raise_for_status()
        self.assertEqual(r.status_code, 200)

        # Body must contain the username and nothing else.
        body = r.json()
        self.assertIsInstance(body, dict)
        self.assertIsInstance(body.get('username'), str)
        self.assertEqual(len(body), 1)
        self.assertEqual(body['username'], 'testuser')

        # headers: response must be privately cacheable with positive
        # max-age / stale-while-revalidate / stale-if-error values.
        self.assertIsInstance(r.headers.get('cache-control'), str)
        cc = r.headers.get('cache-control')
        self.assertIn('private', cc)
        self.assertIn('max-age', cc)
        self.assertIn('stale-while-revalidate', cc)
        self.assertIn('stale-if-error', cc)

        # Parse "key=value" directives (after dropping the bare 'private').
        split_cache_control = cc.split(', ')
        split_cache_control.remove('private')
        cc_args = dict([itm.split('=') for itm in split_cache_control])
        for key in list(cc_args.keys()):
            cc_args[key] = int(cc_args[key])
        self.assertGreater(cc_args['max-age'], 0)
        self.assertGreater(cc_args['stale-while-revalidate'], 0)
        self.assertGreater(cc_args['stale-if-error'], 0)
def scan_for_expired_temp_bans(itgs: LazyIntegrations, version: float) -> None:
    """Scans for any expired temporary bans in the temporary_bans table.

    For any rows that are found the corresponding users permission cache
    is flushed and the row is deleted.

    :param itgs: lazily-initialized integrations bundle providing cursors,
        connection, and logger
    :param version: unused here; part of the scheduled-job interface
    """
    temp_bans = Table('temporary_bans')
    users = Table('users')
    limit_per_iteration = 100
    # I don't anticipate there being that many temp bans that expire, so the
    # fact this races isn't that big of a concern. Furthermore, flushing the
    # cache on the same user twice in a row won't cause any issues. However,
    # I still implement the limit and looping to avoid OOM if for some reason
    # once in a blue moon a ton of temporary bans expire at once
    while True:
        # Select on the write cursor so the later DELETE sees the same
        # snapshot within this transaction. The 1-minute lookahead also
        # catches bans about to expire so we don't miss them between runs.
        itgs.write_cursor.execute(
            Query.from_(temp_bans).join(users).on(
                users.id == temp_bans.user_id).select(
                    temp_bans.id, users.username, temp_bans.subreddit,
                    temp_bans.created_at, temp_bans.ends_at).where(
                        temp_bans.ends_at < Now() + Interval(minutes=1)).limit(
                            limit_per_iteration).get_sql())
        # BUGFIX: fetch from the cursor that executed the query; the
        # original fetched from read_cursor, which never ran this SELECT.
        rows = itgs.write_cursor.fetchall()

        for (rowid, username, subreddit, created_at, ends_at) in rows:
            itgs.logger.print(
                Level.INFO,
                'Detected a temporary ban on /u/{} in /r/{} at {} expired at {}; '
                + ' clearing users permission cache [rowid = {}]',
                username, subreddit, created_at, ends_at, rowid)
            flush_cache(itgs, username)

        if rows:
            # Delete exactly the rows we just processed, by id.
            itgs.write_cursor.execute(
                Query.from_(temp_bans).delete().where(
                    temp_bans.id.isin(
                        [Parameter('%s') for _ in rows])).get_sql(),
                [row[0] for row in rows])
            itgs.write_conn.commit()

        if len(rows) < limit_per_iteration:
            break
def delete_outdated(self, days_interval: int = 7):
    """Delete rows whose publish_date is older than `days_interval` days.

    Returns the number of deleted rows (0 on error; the transaction is
    rolled back in that case).
    """
    # NOTE(review): fn.Date(Interval(...)) wraps an interval in DATE(),
    # which looks odd — presumably the intent is Now() - Interval(days=N)
    # cast to a date. Verify the generated SQL against the target dialect.
    news_expiry_date = fn.Now() - fn.Date(Interval(days=days_interval))
    bare_query = Query.from_(self._table).delete().where(
        news_expiry_date > fn.Date(self._fields['publish_date']))
    query = query_to_str(bare_query)
    rows_count = 0
    try:
        self._cursor.execute(query)
    except DBError as e:
        # Roll back on any database error and report zero deletions.
        logging.error(
            f"Repository [{self._table}]: delete_outdated -> [{e.errno}]{e.msg}"
        )
        self._connection.rollback()
    else:
        self._connection.commit()
        rows_count = self._cursor.rowcount
        logging.info(
            f"Repository [{self._table}]: delete_outdated -> Deleted {rows_count} record/s"
        )
    return rows_count
def test_add_day_second(self):
    """Day+second intervals render with the combined DAY_SECOND unit."""
    expression = dt + Interval(days=1, seconds=1)
    self.assertEqual('"dt"+INTERVAL \'1 0:0:1 DAY_SECOND\'', str(expression))
def test_add_hour_minute(self):
    """Hour+minute intervals render with the combined HOUR_MINUTE unit."""
    expression = dt + Interval(hours=1, minutes=1)
    self.assertEqual('"dt"+INTERVAL \'1:1 HOUR_MINUTE\'', str(expression))
def test_add_hour_microsecond(self):
    """Hour+microsecond intervals span the full HOUR_MICROSECOND range."""
    expression = dt + Interval(hours=1, microseconds=1)
    self.assertEqual('"dt"+INTERVAL \'1:0:0.1 HOUR_MICROSECOND\'',
                     str(expression))
def test_add_minute_microsecond(self):
    """Minute+microsecond intervals use the MINUTE_MICROSECOND unit."""
    expression = dt + Interval(minutes=1, microseconds=1)
    self.assertEqual('"dt"+INTERVAL \'1:0.1 MINUTE_MICROSECOND\'',
                     str(expression))
def test_add_year(self):
    """A single-unit year interval renders as INTERVAL '1 YEAR'."""
    expression = dt + Interval(years=1)
    self.assertEqual('"dt"+INTERVAL \'1 YEAR\'', str(expression))
def test_add_month(self):
    """A single-unit month interval renders as INTERVAL '1 MONTH'."""
    expression = dt + Interval(months=1)
    self.assertEqual('"dt"+INTERVAL \'1 MONTH\'', str(expression))
def test_oracle_dialect_uses_single_quotes_around_expression_in_an_interval(
        self):
    """Oracle quotes only the magnitude; the unit stays outside the quotes."""
    rendered = Interval(days=1).get_sql(dialect=Dialects.ORACLE)
    self.assertEqual("INTERVAL '1' DAY", rendered)
def test_mysql_dialect_does_not_use_quotes_around_interval(self):
    """MySQL renders the interval expression entirely unquoted."""
    rendered = Interval(days=1).get_sql(dialect=Dialects.MYSQL)
    self.assertEqual("INTERVAL 1 DAY", rendered)
def test_add_value_complex_expressions(self):
    """Multiple intervals may be chained with + in one expression."""
    expression = dt + Interval(quarters=1) + Interval(weeks=1)
    self.assertEqual('"dt"+INTERVAL \'1 QUARTER\'+INTERVAL \'1 WEEK\'',
                     str(expression))
def test_add_day(self):
    """A single-unit day interval renders as INTERVAL '1 DAY'."""
    expression = dt + Interval(days=1)
    self.assertEqual('"dt"+INTERVAL \'1 DAY\'', str(expression))
def test_add_week(self):
    """A single-unit week interval renders as INTERVAL '1 WEEK'."""
    expression = dt + Interval(weeks=1)
    self.assertEqual('"dt"+INTERVAL \'1 WEEK\'', str(expression))
def test_vertica_dialect_uses_single_quotes_around_interval(self):
    """Vertica quotes the whole interval body, unit included."""
    rendered = Interval(days=1).get_sql(dialect=Dialects.VERTICA)
    self.assertEqual("INTERVAL '1 DAY'", rendered)
def test_add_quarter(self):
    """A single-unit quarter interval renders as INTERVAL '1 QUARTER'."""
    expression = dt + Interval(quarters=1)
    self.assertEqual('"dt"+INTERVAL \'1 QUARTER\'', str(expression))
def test_redshift_dialect_uses_single_quotes_around_interval(self):
    """Redshift quotes the whole interval body, unit included."""
    rendered = Interval(days=1).get_sql(dialect=Dialects.REDSHIFT)
    self.assertEqual("INTERVAL '1 DAY'", rendered)
def test_add_second_microsecond(self):
    """Second+microsecond intervals use the SECOND_MICROSECOND unit."""
    expression = dt + Interval(seconds=1, microseconds=1)
    self.assertEqual('"dt"+INTERVAL \'1.1 SECOND_MICROSECOND\'',
                     str(expression))
def test_postgresql_dialect_uses_single_quotes_around_interval(self):
    """PostgreSQL quotes the whole interval body, unit included."""
    rendered = Interval(days=1).get_sql(dialect=Dialects.POSTGRESQL)
    self.assertEqual("INTERVAL '1 DAY'", rendered)
def test_add_minute_second(self):
    """Minute+second intervals use the combined MINUTE_SECOND unit."""
    expression = dt + Interval(minutes=1, seconds=1)
    self.assertEqual('"dt"+INTERVAL \'1:1 MINUTE_SECOND\'', str(expression))
def test_do_not_truncate_integer_values(self):
    """Multi-digit magnitudes must be kept intact, not truncated."""
    interval = Interval(seconds=10)
    self.assertEqual("INTERVAL '10 SECOND'", str(interval))
def test_add_hour_second(self):
    """Hour+second intervals span the full HOUR_SECOND range."""
    expression = dt + Interval(hours=1, seconds=1)
    self.assertEqual('"dt"+INTERVAL \'1:0:1 HOUR_SECOND\'', str(expression))
def test_do_not_truncate_months_between_years_and_datys(self):
    """Years+days keep a zero months placeholder in the YEAR_DAY form."""
    # NOTE: "datys" in the method name is a long-standing typo for "days";
    # kept as-is so existing test-selection references don't break.
    interval = Interval(years=10, days=10)
    self.assertEqual("INTERVAL '10-0-10 YEAR_DAY'", str(interval))
def test_add_day_microsecond(self):
    """Day+microsecond intervals span the full DAY_MICROSECOND range."""
    expression = dt + Interval(days=1, microseconds=1)
    self.assertEqual('"dt"+INTERVAL \'1 0:0:0.1 DAY_MICROSECOND\'',
                     str(expression))
def test_add_second(self):
    """A single-unit second interval renders as INTERVAL '1 SECOND'."""
    expression = dt + Interval(seconds=1)
    self.assertEqual('"dt"+INTERVAL \'1 SECOND\'', str(expression))
def test_add_day_minute(self):
    """Day+minute intervals use the combined DAY_MINUTE unit."""
    expression = dt + Interval(days=1, minutes=1)
    self.assertEqual('"dt"+INTERVAL \'1 0:1 DAY_MINUTE\'', str(expression))
def test_add_minute(self):
    """A single-unit minute interval renders as INTERVAL '1 MINUTE'."""
    expression = dt + Interval(minutes=1)
    self.assertEqual('"dt"+INTERVAL \'1 MINUTE\'', str(expression))