def test_insert_returning_all_fields_and_arithmetics(self):
    query = PostgreSQLQuery.into(self.table_abc).insert(1).returning(
        self.table_abc.star, self.table_abc.f1 + self.table_abc.f2
    )
    self.assertEqual('INSERT INTO "abc" VALUES (1) RETURNING *,"f1"+"f2"', str(query))
def test_insert_all_columns_multi_rows_chained_returning_star_and_id(self):
    query = (
        PostgreSQLQuery.into(self.table_abc)
        .insert(1, 'a', True)
        .insert(2, 'b', False)
        .returning(
            self.table_abc.name,
            self.table_abc.star,
            self.table_abc.id,
        )
    )
    self.assertEqual(
        'INSERT INTO "abc" VALUES (1,\'a\',true),(2,\'b\',false) RETURNING *',
        str(query)
    )
def find_or_create_money(itgs: LazyItgs, money: Money, amount_usd_cents: int) -> int:
    """Find or create a row in the moneys table that matches the given money
    object.

    Arguments:
    - `itgs (LazyItgs)`: The integrations to use to connect to networked services.
    - `money (Money)`: The money object to create a corresponding row for.
    - `amount_usd_cents (int)`: The money amount in USD cents at the current
      conversion rate.
    """
    currencies = Table('currencies')
    moneys = Table('moneys')
    (currency_id,) = query_helper.find_or_create_or_find(
        itgs,
        (
            Query.from_(currencies)
            .select(currencies.id)
            .where(currencies.code == Parameter('%s'))
            .get_sql(),
            (money.currency,)
        ),
        (
            Query.into(currencies)
            .columns(
                currencies.code,
                currencies.symbol,
                currencies.symbol_on_left,
                currencies.exponent
            )
            .insert(*(Parameter('%s') for _ in range(4)))
            .returning(currencies.id)
            .get_sql(),
            (
                money.currency,
                money.symbol or f' {money.currency}',
                money.symbol_on_left if money.symbol is not None else False,
                money.exp or 2
            )
        )
    )
    (money_id,) = query_helper.find_or_create_or_find(
        itgs,
        (
            Query.from_(moneys)
            .select(moneys.id)
            .where(moneys.currency_id == Parameter('%s'))
            .where(moneys.amount == Parameter('%s'))
            .where(moneys.amount_usd_cents == Parameter('%s'))
            .get_sql(),
            (currency_id, money.minor, amount_usd_cents)
        ),
        (
            Query.into(moneys)
            .columns(moneys.currency_id, moneys.amount, moneys.amount_usd_cents)
            .insert(*(Parameter('%s') for _ in range(3)))
            .returning(moneys.id)
            .get_sql(),
            (currency_id, money.minor, amount_usd_cents)
        )
    )
    return money_id
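# `query_helper.find_or_create_or_find` above is project-specific. A plausible
# minimal sketch of the pattern it implements, assuming the (sql, args) pair
# interface shown above and psycopg2-style cursors: try the find query, fall
# back to the INSERT ... RETURNING query, and find again if a concurrent
# insert beat us to it. Names and error handling here are assumptions.
def find_or_create_or_find_sketch(itgs, find, create):
    find_sql, find_args = find
    create_sql, create_args = create
    itgs.read_cursor.execute(find_sql, find_args)
    row = itgs.read_cursor.fetchone()
    if row is not None:
        return row
    try:
        itgs.write_cursor.execute(create_sql, create_args)
        row = itgs.write_cursor.fetchone()
        itgs.write_conn.commit()
        return row
    except Exception:  # e.g. a unique violation from a concurrent insert
        itgs.write_conn.rollback()
        itgs.read_cursor.execute(find_sql, find_args)
        return itgs.read_cursor.fetchone()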
def generate_insert_sql(self, document_id: int, value: int, voter_id: int):
    query = PostgreSQLQuery.into(self.votes_table).columns(
        'document_id',
        'value',
        'voter_id',
    ).insert(
        document_id,
        value,
        voter_id,
    )
    query = query.on_conflict('document_id', 'voter_id').do_update('value', value)
    return query.get_sql()
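# A minimal usage sketch for the upsert builder above, calling pypika
# directly; the table name and values here are illustrative only.
from pypika import Table
from pypika.dialects import PostgreSQLQuery

votes_table = Table('votes')
sql = (
    PostgreSQLQuery.into(votes_table)
    .columns('document_id', 'value', 'voter_id')
    .insert(10, 1, 42)
    .on_conflict('document_id', 'voter_id')
    .do_update('value', 1)
    .get_sql()
)
# Roughly (exact quoting depends on the pypika version):
# INSERT INTO "votes" ("document_id","value","voter_id") VALUES (10,1,42)
# ON CONFLICT ("document_id", "voter_id") DO UPDATE SET "value"=1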
def test_on_conflict_do_update_with_excluded_where(self):
    qs = (
        PostgreSQLQuery.into(self.table_abc)
        .insert(1, "m")
        .on_conflict("id")
        .do_update('abc')
        .where(self.table_abc.abc.eq(1))
    )
    self.assertEqual(
        'INSERT INTO "abc" VALUES (1,\'m\') ON CONFLICT ("id") DO UPDATE SET "abc"=EXCLUDED."abc" WHERE '
        '"abc"."abc"=1',
        str(qs),
    )
def test_where_and_on_conflict_where(self):
    table_bcd = Table('bcd')
    qs = (
        PostgreSQLQuery.into(self.table_abc)
        .select(table_bcd.abc)
        .from_(table_bcd)
        .where(table_bcd.abc.eq('1'))
        .on_conflict('id')
        .where(self.table_abc.abc.eq(0))
        .where(self.table_abc.cde.eq(0))
        .do_update('field', 'val')
    )
    self.assertEqual(
        'INSERT INTO "abc" SELECT "abc" FROM "bcd" WHERE "abc"=\'1\' '
        'ON CONFLICT ("id") WHERE "abc"=0 AND "cde"=0 DO UPDATE SET "field"=\'val\'',
        str(qs)
    )
def test_on_conflict_where_conflict_fields_do_update(self):
    qs = (
        PostgreSQLQuery.into(self.table_abc)
        .insert(1, "m")
        .on_conflict('id')
        .where(self.table_abc.abc.eq(0))
        .where(self.table_abc.cde.eq(0))
        .do_update('field', 'val')
    )
    self.assertEqual(
        'INSERT INTO "abc" VALUES (1,\'m\') ON CONFLICT ("id") WHERE "abc"=0 AND "cde"=0 '
        'DO UPDATE SET "field"=\'val\'',
        str(qs),
    )
def send_alerts_for_user(
        itgs, alerts_for_user, endpoint_info_by_id, title_format, body_format,
        alert_type, version):
    """Sends an alert to the given user to warn them that they are still using
    deprecated endpoints and inform them of the deprecation/sunset schedule.

    This will store that we sent them an alert in `endpoint_alerts` and will
    wait for a response from the reddit proxy so that we don't cause a really
    long queue to build up.
    """
    date_fmt = '%b %d, %Y'
    endpoints_table_lines = [
        'Endpoint | Deprecated on | Sunsets on | First Use | Last Use | Count',
        ':--|:--|:--|:--|:--|:--'
    ]
    for alert in alerts_for_user:
        alert: MissingAlertInfo
        endpoint: EndpointInfoForAlert = endpoint_info_by_id[alert.endpoint_id]
        endpoints_table_lines.append(
            f'[{endpoint.slug}](https://redditloans.com/endpoints.html?slug={endpoint.slug})|'
            + endpoint.deprecated_on.strftime(date_fmt)
            + '|' + endpoint.sunsets_on.strftime(date_fmt)
            + '|' + alert.first_use_in_interval.strftime(date_fmt)
            + '|' + alert.last_use_in_interval.strftime(date_fmt)
            + '|' + str(alert.count_in_interval)
        )

    username = alerts_for_user[0].username
    title = title_format.format(username=username)
    body = body_format.format(
        username=username,
        endpoints_table='\n'.join(endpoints_table_lines)
    )

    endpoint_alerts = Table('endpoint_alerts')
    query = (
        Query.into(endpoint_alerts)
        .columns(
            endpoint_alerts.endpoint_id,
            endpoint_alerts.user_id,
            endpoint_alerts.alert_type
        )
    )
    args = []
    for alert in alerts_for_user:
        query = query.insert(*(Parameter('%s') for _ in range(3)))
        args.append(alert.endpoint_id)
        args.append(alert.user_id)
        args.append(alert_type)

    itgs.write_cursor.execute(query.get_sql(), args)
    itgs.write_conn.commit()

    utils.reddit_proxy.send_request(
        itgs, 'deprecated_alerts', version, 'compose',
        {
            'recipient': username,
            'subject': title,
            'body': body
        }
    )
def scan_for_comments(itgs, version):
    """Scans for new comments using the given logger and amqp connection"""
    itgs.logger.print(Level.TRACE, 'Scanning for new comments..')
    after = None
    rpiden = 'comments'
    handled_fullnames = Table('handled_fullnames')
    while True:
        comments, after = _fetch_comments(itgs, version, after)
        if not comments:
            itgs.logger.print(Level.DEBUG, 'Found no more comments!')
            break
        fullnames = [comm['fullname'] for comm in comments]
        itgs.read_cursor.execute(
            Query.from_(handled_fullnames)
            .select('fullname')
            .where(handled_fullnames.fullname.isin([Parameter('%s') for _ in fullnames]))
            .get_sql(),
            fullnames
        )
        rows = itgs.read_cursor.fetchall()
        itgs.read_conn.commit()

        itgs.logger.print(Level.TRACE, 'Found {} new comments', len(fullnames) - len(rows))
        if len(fullnames) == len(rows):
            break

        num_to_find = len(fullnames) - len(rows)
        seen_set = set(row[0] for row in rows)
        for comment in comments:
            if comment['fullname'] in seen_set:
                continue
            handle_comment(itgs, comment, rpiden, version)
            itgs.write_cursor.execute(
                Query.into(handled_fullnames)
                .columns('fullname')
                .insert(Parameter('%s'))
                .get_sql(),
                (comment['fullname'],)
            )
            itgs.write_conn.commit()
            num_to_find -= 1
            if num_to_find <= 0:
                break
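# The dedup query above generates one placeholder per fullname; a minimal
# standalone sketch of the SQL it produces (values are illustrative):
from pypika import Parameter, Query, Table

handled_fullnames = Table('handled_fullnames')
fullnames = ['t1_abc', 't1_def']
sql = (
    Query.from_(handled_fullnames)
    .select('fullname')
    .where(handled_fullnames.fullname.isin([Parameter('%s') for _ in fullnames]))
    .get_sql()
)
# Roughly: SELECT "fullname" FROM "handled_fullnames" WHERE "fullname" IN (%s,%s)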
def test_index_no_perm(self):
    with helper.clear_tables(self.conn, self.cursor, ['responses']):
        responses = Table('responses')
        self.cursor.execute(
            Query.into(responses)
            .columns(responses.name, responses.response_body, responses.description)
            .insert(*[Parameter('%s') for _ in range(3)])
            .get_sql(),
            ('foobar', 'body', 'desc')
        )
        with helper.user_with_token(self.conn, self.cursor, []) as (user_id, token):
            r = requests.get(
                HOST + '/responses',
                headers={'Authorization': f'bearer {token}'}
            )
            self.assertEqual(r.status_code, 403)
async def add_links(ob, links):
    storage = get_storage()
    if storage is None:
        return
    if hasattr(ob, "context"):
        uuid = ob.context.uuid
    else:
        uuid = ob.uuid
    query = Query.into(links_table).columns("source_id", "target_id")
    for link in links:
        query = query.insert(_safe_uid(str(uuid)), _safe_uid(str(link.uuid)))
    async with storage.pool.acquire() as conn:
        await conn.execute(str(query))
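# Chained .insert(...) calls, as in the loop above, accumulate rows into a
# single multi-row VALUES clause; a minimal standalone sketch (table name and
# ids are illustrative):
from pypika import Query, Table

links_table = Table('links')
q = Query.into(links_table).columns('source_id', 'target_id')
for src, dst in [('a1', 'b1'), ('a1', 'b2')]:
    q = q.insert(src, dst)
# str(q) -> INSERT INTO "links" ("source_id","target_id") VALUES ('a1','b1'),('a1','b2')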
def test_on_conflict_where_complex(self):
    table_bcd = Table("bcd")
    qs = (
        PostgreSQLQuery.into(self.table_abc)
        .select(table_bcd.abc)
        .from_(table_bcd)
        .where(table_bcd.abc.eq("1"))
        .on_conflict("id")
        .where(self.table_abc.abc.eq(0))
        .where(self.table_abc.cde.eq(0))
        .do_update("field", "val")
        .where(self.table_abc.id.eq(2))
        .where(self.table_abc.sub_id.eq(3))
    )
    self.assertEqual(
        'INSERT INTO "abc" SELECT "abc" FROM "bcd" WHERE "abc"=\'1\' '
        'ON CONFLICT ("id") WHERE "abc"=0 AND "cde"=0 '
        'DO UPDATE SET "field"=\'val\' WHERE "abc"."id"=2 AND "abc"."sub_id"=3',
        str(qs),
    )
def handle_temporary_ban(itgs: LazyItgs, act: dict) -> None:
    username: str = act['target_author']
    mod_username: str = act['mod']
    details: str = act['details']
    subreddit = act['subreddit']

    try:
        ban_duration = parsing.temp_ban_parser.parse_temporary_ban(details)
    except parsing.temp_ban_parser.TempBanDetailsParseError:
        itgs.logger.exception(
            Level.WARN,
            'The temporary ban to {} has details {}: this could not be '
            'converted into a duration in seconds. This means we will '
            'not store the ban in temporary_bans, which means when the '
            'ban expires we will not flush their permission cache, which '
            'means it is possible the user will get stuck unable to interact '
            'with the loansbot. To work around this for the user, add/remove '
            'a contributor once the ban expires.',
            username, repr(details)
        )
        return

    banned_user_id = find_or_create_user(itgs, username)
    mod_user_id = find_or_create_user(itgs, mod_username)

    temp_bans = Table('temporary_bans')
    itgs.write_cursor.execute(
        Query.into(temp_bans)
        .columns(
            temp_bans.user_id,
            temp_bans.mod_user_id,
            temp_bans.subreddit,
            temp_bans.ends_at
        )
        .insert(*(Parameter('%s') for _ in range(4)))
        .get_sql(),
        (
            banned_user_id,
            mod_user_id,
            subreddit,
            datetime.fromtimestamp(time.time() + ban_duration)
        )
    )
    itgs.write_conn.commit()
    itgs.logger.print(
        Level.INFO,
        'Successfully processed a temporary ban on {} in {} by {} of {} ({} seconds)',
        username, subreddit, mod_username, details, ban_duration
    )
def generate_insert_sql(self, sharience_pb: ShariencePb, fields: Optional[Set[str]] = None):
    columns = []
    inserts = []
    fields = fields or self.db_fields
    for field_name in fields:
        if self.is_field_set(sharience_pb, field_name):
            field_value = getattr(sharience_pb, field_name)
            field_name, field_value = self.cast_field_value(field_name, field_value)
            columns.append(field_name)
            inserts.append(field_value)
    query = PostgreSQLQuery.into(self.sharience_table).columns(*columns).insert(*inserts)
    return query.returning(self.sharience_table.id).get_sql()
def test_on_conflict_from_subquery(self):
    table_bcd = Table('bcd')
    query = (
        PostgreSQLQuery.into(self.table_abc)
        .insert(self.table_abc.fname, self.table_abc.lname)
        .select(table_bcd.fname, table_bcd.lname)
        .from_(table_bcd)
        .on_conflict(self.table_abc.id, self.table_abc.sub_id)
        .do_update(self.table_abc.fname, 1)
        .do_update(self.table_abc.lname, table_bcd.lname)
        .do_update(
            self.table_abc.cname,
            Case()
            .when(self.table_abc.cname.eq('cname'), 'new_name')
            .else_(self.table_abc.cname)
        )
    )
    self.assertEqual(
        'INSERT INTO "abc" VALUES ("fname","lname") '
        'ON CONFLICT ("id", "sub_id") '
        'DO UPDATE SET "fname"=1,"lname"="bcd"."lname",'
        '"cname"=CASE WHEN "abc"."cname"=\'cname\' THEN \'new_name\' ELSE "abc"."cname" END',
        str(query)
    )
def get_loansbot_user_id(itgs: 'LazyItgs') -> int:
    """This function returns the id of the loansbot user. If they do not
    exist they are created. This is useful since that's the "author" of
    automated permission changes.

    Arguments:
    - `itgs (LazyItgs)`: The integrations for connecting to networked services.

    Returns:
    - `loansbot_user_id (int)`: The id of the loansbot user.
    """
    users = Table('users')
    unm = 'loansbot'
    (user_id,) = query_helper.find_or_create_or_find(
        itgs,
        (
            Query.from_(users)
            .select(users.id)
            .where(users.username == Parameter('%s'))
            .get_sql(),
            (unm,)
        ),
        (
            Query.into(users)
            .columns(users.username)
            .insert(Parameter('%s'))
            .returning(users.id)
            .get_sql(),
            (unm,)
        )
    )
    return user_id
async def add_aliases(ob, paths: list, container=None, moved=True, storage=None):  # noqa
    if not isinstance(moved, bool):
        raise Exception("Invalid type {}".format(moved))
    if hasattr(ob, "context"):
        uuid = ob.context.uuid
    else:
        uuid = ob.uuid
    storage = storage or get_storage()
    if storage is None:
        return
    if container is None:
        container = find_container(ob)
    query = Query.into(aliases_table).columns("zoid", "container_id", "path", "moved")  # noqa
    values = []
    for i, path in enumerate(paths):
        if not isinstance(path, str):
            raise Exception("Invalid type {}".format(path))
        path = "/" + path.strip("/")
        values.append(path)
        query = query.insert(
            uuid,
            container.uuid,
            f"${i + 1}",
            moved,
        )
    query = str(query)
    for i in range(len(paths)):
        # pypika renders the `$N` markers as quoted string literals; strip the
        # quotes so asyncpg sees them as positional placeholders.
        query = query.replace(f"'${i + 1}'", f"${i + 1}")
    async with storage.pool.acquire() as conn:
        await conn.execute(query, *values)
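# pypika renders plain string values as quoted SQL literals, while asyncpg
# expects bare $N positional placeholders; the replace() loop above strips
# the quotes pypika adds around the '$N' markers. A minimal sketch of the
# same trick (table and values are illustrative):
from pypika import Query, Table

t = Table('aliases')
sql = str(Query.into(t).columns('zoid', 'path').insert('uid-1', '$1'))
# -> INSERT INTO "aliases" ("zoid","path") VALUES ('uid-1','$1')
sql = sql.replace("'$1'", '$1')
# -> INSERT INTO "aliases" ("zoid","path") VALUES ('uid-1',$1)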
def insert_balance_reg_calc(self, points: List[BalanceRegModel]):
    """Inserts all of the calculations into the database."""
    con = engine_mart.connect()
    self._logger.debug("insert_balance_reg_calc()... start")
    tran: Transaction = con.begin()
    try:
        for elem in points:
            d = Table('calc_reg_balance')
            q = (
                Q.into(d)
                .insert(elem.Id, elem.Id_tu, elem.StartPeriod, elem.Time_Start_Write)
                .on_conflict(d.id)
                .do_update(d.id_tu, elem.Id_tu)
                .do_update(d.start_period, elem.StartPeriod)
                .do_update(d.time_start_write, elem.Time_Start_Write)
            )
            sql = q.get_sql()
            self._logger.debug(f'SQL: {sql}')
            con.execute(sql)
        tran.commit()
    except Exception as e:
        self._logger.error(f'DalMart.insert_balance_reg_calc() {e}')
        tran.rollback()
def store_letter_message_with_id_and_names(
        itgs: 'LazyItgs', user_id: int,
        title_id: int, title_name: str,
        body_id: int, body_name: str,
        commit=False):
    """Similar to store_letter_message except this assumes you already know
    the response information that was sent, i.e., the id of the row in
    responses and the name of the response.

    Arguments:
    - `itgs (LazyItgs)`: The integrations to use to connect to networked
      components
    - `user_id (int)`: The id of the user who we sent the message to
    - `title_id (int)`: The ID of the title
    - `title_name (str)`: The current name of the title, in case the title
      response gets deleted.
    - `body_id (int)`: The ID of the body
    - `body_name (str)`: The current name of the body, in case the body
      response gets deleted.
    - `commit (bool)`: True to immediately commit the transaction, false not to.
    """
    mod_onboarding_msg_history = Table('mod_onboarding_msg_history')
    itgs.write_cursor.execute(
        Query.into(mod_onboarding_msg_history)
        .columns(
            mod_onboarding_msg_history.user_id,
            mod_onboarding_msg_history.title_response_id,
            mod_onboarding_msg_history.title_response_name,
            mod_onboarding_msg_history.body_response_id,
            mod_onboarding_msg_history.body_response_name
        )
        .insert(*(Parameter('%s') for _ in range(5)))
        .get_sql(),
        (user_id, title_id, title_name, body_id, body_name)
    )
    if commit:
        itgs.write_conn.commit()
def create_settings_events(itgs, user_id: int, changer_user_id: int, changes: dict, commit=False):
    """Create user settings events from the given list of changes. These
    allow us to maintain a history of a user's settings and who changed them.

    Arguments:
    - `itgs (LazyIntegrations)`: The integrations to use to connect to
      networked components.
    - `user_id (int)`: The id of the user whose settings changed
    - `changer_user_id (int)`: The id of the user who changed the settings
    - `changes (dict[str, dict])`: A dictionary where the keys are the
      property names that changed and the values are dictionaries with a
      fixed shape; two keys "old" and "new" which correspond to the old and
      new value of this property respectively. We serialize using json.
    - `commit (bool)`: If True we will commit the changes immediately.
      Defaults to False as it's easier to read controllers if all commits
      are explicit, i.e., the controller at least says `commit=True`
    """
    events = Table('user_settings_events')
    sql = (
        Query.into(events)
        .columns(
            events.user_id,
            events.changer_user_id,
            events.property_name,
            events.old_value,
            events.new_value
        )
        .insert(*[tuple(Parameter('%s') for _ in range(5)) for _ in changes])
        .get_sql()
    )
    args = []
    for (prop_name, change) in changes.items():
        args.append(user_id)
        args.append(changer_user_id)
        args.append(prop_name)
        args.append(json.dumps(change['old']))
        args.append(json.dumps(change['new']))
    itgs.write_cursor.execute(sql, args)
    if commit:
        itgs.write_conn.commit()
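# A minimal sketch of the multi-row parameterized insert the function above
# builds: each tuple passed to .insert() becomes one VALUES row, so the
# placeholders repeat once per changed property (two rows shown here;
# table and column names are taken from the function above):
from pypika import Parameter, Query, Table

events = Table('user_settings_events')
sql = (
    Query.into(events)
    .columns(
        events.user_id, events.changer_user_id, events.property_name,
        events.old_value, events.new_value
    )
    .insert(*[tuple(Parameter('%s') for _ in range(5)) for _ in range(2)])
    .get_sql()
)
# Roughly: INSERT INTO "user_settings_events" (...)
#          VALUES (%s,%s,%s,%s,%s),(%s,%s,%s,%s,%s)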
def generate_insert_sql(self, scimag_pb: ScimagPb, fields: Optional[Set[str]] = None):
    columns = []
    inserts = []
    fields = fields or self.db_fields
    for field_name in fields:
        if self.is_field_set(scimag_pb, field_name):
            field_value = getattr(scimag_pb, field_name)
            field_name, field_value = self.cast_field_value(field_name, field_value)
            columns.append(field_name)
            inserts.append(field_value)
    query = PostgreSQLQuery.into(self.scimag_table).columns(*columns).insert(*inserts)
    if columns:
        query = query.on_conflict('doi')
        for field, val in zip(columns, inserts):
            query = query.do_update(field, val)
    return query.returning(self.scimag_table.id).get_sql()
def insert_balance_calc(self, points: List[BalanceModel]):
    """Inserts all of the calculations into the database."""
    con = engine_mart.connect()
    self._logger.debug("insert_balance_calc()... start")
    tran: Transaction = con.begin()
    try:
        for elem in points:
            d = Table('calc_balance')
            q = (
                Q.into(d)
                .insert(
                    elem.Id, elem.Id_tu, elem.Dtp, elem.Locality,
                    elem.NameOfAccountingPoint, elem.STrRa, elem.Pxx,
                    elem.LossXX, elem.Ktt, elem.HeadOfCounter,
                    elem.StartPeriod, elem.QSlim, elem.Time_Start_Write,
                    elem.Country, elem.Driver
                )
                .on_conflict(d.id)
                .do_update(d.id_tu, elem.Id_tu)
                .do_update(d.dtp, elem.Dtp)
                .do_update(d.locality, elem.Locality)
                .do_update(d.name_of_accounting_point, elem.NameOfAccountingPoint)
                .do_update(d.str_ra, elem.STrRa)
                .do_update(d.pxx, elem.Pxx)
                .do_update(d.loss_xx, elem.LossXX)
                .do_update(d.ktt, elem.Ktt)
                .do_update(d.head_of_counter, elem.HeadOfCounter)
                .do_update(d.start_period, elem.StartPeriod)
                .do_update(d.q_slim, elem.QSlim)
                .do_update(d.time_start_write, elem.Time_Start_Write)
                .do_update(d.country, elem.Country)
                .do_update(d.driver, elem.Driver)
            )
            sql = q.get_sql()
            self._logger.debug(f'SQL: {sql}')
            con.execute(sql)
        tran.commit()
    except Exception as e:
        self._logger.error(f'DalMart.insert_balance_calc() {e}')
        tran.rollback()
def test_default_values(self):
    users = Table('users')
    self.cursor.execute(
        Query.into(users).columns('username').insert(Parameter('%s')).get_sql(),
        ('test-user',)
    )
    self.cursor.execute(
        "SELECT currval(pg_get_serial_sequence('users', 'id'))"
    )
    user_id = self.cursor.fetchone()
    self.cursor.execute(
        Query.from_(users)
        .select(
            users.username, users.auth, users.password_digest,
            users.created_at, users.updated_at
        )
        .where(users.id == Parameter('%s'))
        .limit(1)
        .get_sql(),
        (user_id,)
    )
    row = self.cursor.fetchone()
    self.connection.rollback()
    self.assertIsNotNone(row)
    uname, auth, pdig, cat, updat = row
    self.assertEqual(uname, 'test-user')
    self.assertEqual(auth, 0)
    self.assertIsNone(pdig)
    self.assertIsNotNone(cat)
    self.assertIsNotNone(updat)
def test_insert_all_columns_single_element_arrays(self):
    query = (
        PostgreSQLQuery.into(self.table_abc)
        .insert((1, "a", True))
        .returning(self.table_abc.star)
    )
    self.assertEqual("INSERT INTO \"abc\" VALUES (1,'a',true) RETURNING *", str(query))
def test_insert_returning_null(self):
    query = PostgreSQLQuery.into(self.table_abc).insert(1).returning(None)
    self.assertEqual('INSERT INTO "abc" VALUES (1) RETURNING NULL', str(query))
def test_insert_returning_all_fields(self):
    query = PostgreSQLQuery.into(self.table_abc).insert(1).returning(self.table_abc.star)
    self.assertEqual('INSERT INTO "abc" VALUES (1) RETURNING *', str(query))
def test_insert_on_conflict_two_handlers_do_update(self):
    with self.assertRaises(QueryException):
        PostgreSQLQuery.into(self.table_abc).insert(1).on_conflict(
            self.table_abc.id
        ).do_update(self.table_abc.name, "m").do_nothing()
def test_insert_returning_one_field_str(self):
    query = PostgreSQLQuery.into(self.table_abc).insert(1).returning("id")
    self.assertEqual('INSERT INTO "abc" VALUES (1) RETURNING "id"', str(query))
def test_insert_on_conflict_do_update_field_str(self):
    query = PostgreSQLQuery.into(self.table_abc).insert(1, "m").on_conflict('id').do_update('name', "m")
    self.assertEqual(
        'INSERT INTO "abc" VALUES (1,\'m\') ON CONFLICT (id) DO UPDATE SET name=\'m\'',
        str(query)
    )
def test_insert_returning_tuple(self):
    query = PostgreSQLQuery.into(self.table_abc).insert(1).returning((1, 2, 3))
    self.assertEqual('INSERT INTO "abc" VALUES (1) RETURNING (1,2,3)', str(query))
def test_insert_returning_from_other_table(self):
    table_cba = Table("cba")
    with self.assertRaises(QueryException):
        PostgreSQLQuery.into(self.table_abc).insert(1).returning(table_cba.id)
def test_insert_on_conflict_no_handler(self):
    with self.assertRaises(QueryException):
        str(PostgreSQLQuery.into(self.table_abc).insert(1).on_conflict(self.table_abc.id))
def test_insert_returning_aggregate(self):
    with self.assertRaises(QueryException):
        PostgreSQLQuery.into(self.table_abc).insert(1).returning(Avg(self.table_abc.views))
def test_insert_on_conflict_do_nothing_field_str(self):
    query = PostgreSQLQuery.into(self.table_abc).insert(1).on_conflict('id').do_nothing()
    self.assertEqual('INSERT INTO "abc" VALUES (1) ON CONFLICT (id) DO NOTHING', str(query))