def test_bad_args(self):
    """on_duplicate_key_update() rejects empty and ambiguous argument forms."""
    # No update values at all is a ValueError.
    assert_raises(
        ValueError,
        insert(self.tables.foos, values={}).on_duplicate_key_update)
    # A positional dict mixed with keyword args is ambiguous.
    assert_raises(
        exc.ArgumentError,
        insert(self.tables.foos, values={}).on_duplicate_key_update,
        {'id': 1, 'bar': 'b'},
        id=1,
        bar='b',
    )
    # More than one positional dict is rejected as well.
    assert_raises(
        exc.ArgumentError,
        insert(self.tables.foos, values={}).on_duplicate_key_update,
        {'id': 1, 'bar': 'b'},
        {'id': 2, 'bar': 'baz'},
    )
def test_bad_args(self):
    """Invalid on_duplicate_key_update invocations raise the expected errors."""

    def fresh():
        # Build a new INSERT each time so calls are independent.
        return insert(self.tables.foos, values={}).on_duplicate_key_update

    # Calling with no update values at all is rejected.
    assert_raises(ValueError, fresh())
    # A positional dict combined with keyword arguments is ambiguous.
    assert_raises(
        exc.ArgumentError, fresh(), {"id": 1, "bar": "b"}, id=1, bar="b")
    # Two positional dicts are likewise disallowed.
    assert_raises(
        exc.ArgumentError, fresh(),
        {"id": 1, "bar": "b"}, {"id": 2, "bar": "baz"})
def test_on_duplicate_key_update_preserve_order(self):
    """MySQL applies ON DUPLICATE KEY UPDATE assignments left to right, so
    the order of the (column, expression) pairs changes the result whenever
    one assignment reads a column that another assignment overwrites."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        # Seed two rows; both will be collided with below.
        conn.execute(
            insert(
                foos,
                [
                    dict(id=1, bar="b", baz="bz"),
                    dict(id=2, bar="b", baz="bz2"),
                ],
            ))
        stmt = insert(foos)
        update_condition = foos.c.updated_once == False
        # The following statements show importance of the columns update
        # ordering as old values being referenced in UPDATE clause are
        # getting replaced one by one from left to right with their new
        # values.
        stmt1 = stmt.on_duplicate_key_update([
            (
                "bar",
                func.if_(
                    update_condition,
                    func.values(foos.c.bar),
                    foos.c.bar,
                ),
            ),
            (
                "updated_once",
                func.if_(update_condition, True, foos.c.updated_once),
            ),
        ])
        stmt2 = stmt.on_duplicate_key_update([
            (
                "updated_once",
                func.if_(update_condition, True, foos.c.updated_once),
            ),
            (
                "bar",
                func.if_(
                    update_condition,
                    func.values(foos.c.bar),
                    foos.c.bar,
                ),
            ),
        ])
        # First statement should succeed updating column bar
        conn.execute(stmt1, dict(id=1, bar="ab"))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
            [(1, "ab", "bz", True)],
        )
        # Second statement will do noop update of column bar
        # (updated_once is set True first, so the bar condition is False).
        conn.execute(stmt2, dict(id=2, bar="ab"))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 2)).fetchall(),
            [(2, "b", "bz2", True)],
        )
def report_taggings():
    "Receive taggings from client and save them somewhere"
    if os.environ.get('DYBVDQ_SQLALCHEMY_ECHO') == '1':
        print(request.json)

    payload = request.json
    # payload['hall'] is presumably of the form "EH1" — take the digit.
    # TODO(review): confirm hall string format against the client.
    hall = int(payload['hall'][2])
    session: str = payload['session']
    bounds: Dict = payload['bounds']
    taggings: List[List[int]] = payload['taggings']  # [[run, file]]
    untaggings: List[List[int]] = payload['untaggings']
    comments: List[str] = payload['comments']

    loc = loc_pred(bounds['minRun'], bounds['minFile'],
                   bounds['maxRun'], bounds['maxFile'])
    # SECURITY: `session` comes straight from untrusted request JSON and is
    # spliced into raw SQL. Escape backslashes and double quotes so it
    # cannot break out of the quoted literal; migrating app_exec to bound
    # parameters would be the proper fix.
    safe_session = session.replace('\\', '\\\\').replace('"', '\\"')
    del_query = f'''DELETE FROM tagging
                    WHERE session = "{safe_session}"
                    AND hall = {hall} AND ({loc})'''
    app_exec(del_query, commit=True)

    # Bulk insert via a single statement; per-row db.session.add() was
    # considered but is likely slower (untested).
    inserts = [{
        'hall': hall,
        'session': session,
        'runno': runno,
        'fileno': fileno,
        'comment': comment
    } for (runno, fileno), comment in zip(taggings, comments)]
    if inserts:
        stmt = mysql.insert(Tagging).values(inserts)
        db.get_engine(bind='app_db').execute(stmt)

    # Untaggings are recorded as rows flagged untag=True.
    inserts = [{
        'hall': hall,
        'session': session,
        'runno': runno,
        'fileno': fileno,
        'untag': True,
        'comment': 'Untagging'
    } for runno, fileno in untaggings]
    if inserts:
        stmt = mysql.insert(Tagging).values(inserts)
        db.get_engine(bind='app_db').execute(stmt)

    return 'Thanks!'
def test_on_duplicate_key_update(self):
    """The colliding row is updated in place; the new row is inserted."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        # Seed a row that the multi-row insert below collides with.
        conn.execute(insert(foos, dict(id=1, bar='b', baz='bz')))
        ins = insert(foos, [dict(id=1, bar='ab'), dict(id=2, bar='b')])
        upsert = ins.on_duplicate_key_update(bar=ins.values.bar)
        res = conn.execute(upsert)
        # The freshly inserted row determines the reported primary key.
        eq_(res.inserted_primary_key, [2])
        fetched = conn.execute(
            foos.select().where(foos.c.id == 1)).fetchall()
        eq_(fetched, [(1, 'ab', 'bz')])
def test_last_inserted_id(self, connection):
    """inserted_primary_key is reported on both insert and update paths."""
    foos = self.tables.foos
    conn = connection
    # First execution inserts a brand-new row.
    ins = insert(foos).values({"bar": "b", "baz": "bz"})
    res = conn.execute(
        ins.on_duplicate_key_update(bar=ins.inserted.bar, baz="newbz"))
    eq_(res.inserted_primary_key, (1, ))
    # Second execution collides on id=1 and takes the update path.
    ins = insert(foos).values({"id": 1, "bar": "b", "baz": "bz"})
    res = conn.execute(
        ins.on_duplicate_key_update(bar=ins.inserted.bar, baz="newbz"))
    eq_(res.inserted_primary_key, (1, ))
def test_last_inserted_id(self):
    """inserted_primary_key reflects the affected row for both upsert paths."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        # First row is a plain insert; second collides on id=1 and updates.
        for row in ({"bar": "b", "baz": "bz"},
                    {"id": 1, "bar": "b", "baz": "bz"}):
            ins = insert(foos, row)
            res = conn.execute(
                ins.on_duplicate_key_update(bar=ins.values.bar, baz="newbz"))
            eq_(res.inserted_primary_key, [1])
def test_on_duplicate_key_update_expression_multirow(self, connection):
    """SQL-expression update values (concat of VALUES()) apply on collision."""
    foos = self.tables.foos
    conn = connection
    conn.execute(insert(foos, dict(id=1, bar="b", baz="bz")))
    ins = insert(foos).values([dict(id=1, bar="ab"), dict(id=2, bar="b")])
    upsert = ins.on_duplicate_key_update(
        bar=func.concat(ins.inserted.bar, "_foo"))
    res = conn.execute(upsert)
    # Multi-row insert: the primary key report is ambiguous, hence None.
    eq_(res.inserted_primary_key, (None, ))
    eq_(
        conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
        [(1, "ab_foo", "bz", False)],
    )
def test_on_duplicate_key_update(self):
    """Colliding row gets updated from VALUES(); non-colliding row inserts."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        conn.execute(insert(foos, dict(id=1, bar='b', baz='bz')))
        ins = insert(foos).values(
            [dict(id=1, bar='ab'), dict(id=2, bar='b')])
        res = conn.execute(
            ins.on_duplicate_key_update(bar=ins.inserted.bar))
        # The new row (id=2) supplies the reported primary key.
        eq_(res.inserted_primary_key, [2])
        fetched = conn.execute(
            foos.select().where(foos.c.id == 1)).fetchall()
        eq_(fetched, [(1, 'ab', 'bz')])
def test_on_duplicate_key_update_null_multirow(self, connection):
    """A literal None update value NULLs the column on collision."""
    foos = self.tables.foos
    conn = connection
    conn.execute(insert(foos, dict(id=1, bar="b", baz="bz")))
    ins = insert(foos).values([dict(id=1, bar="ab"), dict(id=2, bar="b")])
    res = conn.execute(ins.on_duplicate_key_update(updated_once=None))
    # Multi-row statement, so the reported primary key is ambiguous.
    eq_(res.inserted_primary_key, (None, ))
    eq_(
        conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
        [(1, "b", "bz", None)],
    )
def _my_insert(target_table, temp_table, session, if_record_exists):
    """
    Insert or update data from temp_table to target_table.

    :param SQLAlchemy.ext.declarative.api.DeclarativeMeta target_table:
        target table
    :param SQLAlchemy.ext.declarative.api.DeclarativeMeta temp_table:
        corresponding temporary table
    :param session: SQLAlchemy session
    :param str if_record_exists: {'update', 'ignore'} strategy for rows whose
        primary key or unique constraint already exists:
        - 'update': INSERT INTO target (cols) SELECT cols FROM temp
          ON DUPLICATE KEY UPDATE col=VALUES(col)
        - 'ignore': INSERT IGNORE INTO target (cols) SELECT cols FROM temp
    :return: None
    """
    inspected = inspect(temp_table)
    # Column objects (carry type info) and their bare names.
    columns = list(inspected.columns)
    names = [c.key for c in inspected.columns]
    # Drop the primary key (assumed auto-incremented here) from both lists.
    for pk in inspected.primary_key:
        columns.remove(pk)
        names.remove(pk.name)

    if if_record_exists == 'update':
        stmt = insert(target_table).from_select(
            names, session.query(*columns))
        stmt = stmt.on_duplicate_key_update(
            {name: stmt.inserted[name] for name in names})
    elif if_record_exists == 'ignore':
        stmt = insert(target_table).from_select(
            names, session.query(*columns)).prefix_with('IGNORE')
    else:
        raise ValueError(
            'if_record_exists param only accept "update" or "ignore"')
    session.execute(stmt)
def test_on_duplicate_key_update_null(self):
    """updated_once=None sets the column to NULL on the colliding row."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        conn.execute(insert(foos, dict(id=1, bar="b", baz="bz")))
        ins = insert(foos).values(
            [dict(id=1, bar="ab"), dict(id=2, bar="b")])
        res = conn.execute(ins.on_duplicate_key_update(updated_once=None))
        eq_(res.inserted_primary_key, [2])
        eq_(
            conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
            [(1, "b", "bz", None)],
        )
def test_on_duplicate_key_update_singlerow(self, connection):
    """Single-row upsert reports a concrete inserted_primary_key."""
    foos = self.tables.foos
    conn = connection
    conn.execute(insert(foos, dict(id=1, bar="b", baz="bz")))
    ins = insert(foos).values(dict(id=2, bar="b"))
    res = conn.execute(ins.on_duplicate_key_update(bar=ins.inserted.bar))
    # Only one row in the INSERT, so the primary key is unambiguous.
    eq_(res.inserted_primary_key, (2, ))
    eq_(
        conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
        [(1, "b", "bz", False)],
    )
def update_avg_ext(d: date = None) -> None:
    """Compute per-day averages/totals/extremes for *d* (default: today)
    and upsert them into the avg_extreme table.

    Returns silently when the day has no records yet (DaySummary raises
    ValueError in that case).
    """
    if d is None:
        d = DateUtil.now().date()
    logger.info("Avg/extr update date: %s", d)
    try:
        day_summary = DaySummary(d).stats()
    except ValueError:
        # logger.warn is deprecated; use warning().
        logger.warning("No records for day (yet)")
        return
    records = []
    for obs_var, summary in day_summary.items():
        # Fields shared by all four statistic rows of this variable.
        common = dict(d=d, var=obs_var.db_field, period="day",
                      t_mod=datetime.utcnow(), cnt=summary.count)
        for typ, val, at in [
            ("avg", summary.avg, None),
            ("total", summary.total, None),
            ("min", summary.min_val, summary.min_at),
            ("max", summary.max_val, summary.max_at),
        ]:
            all_params = dict(type=typ, val=val, at=at)
            all_params.update(common)
            # TODO: don't override already overridden entries (check first)
            records.append(
                insert(m.avg_extreme).values(
                    **all_params,
                ).on_duplicate_key_update(**all_params))
    db.execute_batch(records)
def insert_sqlalchemy(self, dbname=None, tbname=None, dicts=None):
    """Insert *dicts* into table *tbname* of database *dbname*, updating
    the `data` and `status` columns on duplicate key.

    :param dbname: database name appended to self.db_url
    :param tbname: table name (must already exist; discovered by reflection)
    :param dicts: row dict, or list of row dicts, to insert
    :return: the execute() result; result.inserted_primary_key[0] is the new
        row id — NB: it is 0 when an existing row was updated instead
    """
    # Unused imports (update, IntegrityError) removed.
    from sqlalchemy import create_engine
    from sqlalchemy import MetaData
    from sqlalchemy.dialects.mysql import insert

    engine = create_engine(self.db_url + dbname)
    metadata = MetaData(engine, reflect=True)
    tbl = metadata.tables[tbname]

    # --- INSERT statement
    ins = insert(tbl).values(dicts)
    # --- ON DUPLICATE KEY UPDATE support for mysql
    # http://docs.sqlalchemy.org/en/latest/changelog/migration_12.html#support-for-insert-on-duplicate-key-update
    on_conflict_stmt = ins.on_duplicate_key_update(data=ins.inserted.data,
                                                   status='U')
    # --- EXECUTE: the context manager returns the connection to the pool
    # even if execute() raises (the previous version leaked it).
    with engine.connect() as conn:
        result = conn.execute(on_conflict_stmt)
    return result
def update_df(self, df: Union[pd.Series, pd.DataFrame],
              table_name: str) -> None:
    """Write *df* into database table *table_name*, upserting row by row."""
    if df is None or df.empty:
        return

    metadata = sa.MetaData(self.engine)
    metadata.reflect()
    table = metadata.tables[table_name.lower()]

    frame = df.reset_index()
    # Serialize datetime columns to strings before writing.
    for col in frame.select_dtypes(np.datetime64).columns.values.tolist():
        frame[col] = frame[col].apply(self._date2str)

    start_timestamp = time.time()
    # Replace NaN with None so the INSERT will not error out.
    # This operation seems to change dtypes, so it is done last.
    for col in frame.columns:
        frame[col] = frame[col].where(frame[col].notnull(), other=None)

    for _, row in frame.iterrows():
        payload = row.to_dict()
        stmt = insert(table).values(
            **payload).on_duplicate_key_update(**payload)
        self.engine.execute(stmt)

    end_timestamp = time.time()
    logging.getLogger(__name__).debug(
        f'插入数据耗时 {(end_timestamp - start_timestamp):.2f} 秒.')
def upsert_players(self, players):
    """ Inserts or Updates Players.

    Rolls back the transaction on any error and logs it (previously the
    exception was silently discarded). The connection is always closed,
    and True is returned either way (matches prior behavior).
    """
    conn = self.engine.connect()
    trans = conn.begin()
    try:
        for player in players:
            logging.debug("upsert_players: upserting {0}".format(
                player['attributes']['name']))
            insert_stmt = insert(Player).values(
                player_id=player['id'],
                player_name=player['attributes']['name'],
                shard_id=player['attributes']['shardId'])
            merge_stmt = insert_stmt.on_duplicate_key_update(
                player_id=insert_stmt.inserted.player_id,
                player_name=insert_stmt.inserted.player_name,
                shard_id=insert_stmt.inserted.shard_id,
                status='U')
            conn.execute(merge_stmt)
        trans.commit()
    except Exception as e:
        logging.error("upsert_players failed: %s", e)
        trans.rollback()
    finally:
        # Release the connection even if commit/rollback itself raises.
        conn.close()
    return True
def split_sentence(max, database, chunk=100):
    """Split stored documents into cleaned sentences and upsert them.

    :param max: stop after this many sentences (None = no limit)
    :param database: database spec passed to _prepare()
    :param chunk: number of sentences per bulk INSERT
    """
    ENGINE, Session = _prepare(database)

    def iter_sentence(cleaner, mecab, max=None):
        # Yields one dict per non-blank sentence found in Data rows.
        blank_ = re.compile(r'^\W*$')
        session = Session()
        i = 0
        for data in session.query(Data):
            for sentence in cleaner.clean(data.contents, mecab):
                sentence_str = ' '.join(map(str, sentence))
                if not blank_.match(sentence_str):
                    if max is not None and i >= max:
                        # PEP 479: raising StopIteration inside a generator
                        # is a RuntimeError on Python 3.7+; return instead.
                        return
                    print(sentence_str)
                    i += 1
                    yield {
                        'contents': sentence_str,
                        'data_id': data.id,
                        'data_file_id': data.file_id
                    }

    cleaner = Cleaner()
    with ENGINE.begin() as conn, MeCab() as me:
        for sentences in chunked(iter_sentence(cleaner, me, max), chunk):
            insert_stmt = insert(Sentence)
            on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
                contents=insert_stmt.inserted.contents)
            conn.execute(on_duplicate_key_stmt, list(sentences))
def test_on_duplicate_key_update_multirow(self, connection):
    """Multi-row upsert updates the colliding row; pk report is ambiguous."""
    foos = self.tables.foos
    conn = connection
    conn.execute(insert(foos, dict(id=1, bar="b", baz="bz")))
    ins = insert(foos).values([dict(id=1, bar="ab"), dict(id=2, bar="b")])
    res = conn.execute(ins.on_duplicate_key_update(bar=ins.inserted.bar))
    # multirow, so its ambiguous. this is a behavioral change in 1.4
    eq_(res.inserted_primary_key, (None, ))
    eq_(
        conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
        [(1, "ab", "bz", False)],
    )
def test_last_inserted_id(self):
    """The upsert reports the affected row's primary key both on the
    fresh-insert and the duplicate-key-update path."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        # First iteration inserts; second collides on id=1 and updates.
        for values in ({"bar": "b", "baz": "bz"},
                       {"id": 1, "bar": "b", "baz": "bz"}):
            ins = insert(foos).values(values)
            res = conn.execute(
                ins.on_duplicate_key_update(
                    bar=ins.inserted.bar, baz="newbz"))
            eq_(res.inserted_primary_key, [1])
def upsert_seasons(self, seasons):
    """ Insert season data.

    Upserts one row per season and rolls back on failure. The connection
    is always closed; True is returned either way (matches prior behavior).
    """
    conn = self.engine.connect()
    trans = conn.begin()
    try:
        for season in seasons:
            insert_stmt = insert(Season).values(
                season_id=season['id'],
                is_current_season=season['attributes']['isCurrentSeason'],
                is_off_season=season['attributes']['isOffseason'])
            merge_stmt = insert_stmt.on_duplicate_key_update(
                season_id=insert_stmt.inserted.season_id,
                is_current_season=insert_stmt.inserted.is_current_season,
                is_off_season=insert_stmt.inserted.is_off_season)
            conn.execute(merge_stmt)
        trans.commit()
    except Exception:
        # Narrowed from a bare except: so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        trans.rollback()
    finally:
        conn.close()
    return True
def post(self):
    """Upload an image file for the current user and return its id/URL."""
    req_parser = RequestParser()
    req_parser.add_argument('image', type=parser.image_file, required=True,
                            location='files')
    file = req_parser.parse_args()
    user_id = g.user_id

    try:
        image_key = upload_image(file['image'].read())
    except Exception as e:
        current_app.logger.error('upload failed {}'.format(e))
        return {'message': 'Uploading profile photo image failed.'}, 507

    # TODO: images are approved by default (no review step yet).
    stmt = insert(Material).values(
        user_id=user_id,
        type=Material.TYPE.IMAGE,
        hash=image_key,
        url=image_key,
        status=Material.STATUS.APPROVED,
    ).on_duplicate_key_update(status=Material.STATUS.APPROVED)
    db.session.execute(stmt)
    db.session.commit()

    material = Material.query.options(load_only(Material.id, Material.url)) \
        .filter_by(user_id=user_id, hash=image_key).first()
    return {'id': material.id,
            'url': current_app.config['QINIU_DOMAIN'] + material.url}, 201
def add_article(item):
    """Insert one RSS article row; when the unique key already exists,
    refresh every mutable column from the incoming item.

    :param item: mapping with 'title', 'link', 'description',
        'text_filepath', 'publication_date' (RFC 822 string) and optional
        'author', 'guid', 'categories', 'image_url', 'credit' keys
    """
    insert_sql = insert(Article).values(
        title=item['title'],
        link=item['link'],
        description=item['description'],
        author=item.get('author', None),
        text_filepath=item['text_filepath'],
        publication_date=datetime.strptime(item['publication_date'],
                                           "%a, %d %b %Y %H:%M:%S %z"),
        guid=item.get('guid', None),
        categories=item.get('categories', None),
        image_url=item.get('image_url'),
        credit=item.get('credit'),
    )
    insert_sql_on_update = insert_sql.on_duplicate_key_update(
        title=insert_sql.inserted.title,
        description=insert_sql.inserted.description,
        author=insert_sql.inserted.author,
        text_filepath=insert_sql.inserted.text_filepath,
        publication_date=insert_sql.inserted.publication_date,
        guid=insert_sql.inserted.guid,
        categories=insert_sql.inserted.categories,
        image_url=insert_sql.inserted.image_url,
        credit=insert_sql.inserted.credit)
    # engine.connect().execute(...) leaked the connection; the context
    # manager guarantees it is returned to the pool.
    with engine.connect() as conn:
        conn.execute(insert_sql_on_update)
def write_data(data, symbol, q_type, provider):
    """
    Write a dataframe to a MySQL table; on key collisions every column is
    updated. The effective SQL is:
    "insert into <table_name> (<columns>) values <...>
     on duplicate key update <column>=values(<column>)"

    :param data: pd.DataFrame
    :param symbol: symbol key (str)
    :param q_type: quotes data type (i.e. MySQL table name)
    :param provider: data provider name
    :return: expression result object
    """
    # The table is selected by the quotes data type.
    tbl = metadata.tables[q_type]
    data['provider'] = pd.Series(provider, index=data.index)
    data['symbol'] = pd.Series(symbol, index=data.index)
    # SQL has no NaN: map all missing values to None/NULL.
    data = data.astype(object).where(pd.notnull(data), None)

    stmt = insert(tbl).values(data.to_dict(orient='records'))
    update_map = {col.name: stmt.inserted[col.name] for col in tbl.columns}
    upsert = stmt.on_duplicate_key_update(**update_map)

    db_engine.dispose()
    with db_engine.connect() as conn:
        return conn.execute(upsert)
def bid_upsert(bid):
    """Insert a map-tag record, or — on duplicate key — refresh every column
    with the incoming values and mark the row's status as 'U'."""
    cols = dict(
        feature_id=bid.feature_id,
        feature_name=bid.feature_name,
        group_id=bid.group_id,
        group_name=bid.group_name,
        layer_id=bid.layer_id,
        layer_name=bid.layer_name,
        map_id=bid.map_id,
        longitude=bid.longitude,
        latitude=bid.latitude,
        tag_create_time=bid.tag_create_time,
        tag_edit_time=bid.tag_edit_time,
        createtime=bid.createtime,
    )
    ins = insert(t_bid).values(**cols)
    # Overwrite every column from VALUES() and flag the row as updated;
    # dict order keeps 'status' as the last assignment, as before.
    updates = {name: getattr(ins.inserted, name) for name in cols}
    updates['status'] = 'U'
    conn.execute(ins.on_duplicate_key_update(**updates))
def upsert_player_matches(self, players):
    """ Drops the link between players and matches into the association
    table.

    Rolls back on failure; the connection is always closed and True is
    returned either way (matches prior behavior).
    """
    conn = self.engine.connect()
    trans = conn.begin()
    try:
        for player in players:
            for match in player['relationships']['matches']['data']:
                insert_stmt = insert(PlayerMatches).values(
                    player_id=player['id'],
                    match_id=match['id'])
                merge_stmt = insert_stmt.on_duplicate_key_update(
                    player_id=insert_stmt.inserted.player_id,
                    match_id=insert_stmt.inserted.match_id)
                conn.execute(merge_stmt)
        trans.commit()
    except Exception:
        # Narrowed from a bare except: so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        trans.rollback()
    finally:
        conn.close()
    return True
def update_df(self, df: pd.DataFrame, table_name: str) -> None:
    """Upsert *df* into *table_name* one row at a time.

    :param df: DataFrame to persist
    :param table_name: destination table (must already exist)
    :return: None
    """
    metadata = sa.MetaData(self.engine, reflect=True)
    table = metadata.tables[table_name]

    frame = df.reset_index()
    # Serialize datetime columns to strings up front.
    for col in frame.select_dtypes(np.datetime64).columns.values.tolist():
        frame[col] = frame[col].apply(self.date2str)

    # Replace NaN with None so the insert will not error out; done last
    # because this operation seems to change dtypes.
    for col in frame.columns:
        frame[col] = np.where(frame[col].isnull(), None, frame[col])

    for _, row in frame.iterrows():
        payload = row.to_dict()
        self.engine.execute(
            insert(table).values(
                **payload).on_duplicate_key_update(**payload))
def persist_data(df):
    '''
    Function to persist the data into the database.

    Args:
        - df: pandas DataFrame of exchange-rate rows
    Returns:
        - None
    '''
    session = DBSession()
    # SQL has no NaN: normalize all missing values to None/NULL.
    df.fillna(value=np.nan, inplace=True)
    df.replace({np.nan: None}, inplace=True)
    insert_stmt = (mysql.insert(models.ExchangeRate.__table__).values(
        df.to_dict(orient='records')))
    try:
        session.execute(insert_stmt)
        session.commit()
        logger.info('Added {} rows in database'.format(df.shape[0]))
    except exc.SQLAlchemyError as e:
        logger.error('Session commit failed.')
        # str(e), not the private e._message attribute.
        logger.error(str(e))
        session.rollback()
    finally:
        # Close the session even if an unexpected exception escapes.
        session.close()
    return
def write_data(data, symbol, q_type, provider):
    """
    Upsert a dataframe into the MySQL table named by *q_type*.
    On key collisions an update is performed; the executed SQL is:
    "insert into <table_name> (<columns>) values <...>
     on duplicate key update <column>=values(<column>)"

    :param data: pd.DataFrame
    :param symbol: symbol key (str)
    :param q_type: quotes data type (i.e. MySQL table name)
    :param provider: data provider name
    :return: expression result object
    """
    target = metadata.tables[q_type]
    data['provider'] = pd.Series(provider, index=data.index)
    data['symbol'] = pd.Series(symbol, index=data.index)
    # Replace NaNs with None because SQL doesn't support them.
    data = data.astype(object).where(pd.notnull(data), None)
    records = data.to_dict(orient='records')

    ins = insert(target).values(records)
    dup = ins.on_duplicate_key_update(
        **{c.name: ins.inserted[c.name] for c in target.columns})

    db_engine.dispose()
    with db_engine.connect() as conn:
        return conn.execute(dup)
def test_update_sql_expr(self):
    """SQL expressions in the update clause compile to VALUES() references."""
    ins = insert(self.table).values(
        [{"id": 1, "bar": "ab"}, {"id": 2, "bar": "b"}])
    stmt = ins.on_duplicate_key_update(
        bar=func.coalesce(ins.inserted.bar),
        baz=ins.inserted.baz + "some literal",
    )
    expected_sql = (
        "INSERT INTO foos (id, bar) VALUES (%s, %s), (%s, %s) ON "
        "DUPLICATE KEY UPDATE bar = coalesce(VALUES(bar)), "
        "baz = (concat(VALUES(baz), %s))")
    self.assert_compile(
        stmt,
        expected_sql,
        checkparams={
            "id_m0": 1,
            "bar_m0": "ab",
            "id_m1": 2,
            "bar_m1": "b",
            "baz_1": "some literal",
        },
    )
def bid_upsert(bid):
    """Upsert one listing row; on duplicate key refresh every column except
    the id and mark the row's status as 'U'."""
    values = dict(
        id=bid.id,
        title=bid.title,
        loupan=bid.loupan,
        house_type=bid.house_type,
        area=bid.area,
        toward=bid.toward,
        renovation=bid.renovation,
        positionInfo=bid.positionInfo,
        totalPrice=bid.totalPrice,
        unitPrice=bid.unitPrice,
        href=bid.href,
        createtime=bid.createtime,
    )
    ins = insert(t_bid).values(**values)
    # Every non-id column is refreshed from VALUES(); dict order keeps
    # 'status' as the last assignment, as before.
    refresh = {k: getattr(ins.inserted, k) for k in values if k != 'id'}
    refresh['status'] = 'U'
    conn.execute(ins.on_duplicate_key_update(**refresh))
def put(self):
    """Reset the current user's channel list to exactly the posted channels."""
    try:
        channel_list = self._parse_channel_list()
    except ValueError as e:
        return {'message': '{}'.format(e)}, 400

    user_id = g.user_id
    # Soft-delete every channel the user had before.
    UserChannel.query.filter_by(user_id=user_id, is_deleted=False).update(
        {'is_deleted': True})

    # Re-insert (or resurrect) each posted channel with its sequence.
    for channel in channel_list:
        stmt = insert(UserChannel).values(
            user_id=user_id,
            channel_id=channel['id'],
            sequence=channel['seq'],
        ).on_duplicate_key_update(sequence=channel['seq'], is_deleted=False)
        db.session.execute(stmt)
    db.session.commit()

    # Invalidate the cached channel list.
    cache_channel.UserChannelsCache(user_id).clear()
    return {'channels': channel_list}, 201
def put(self, target):
    """Set the sequence of one channel for the current user (upsert)."""
    user_id = g.user_id
    json_parser = RequestParser()
    json_parser.add_argument('seq', type=inputs.positive, required=True,
                             location='json')
    args = json_parser.parse_args()

    # Reject ids that are not known channels.
    if not cache_channel.AllChannelsCache.exists(target):
        return {'message': 'Invalid channel id.'}, 400

    stmt = insert(UserChannel).values(
        user_id=user_id,
        channel_id=target,
        sequence=args.seq,
    ).on_duplicate_key_update(sequence=args.seq, is_deleted=False)
    db.session.execute(stmt)
    db.session.commit()

    # Invalidate the cached channel list.
    cache_channel.UserChannelsCache(user_id).clear()
    return {'id': target, 'seq': args.seq}, 201
def mysql_merge(merge_stmt, compiler, **kwargs):
    """Compile a generic merge statement into MySQL's
    INSERT ... ON DUPLICATE KEY UPDATE form."""
    ins = mysql.insert(merge_stmt.table, merge_stmt.values)
    # Update every non-primary-key column from VALUES(); the first
    # parameter row supplies the column names.
    assignments = {
        name: getattr(ins.inserted, name)
        for name in ins.parameters[0]
        if name not in ins.table.primary_key
    }
    return compiler.process(
        ins.on_duplicate_key_update(**assignments), **kwargs)
def test_bad_args(self):
    """Invalid argument combinations for on_duplicate_key_update raise."""

    def fresh_insert():
        # Independent INSERT per assertion.
        return insert(self.tables.foos, values={})

    # No update values at all.
    assert_raises(ValueError, fresh_insert().on_duplicate_key_update)
    # A positional dict plus keyword args is ambiguous.
    assert_raises(
        exc.ArgumentError,
        fresh_insert().on_duplicate_key_update,
        {"id": 1, "bar": "b"},
        id=1,
        bar="b",
    )
    # Two positional dicts are not allowed either.
    assert_raises(
        exc.ArgumentError,
        fresh_insert().on_duplicate_key_update,
        {"id": 1, "bar": "b"},
        {"id": 2, "bar": "baz"},
    )
def test_python_values(self):
    """A plain Python value in the update clause compiles to a bind param."""
    ins = insert(self.table).values(
        [{'id': 1, 'bar': 'ab'}, {'id': 2, 'bar': 'b'}])
    upsert = ins.on_duplicate_key_update(bar="foobar")
    self.assert_compile(
        upsert,
        'INSERT INTO foos (id, bar) VALUES (%s, %s), (%s, %s) '
        'ON DUPLICATE KEY UPDATE bar = %s',
    )
def test_from_literal(self):
    """literal_column() text is emitted verbatim in the update clause."""
    ins = insert(self.table).values(
        [{'id': 1, 'bar': 'ab'}, {'id': 2, 'bar': 'b'}])
    upsert = ins.on_duplicate_key_update(bar=literal_column('bb'))
    self.assert_compile(
        upsert,
        'INSERT INTO foos (id, bar) VALUES (%s, %s), (%s, %s) '
        'ON DUPLICATE KEY UPDATE bar = bb',
    )
def test_from_literal(self):
    """A literal_column update value compiles without a bind parameter."""
    rows = [{"id": 1, "bar": "ab"}, {"id": 2, "bar": "b"}]
    stmt = insert(self.table).values(rows).on_duplicate_key_update(
        bar=literal_column("bb"))
    expected = (
        "INSERT INTO foos (id, bar) VALUES (%s, %s), (%s, %s) "
        "ON DUPLICATE KEY UPDATE bar = bb"
    )
    self.assert_compile(stmt, expected)
def test_python_values(self):
    """A Python scalar update value compiles to a single bind parameter."""
    rows = [{"id": 1, "bar": "ab"}, {"id": 2, "bar": "b"}]
    stmt = insert(self.table).values(rows).on_duplicate_key_update(
        bar="foobar")
    expected = (
        "INSERT INTO foos (id, bar) VALUES (%s, %s), (%s, %s) "
        "ON DUPLICATE KEY UPDATE bar = %s"
    )
    self.assert_compile(stmt, expected)
def test_on_duplicate_key_update_preserve_order(self):
    """MySQL applies ON DUPLICATE KEY UPDATE assignments left to right, so
    the order of the (column, expression) pairs changes the result whenever
    one assignment reads a column that another assignment overwrites."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        # Seed two rows; both are collided with below.
        conn.execute(
            insert(
                foos,
                [
                    dict(id=1, bar="b", baz="bz"),
                    dict(id=2, bar="b", baz="bz2"),
                ],
            )
        )
        stmt = insert(foos)
        update_condition = foos.c.updated_once == False

        # The following statements show importance of the columns update
        # ordering as old values being referenced in UPDATE clause are
        # getting replaced one by one from left to right with their new
        # values.
        stmt1 = stmt.on_duplicate_key_update(
            [
                (
                    "bar",
                    func.if_(
                        update_condition,
                        func.values(foos.c.bar),
                        foos.c.bar,
                    ),
                ),
                (
                    "updated_once",
                    func.if_(update_condition, True, foos.c.updated_once),
                ),
            ]
        )
        stmt2 = stmt.on_duplicate_key_update(
            [
                (
                    "updated_once",
                    func.if_(update_condition, True, foos.c.updated_once),
                ),
                (
                    "bar",
                    func.if_(
                        update_condition,
                        func.values(foos.c.bar),
                        foos.c.bar,
                    ),
                ),
            ]
        )

        # First statement should succeed updating column bar
        conn.execute(stmt1, dict(id=1, bar="ab"))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
            [(1, "ab", "bz", True)],
        )

        # Second statement will do noop update of column bar
        # (updated_once is set True first, so the bar condition is False).
        conn.execute(stmt2, dict(id=2, bar="ab"))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 2)).fetchall(),
            [(2, "b", "bz2", True)],
        )
def test_bad_args(self):
    """Calling on_duplicate_key_update with no values raises ValueError."""
    stmt = insert(self.tables.foos, values={})
    assert_raises(ValueError, stmt.on_duplicate_key_update)