def upload_song(db: pymysql.connections.Connection, fields, values, sid):
    ret = False
    cursor = db.cursor()
    sql = 'insert into t_songs({}) values({}) ON DUPLICATE KEY UPDATE song_id={};'
    sql = sql.format(fields, values, sid)
    try:
        cursor.execute(sql)
        db.commit()
        ret = True
    except:
        StdError.error("Upload module: unknown error while uploading song\tsql=" + sql)
        db.rollback()
    finally:
        cursor.close()
    return ret
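# Hedged usage sketch for upload_song, assuming the function above is in scope. The connection
# credentials, the t_songs column list, and the literal values are assumptions for illustration.
# Note that fields/values are interpolated directly into the SQL string, so the caller is
# responsible for quoting string literals itself.
import pymysql

def _example_upload_song():
    db = pymysql.connect(host="localhost", user="root", password="secret", database="music")  # hypothetical credentials
    try:
        ok = upload_song(
            db,
            fields="song_id, song_name, song_singer",
            values="42, 'Example Song', 'Example Singer'",  # pre-quoted literals, as the helper expects
            sid=42,
        )
        print("upload_song succeeded:", ok)
    finally:
        db.close()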
def upload_list(db: pymysql.connections.Connection, fields, values, lid):
    ret = False
    cursor = db.cursor()
    sql = 'insert into t_lists ({}) values ({}) ON DUPLICATE KEY UPDATE list_id={};'
    sql = sql.format(fields, values, lid)
    # Escape literal newlines and tabs so the statement stays on a single line
    sql = sql.replace("\n", r'\n')
    sql = sql.replace("\t", r'\t')
    try:
        cursor.execute(sql)
        db.commit()
        ret = True
    except:
        StdError.error("Upload module: unknown error while uploading playlist\tsql=" + sql)
        db.rollback()
    finally:
        cursor.close()
    return ret
def set_action(db: pymysql.connections.Connection, uid, sid, like, unlike, audition, download):
    ret = False
    cursor = db.cursor()
    sql = 'select action_id from t_actions where action_user={} and action_song={};'.format(uid, sid)
    if cursor.execute(sql) == 0:
        # No existing row for this user/song pair: insert a new action record
        sql = ('insert into t_actions (action_user, action_song, action_like, action_unlike, '
               'action_audition, action_download) values ({}, {}, {}, {}, {}, {});').format(
            uid, sid, like, unlike, audition, download)
        StdError.info("User action created: uid={},sid={}".format(uid, sid))
    else:
        # Row exists: update the counters in place
        sql = ('update t_actions set action_like={}, action_unlike={}, action_audition={}, '
               'action_download={} where action_user={} and action_song={};').format(
            like, unlike, audition, download, uid, sid)
        StdError.info("User action updated: uid={},sid={}".format(uid, sid))
    try:
        cursor.execute(sql)
        db.commit()
        ret = True
    except:
        db.rollback()
        StdError.error("Unknown error while setting user action\tuid={},sid={},sql={}".format(uid, sid, sql))
        ret = False
    finally:
        cursor.close()
    return ret
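# Hedged usage sketch for set_action, assuming the function above is in scope. It decides
# internally whether to INSERT a new t_actions row or UPDATE the existing one for the given
# user/song pair. The flag encoding (1 = yes, 0 = no) and credentials are assumptions.
import pymysql

def _example_set_action():
    db = pymysql.connect(host="localhost", user="root", password="secret", database="music")  # hypothetical credentials
    try:
        # Record that user 7 liked, auditioned, and downloaded song 42.
        set_action(db, uid=7, sid=42, like=1, unlike=0, audition=1, download=1)
    finally:
        db.close()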
def user_register(db: pymysql.connections.Connection, userName, userPwd, userSUPER, userEmail, userLikes):
    cursor = db.cursor()
    ret = False
    try:
        if cursor.execute('select user_id from t_users where user_name=\'{}\';'.format(userName)) != 0:
            raise UserManagerError('User registration error: a user with the same name already exists')
        sql = 'insert into t_users (user_name, user_SUPER, user_like, user_pwd, user_email) values (\'{}\', {}, \'{}\', \'{}\', \'{}\');'
        sql = sql.format(userName, userSUPER, ','.join(userLikes), userPwd, userEmail)
        StdError.info("Registering user: " + sql)
        cursor.execute(sql)
        db.commit()
        ret = True
    except UserManagerError as e:
        db.rollback()
        StdError.warn(e.message + "\tuser_name=" + userName + "\tuser_SUPER=" + str(userSUPER) +
                      "\tuser_pwd=" + str(userPwd) + "\tuser_email=" + userEmail +
                      "\tuser_likes=" + str(userLikes))
    except pymysql.err.IntegrityError as e:
        db.rollback()
        StdError.error(str(e) + "\tuser_name=" + userName + "\tuser_SUPER=" + str(userSUPER) +
                       "\tuser_pwd=" + str(userPwd) + "\tuser_email=" + userEmail +
                       "\tuser_likes=" + str(userLikes))
    except:
        db.rollback()
        StdError.error("Unknown error during user registration" + "\tuser_name=" + userName +
                       "\tuser_SUPER=" + str(userSUPER) + "\tuser_pwd=" + str(userPwd) +
                       "\tuser_email=" + userEmail + "\tuser_likes=" + str(userLikes))
    finally:
        cursor.close()
    return ret
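# Hedged usage sketch for user_register, assuming the function above is in scope. The connection
# settings and user details are illustrative. userLikes is expected to be a list of strings,
# since the helper joins it with commas before storing it in the user_like column.
import pymysql

def _example_user_register():
    db = pymysql.connect(host="localhost", user="root", password="secret", database="music")  # hypothetical credentials
    try:
        ok = user_register(
            db,
            userName="alice",
            userPwd="hashed-password",      # the helper stores whatever string it is given
            userSUPER=0,                    # assumed: 0 = regular user, non-zero = admin
            userEmail="alice@example.com",
            userLikes=["pop", "jazz"],
        )
        print("registered:", ok)
    finally:
        db.close()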
def to_sql(
    df: pd.DataFrame,
    con: pymysql.connections.Connection,
    table: str,
    schema: str,
    mode: str = "append",
    index: bool = False,
    dtype: Optional[Dict[str, str]] = None,
    varchar_lengths: Optional[Dict[str, int]] = None,
) -> None:
    """Write records stored in a DataFrame into MySQL.

    Parameters
    ----------
    df : pandas.DataFrame
        Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html
    con : pymysql.connections.Connection
        Use pymysql.connect() to use credentials directly or wr.mysql.connect() to fetch it from the Glue Catalog.
    table : str
        Table name
    schema : str
        Schema name
    mode : str
        Append or overwrite.
    index : bool
        True to store the DataFrame index as a column in the table, otherwise False to ignore it.
    dtype : Dict[str, str], optional
        Dictionary of columns names and MySQL types to be casted.
        Useful when you have columns with undetermined or mixed data types.
        (e.g. {'col name': 'TEXT', 'col2 name': 'FLOAT'})
    varchar_lengths : Dict[str, int], optional
        Dict of VARCHAR length by columns. (e.g. {"col1": 10, "col5": 200}).

    Returns
    -------
    None
        None.

    Examples
    --------
    Writing to MySQL using a Glue Catalog Connection

    >>> import awswrangler as wr
    >>> con = wr.mysql.connect("MY_GLUE_CONNECTION")
    >>> wr.mysql.to_sql(
    ...     df=df,
    ...     table="my_table",
    ...     schema="test",
    ...     con=con
    ... )
    >>> con.close()

    """
    if df.empty is True:
        raise exceptions.EmptyDataFrame()
    _validate_connection(con=con)
    try:
        with con.cursor() as cursor:
            _create_table(
                df=df,
                cursor=cursor,
                table=table,
                schema=schema,
                mode=mode,
                index=index,
                dtype=dtype,
                varchar_lengths=varchar_lengths,
            )
            if index:
                df.reset_index(level=df.index.names, inplace=True)
            placeholders: str = ", ".join(["%s"] * len(df.columns))
            sql: str = f"INSERT INTO `{schema}`.`{table}` VALUES ({placeholders})"
            _logger.debug("sql: %s", sql)
            parameters: List[List[Any]] = _db_utils.extract_parameters(df=df)
            cursor.executemany(sql, parameters)
            con.commit()  # type: ignore
    except Exception as ex:
        con.rollback()  # type: ignore
        _logger.error(ex)
        raise
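# Hedged usage sketch for the to_sql variant above, assuming a plain pymysql connection instead
# of wr.mysql.connect() and assuming this module's to_sql is in scope. The table/schema names and
# the dtype/varchar_lengths mappings are illustrative; only parameters documented in the docstring
# above are used.
import pandas as pd
import pymysql

def _example_to_sql_basic():
    con = pymysql.connect(host="localhost", user="root", password="secret", database="test")  # hypothetical credentials
    try:
        df = pd.DataFrame({"id": [1, 2], "name": ["a", "b"]})
        to_sql(
            df=df,
            con=con,
            table="my_table",
            schema="test",
            mode="overwrite",              # drop and recreate the table
            dtype={"id": "INT"},           # force the MySQL type of one column
            varchar_lengths={"name": 64},  # cap VARCHAR length for string columns
        )
    finally:
        con.close()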
def to_sql(
    df: pd.DataFrame,
    con: pymysql.connections.Connection,
    table: str,
    schema: str,
    mode: str = "append",
    index: bool = False,
    dtype: Optional[Dict[str, str]] = None,
    varchar_lengths: Optional[Dict[str, int]] = None,
    use_column_names: bool = False,
    chunksize: int = 200,
) -> None:
    """Write records stored in a DataFrame into MySQL.

    Parameters
    ----------
    df : pandas.DataFrame
        Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html
    con : pymysql.connections.Connection
        Use pymysql.connect() to use credentials directly or wr.mysql.connect() to fetch it from the Glue Catalog.
    table : str
        Table name
    schema : str
        Schema name
    mode : str
        Append, overwrite, upsert_duplicate_key, upsert_replace_into, upsert_distinct.

            append: Inserts new records into table.

            overwrite: Drops table and recreates.

            upsert_duplicate_key: Performs an upsert using `ON DUPLICATE KEY` clause. Requires table schema to have
            defined keys, otherwise duplicate records will be inserted.

            upsert_replace_into: Performs upsert using `REPLACE INTO` clause. Less efficient and still requires the
            table schema to have keys or else duplicate records will be inserted.

            upsert_distinct: Inserts new records, including duplicates, then recreates the table and inserts `DISTINCT`
            records from old table. This is the least efficient approach but handles scenarios where there are no
            keys on table.

    index : bool
        True to store the DataFrame index as a column in the table, otherwise False to ignore it.
    dtype : Dict[str, str], optional
        Dictionary of columns names and MySQL types to be casted.
        Useful when you have columns with undetermined or mixed data types.
        (e.g. {'col name': 'TEXT', 'col2 name': 'FLOAT'})
    varchar_lengths : Dict[str, int], optional
        Dict of VARCHAR length by columns. (e.g. {"col1": 10, "col5": 200}).
    use_column_names : bool
        If set to True, will use the column names of the DataFrame for generating the INSERT SQL Query.
        E.g. If the DataFrame has two columns `col1` and `col3` and `use_column_names` is True, data will only be
        inserted into the database columns `col1` and `col3`.
    chunksize : int
        Number of rows which are inserted with each SQL query. Defaults to inserting 200 rows per query.

    Returns
    -------
    None
        None.

    Examples
    --------
    Writing to MySQL using a Glue Catalog Connection

    >>> import awswrangler as wr
    >>> con = wr.mysql.connect("MY_GLUE_CONNECTION")
    >>> wr.mysql.to_sql(
    ...     df=df,
    ...     table="my_table",
    ...     schema="test",
    ...     con=con
    ... )
    >>> con.close()

    """
    if df.empty is True:
        raise exceptions.EmptyDataFrame()
    mode = mode.strip().lower()
    modes = [
        "append",
        "overwrite",
        "upsert_replace_into",
        "upsert_duplicate_key",
        "upsert_distinct",
    ]
    if mode not in modes:
        raise exceptions.InvalidArgumentValue(f"mode must be one of {', '.join(modes)}")
    _validate_connection(con=con)
    try:
        with con.cursor() as cursor:
            _create_table(
                df=df,
                cursor=cursor,
                table=table,
                schema=schema,
                mode=mode,
                index=index,
                dtype=dtype,
                varchar_lengths=varchar_lengths,
            )
            if index:
                df.reset_index(level=df.index.names, inplace=True)
            column_placeholders: str = ", ".join(["%s"] * len(df.columns))
            insertion_columns = ""
            upsert_columns = ""
            upsert_str = ""
            if use_column_names:
                insertion_columns = f"({', '.join(df.columns)})"
            if mode == "upsert_duplicate_key":
                upsert_columns = ", ".join(df.columns.map(lambda column: f"`{column}`=VALUES(`{column}`)"))
                upsert_str = f" ON DUPLICATE KEY UPDATE {upsert_columns}"
            placeholder_parameter_pair_generator = _db_utils.generate_placeholder_parameter_pairs(
                df=df, column_placeholders=column_placeholders, chunksize=chunksize
            )
            sql: str
            for placeholders, parameters in placeholder_parameter_pair_generator:
                if mode == "upsert_replace_into":
                    sql = f"REPLACE INTO `{schema}`.`{table}` {insertion_columns} VALUES {placeholders}"
                else:
                    sql = f"INSERT INTO `{schema}`.`{table}` {insertion_columns} VALUES {placeholders}{upsert_str}"
                _logger.debug("sql: %s", sql)
                cursor.executemany(sql, (parameters,))
            con.commit()
            if mode == "upsert_distinct":
                temp_table = f"{table}_{uuid.uuid4().hex}"
                cursor.execute(f"CREATE TABLE `{schema}`.`{temp_table}` LIKE `{schema}`.`{table}`")
                cursor.execute(f"INSERT INTO `{schema}`.`{temp_table}` SELECT DISTINCT * FROM `{schema}`.`{table}`")
                cursor.execute(f"DROP TABLE IF EXISTS `{schema}`.`{table}`")
                cursor.execute(f"ALTER TABLE `{schema}`.`{temp_table}` RENAME TO `{table}`")
                con.commit()
    except Exception as ex:
        con.rollback()
        _logger.error(ex)
        raise
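# Hedged usage sketch for the upsert-capable variant above, assuming it is in scope and the
# connection details, table name, and key column (`id`) are illustrative. mode="upsert_duplicate_key"
# generates an ON DUPLICATE KEY UPDATE clause, so it only deduplicates if the target table has a
# primary or unique key.
import pandas as pd
import pymysql

def _example_to_sql_upsert():
    con = pymysql.connect(host="localhost", user="root", password="secret", database="test")  # hypothetical credentials
    try:
        df = pd.DataFrame({"id": [1, 2], "name": ["a", "b-updated"]})
        to_sql(
            df=df,
            con=con,
            table="my_table",
            schema="test",
            mode="upsert_duplicate_key",
            use_column_names=True,  # emit an explicit (id, name) column list in the INSERT
            chunksize=500,          # rows per executemany batch
        )
    finally:
        con.close()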
def to_sql(
    df: pd.DataFrame,
    con: pymysql.connections.Connection,
    table: str,
    schema: str,
    mode: str = "append",
    index: bool = False,
    dtype: Optional[Dict[str, str]] = None,
    varchar_lengths: Optional[Dict[str, int]] = None,
    use_column_names: bool = False,
    chunksize: int = 200,
) -> None:
    """Write records stored in a DataFrame into MySQL.

    Parameters
    ----------
    df : pandas.DataFrame
        Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html
    con : pymysql.connections.Connection
        Use pymysql.connect() to use credentials directly or wr.mysql.connect() to fetch it from the Glue Catalog.
    table : str
        Table name
    schema : str
        Schema name
    mode : str
        Append or overwrite.
    index : bool
        True to store the DataFrame index as a column in the table, otherwise False to ignore it.
    dtype : Dict[str, str], optional
        Dictionary of columns names and MySQL types to be casted.
        Useful when you have columns with undetermined or mixed data types.
        (e.g. {'col name': 'TEXT', 'col2 name': 'FLOAT'})
    varchar_lengths : Dict[str, int], optional
        Dict of VARCHAR length by columns. (e.g. {"col1": 10, "col5": 200}).
    use_column_names : bool
        If set to True, will use the column names of the DataFrame for generating the INSERT SQL Query.
        E.g. If the DataFrame has two columns `col1` and `col3` and `use_column_names` is True, data will only be
        inserted into the database columns `col1` and `col3`.
    chunksize : int
        Number of rows which are inserted with each SQL query. Defaults to inserting 200 rows per query.

    Returns
    -------
    None
        None.

    Examples
    --------
    Writing to MySQL using a Glue Catalog Connection

    >>> import awswrangler as wr
    >>> con = wr.mysql.connect("MY_GLUE_CONNECTION")
    >>> wr.mysql.to_sql(
    ...     df=df,
    ...     table="my_table",
    ...     schema="test",
    ...     con=con
    ... )
    >>> con.close()

    """
    if df.empty is True:
        raise exceptions.EmptyDataFrame()
    _validate_connection(con=con)
    try:
        with con.cursor() as cursor:
            _create_table(
                df=df,
                cursor=cursor,
                table=table,
                schema=schema,
                mode=mode,
                index=index,
                dtype=dtype,
                varchar_lengths=varchar_lengths,
            )
            if index:
                df.reset_index(level=df.index.names, inplace=True)
            column_placeholders: str = ", ".join(["%s"] * len(df.columns))
            insertion_columns = ""
            if use_column_names:
                insertion_columns = f"({', '.join(df.columns)})"
            placeholder_parameter_pair_generator = _db_utils.generate_placeholder_parameter_pairs(
                df=df, column_placeholders=column_placeholders, chunksize=chunksize
            )
            for placeholders, parameters in placeholder_parameter_pair_generator:
                sql: str = f"INSERT INTO `{schema}`.`{table}` {insertion_columns} VALUES {placeholders}"
                _logger.debug("sql: %s", sql)
                cursor.executemany(sql, (parameters,))
            con.commit()
    except Exception as ex:
        con.rollback()
        _logger.error(ex)
        raise
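# Hedged sketch of the use_column_names behaviour described in the docstring above, assuming the
# function is in scope: when the DataFrame holds only a subset of the table's columns,
# use_column_names=True writes just those columns and lets MySQL fill the rest with defaults.
# Connection details, table, and column names are illustrative.
import pandas as pd
import pymysql

def _example_to_sql_partial_columns():
    con = pymysql.connect(host="localhost", user="root", password="secret", database="test")  # hypothetical credentials
    try:
        df = pd.DataFrame({"col1": [1, 2], "col3": ["x", "y"]})  # table also has col2, left to its default
        to_sql(
            df=df,
            con=con,
            table="my_table",
            schema="test",
            mode="append",
            use_column_names=True,  # INSERT INTO ... (col1, col3) VALUES ...
            chunksize=100,
        )
    finally:
        con.close()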
def roll_back(conn: pymysql.connections.Connection, cursor: pymysql.cursors.Cursor):
    # Undo any pending changes, then release the cursor and the connection
    conn.rollback()
    cursor.close()
    conn.close()
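# Hedged usage sketch for roll_back, assuming the function above is in scope: it is a one-shot
# cleanup for a failed transaction, rolling back and closing both the cursor and the connection.
# The credentials and the UPDATE statement below are illustrative only.
import pymysql

def _example_roll_back():
    conn = pymysql.connect(host="localhost", user="root", password="secret", database="music")  # hypothetical credentials
    cursor = conn.cursor()
    try:
        cursor.execute("UPDATE t_songs SET song_name=%s WHERE song_id=%s", ("New Name", 42))
        conn.commit()
        cursor.close()
        conn.close()
    except Exception:
        roll_back(conn, cursor)  # undo, then release cursor and connection
        raise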