def test_insert_one_column_using_values(self):
    """A single-column insert can reuse the incoming value via VALUES()."""
    query = (
        MySQLQuery.into(self.table_abc)
        .insert(1)
        .on_duplicate_key_update(self.table_abc.foo, Values(self.table_abc.foo))
    )

    expected = "INSERT INTO `abc` VALUES (1) ON DUPLICATE KEY UPDATE `foo`=VALUES(`foo`)"
    self.assertEqual(expected, str(query))
def insert_tweets_sql(tweets):
    """Build an upsert statement for *tweets* plus its parameter rows.

    Returns a ``(sql, params)`` pair: a parameterized INSERT with an
    ``ON DUPLICATE KEY UPDATE`` clause on ``lang``, and one value tuple
    per tweet.
    """
    field = ['id', 'retweet_count', 'favorite_count', 'lang', 'text']
    extra_field = ['tweeted_by', 'raw_json', 'retweeted_status', 'created_at']
    getter = attrgetter(*field)
    table = Table('tweets')

    def params_of(tweet):
        # Only serialize the retweeted status when one is present.
        if tweet.retweeted_status:
            retweeted_status = str(tweet.retweeted_status)
        else:
            retweeted_status = None
        extras = (
            tweet.user.id,
            str(tweet),
            retweeted_status,
            ctime_to_mysql_datetime(tweet.created_at),
        )
        return getter(tweet) + extras

    total_columns = len(field) + len(extra_field)
    placeholders = [Parameter('%s') for _ in range(total_columns)]
    statement = (
        Query.into(table)
        .columns(field + extra_field)
        .insert(*placeholders)
        .on_duplicate_key_update(table.lang, Values(table.lang))
    )
    return str(statement), [params_of(t) for t in tweets]
def test_insert_one_column_using_values(self):
    """on_conflict().do_update() renders as ON DUPLICATE KEY UPDATE for MySQL."""
    query = (
        MySQLQuery.into(self.table_abc)
        .insert(1)
        .on_conflict()
        .do_update(self.table_abc.foo, Values(self.table_abc.foo))
    )

    self.assertEqual(
        "INSERT INTO `abc` VALUES (1) ON DUPLICATE KEY UPDATE `foo`=VALUES(`foo`)",
        str(query),
    )
def test_insert_multiple_columns_on_duplicate_update_one_with_expression_using_values(
        self):
    """VALUES() may appear inside a function expression in the update clause."""
    query = (
        MySQLQuery.into(self.table_abc)
        .insert(1, 'a')
        .on_duplicate_key_update(
            self.table_abc.bar,
            fn.Concat(Values(self.table_abc.bar), 'update'))
    )

    self.assertEqual(
        "INSERT INTO `abc` VALUES (1,'a') "
        "ON DUPLICATE KEY UPDATE `bar`=CONCAT(VALUES(`bar`),'update')",
        str(query))
def test_insert_multiple_columns_on_duplicate_update_one_with_same_value(
        self):
    """Updating one column back to its inserted value via VALUES()."""
    query = (
        MySQLQuery.into(self.table_abc)
        .insert(1, 'a')
        .on_duplicate_key_update(self.table_abc.bar, Values(self.table_abc.bar))
    )

    self.assertEqual(
        "INSERT INTO `abc` VALUES (1,'a') "
        "ON DUPLICATE KEY UPDATE `bar`=VALUES(`bar`)",
        str(query))
def test_insert_multiple_columns_on_duplicate_update_one_with_same_value(
        self):
    """The on_conflict/do_update API renders the same VALUES() update."""
    query = (
        MySQLQuery.into(self.table_abc)
        .insert(1, "a")
        .on_conflict()
        .do_update(self.table_abc.bar, Values(self.table_abc.bar))
    )

    expected = (
        "INSERT INTO `abc` VALUES (1,'a') "
        "ON DUPLICATE KEY UPDATE `bar`=VALUES(`bar`)"
    )
    self.assertEqual(expected, str(query))
def test_insert_multiple_columns_on_duplicate_update_one_with_expression_using_values(
    self,
):
    """VALUES() nested in CONCAT() inside the duplicate-key update clause."""
    update_expr = fn.Concat(Values(self.table_abc.bar), "update")
    query = (
        MySQLQuery.into(self.table_abc)
        .insert(1, "a")
        .on_duplicate_key_update(self.table_abc.bar, update_expr)
    )

    self.assertEqual(
        "INSERT INTO `abc` VALUES (1,'a') "
        "ON DUPLICATE KEY UPDATE `bar`=CONCAT(VALUES(`bar`),'update')",
        str(query),
    )
def test_insert_selected_columns_on_duplicate_update_multiple(self):
    """Chained on_duplicate_key_update calls accumulate into one clause."""
    abc = self.table_abc
    query = (
        MySQLQuery.into(abc)
        .columns(abc.foo, abc.bar, abc.baz)
        .insert(1, 'a', True)
        .on_duplicate_key_update(abc.baz, False)
        .on_duplicate_key_update(abc.bar, Values(abc.bar))
    )

    self.assertEqual(
        "INSERT INTO `abc` (`foo`,`bar`,`baz`) VALUES (1,'a',true) "
        "ON DUPLICATE KEY UPDATE `baz`=false,`bar`=VALUES(`bar`)",
        str(query))
def test_insert_multi_rows_chained_mixed_on_duplicate_update_multiple(
        self):
    """Chained inserts (tuple rows and scalars) combine with multiple updates."""
    abc = self.table_abc
    query = (
        MySQLQuery.into(abc)
        .insert((1, 'a', True), (2, 'b', False))
        .insert(3, 'c', True)
        .on_duplicate_key_update(abc.foo, abc.foo)
        .on_duplicate_key_update(abc.bar, Values(abc.bar))
    )

    self.assertEqual(
        "INSERT INTO `abc` VALUES (1,'a',true),(2,'b',false),(3,'c',true) "
        "ON DUPLICATE KEY UPDATE `foo`=`foo`,`bar`=VALUES(`bar`)",
        str(query))
def test_insert_selected_columns_on_duplicate_update_multiple(self):
    """Chained do_update() calls accumulate into a single update clause."""
    abc = self.table_abc
    query = (
        MySQLQuery.into(abc)
        .columns(abc.foo, abc.bar, abc.baz)
        .insert(1, "a", True)
        .on_conflict()
        .do_update(abc.baz, False)
        .do_update(abc.bar, Values(abc.bar))
    )

    expected = (
        "INSERT INTO `abc` (`foo`,`bar`,`baz`) VALUES (1,'a',true) "
        "ON DUPLICATE KEY UPDATE `baz`=false,`bar`=VALUES(`bar`)"
    )
    self.assertEqual(expected, str(query))
def test_insert_multi_rows_chained_mixed_on_duplicate_update_multiple(
        self):
    """Multiple insert calls plus multiple duplicate-key updates render together."""
    abc = self.table_abc
    query = (
        MySQLQuery.into(abc)
        .insert((1, "a", True), (2, "b", False))
        .insert(3, "c", True)
        .on_duplicate_key_update(abc.foo, abc.foo)
        .on_duplicate_key_update(abc.bar, Values(abc.bar))
    )

    expected = (
        "INSERT INTO `abc` VALUES (1,'a',true),(2,'b',false),(3,'c',true) "
        "ON DUPLICATE KEY UPDATE `foo`=`foo`,`bar`=VALUES(`bar`)"
    )
    self.assertEqual(expected, str(query))
def insert_users_sql(users):
    """Build an upsert statement for *users* plus its parameter rows.

    Returns a ``(sql, params)`` pair: a parameterized INSERT with an
    ``ON DUPLICATE KEY UPDATE`` clause on ``screen_name``, and one value
    tuple per user.
    """
    field = ['id', 'description', 'followers_count', 'friends_count',
             'statuses_count', 'screen_name']
    getter = attrgetter(*field)
    table = Table('users')

    def params_of(user):
        # created_at needs conversion from ctime to a MySQL datetime string.
        return getter(user) + (ctime_to_mysql_datetime(user.created_at),)

    placeholders = [Parameter('%s') for _ in range(len(field) + 1)]
    statement = (
        Query.into(table)
        .columns(field + ['created_at'])
        .insert(*placeholders)
        .on_duplicate_key_update(table.screen_name, Values(table.screen_name))
    )
    return str(statement), [params_of(u) for u in users]
def set_encrypted_password(doctype, name, pwd, fieldname="password"):
    """Upsert the encrypted password for (doctype, name, fieldname) into Auth.

    Uses a backend-specific conflict clause: MariaDB gets
    ON DUPLICATE KEY UPDATE, Postgres gets ON CONFLICT ... DO UPDATE.
    Raises a user-facing error when the password exceeds the column size.
    """
    query = (
        frappe.qb.into(Auth)
        .columns(Auth.doctype, Auth.name, Auth.fieldname, Auth.password, Auth.encrypted)
        .insert(doctype, name, fieldname, encrypt(pwd), 1)
    )

    # TODO: Simplify this via aliasing methods in `frappe.qb`
    db_type = frappe.db.db_type
    if db_type == "mariadb":
        query = query.on_duplicate_key_update(Auth.password, Values(Auth.password))
    elif db_type == "postgres":
        query = query.on_conflict(Auth.doctype, Auth.name, Auth.fieldname).do_update(
            Auth.password
        )

    try:
        query.run()
    except frappe.db.DataError as e:
        if frappe.db.is_data_too_long(e):
            frappe.throw(_("Most probably your password is too long."), exc=e)
        raise e
def build_query_insert(table, key_duplicate, list_values):
    """Build an INSERT query for *table*.

    :param table: table name
    :param key_duplicate: when True, append an ON DUPLICATE KEY UPDATE
        clause that refreshes every tracked column with ``VALUES(col)``
    :param list_values: list of row value sequences to insert
    :return: the query rendered as a SQL string
    """
    table = Table(table)
    # Normalise each row to a tuple, as the query builder expects.
    rows = [tuple(row) for row in list_values]
    q = BuildInjectQuery.function_insert(
        MySQLQuery.into(table).insert(rows[0]), rows, table, 0)
    if key_duplicate:
        # Columns refreshed on duplicate key.  The original hand-written
        # 40-call chain listed `entity_legalname` twice; the duplicate is
        # dropped here and the chain is generated from this list instead.
        update_columns = (
            'lei',
            'entity_legalname',
            'entity_entity_status',
            'entity_otherentity_names',
            'entity_other_addresses',
            'entity_legal_address_firstaddressline',
            'entity_legal_address_city',
            'entity_legal_address_region',
            'entity_legal_address_country',
            'entity_legal_address_postalcode',
            'entity_headquarter_address_firstaddressline',
            'entity_headquarter_address_city',
            'entity_headquarter_address_region',
            'entity_headquarter_address_country',
            'entity_headquarter_address_postalcode',
            'registration_initial_registration_date',
            'registration_registration_status',
            'registration_next_renewal_date',
            'registration_validation_sources',
        )
        for column in update_columns:
            field = getattr(table, column)
            q = q.on_duplicate_key_update(field, Values(field))
    return str(q)
def stock_list_data(pro, engine):
    """Fetch the stock list, diff it against the Redis cache, and upsert
    changed rows into MySQL; finally dump the full list to a dated log file.

    (Original docstring: 获取股票列表数据 — "fetch stock list data".)
    """
    # Pull all configured fields for every listed company.
    data = pro.stock_basic(fields=",".join(StockCompanyData.get_fields()))
    size = len(data)
    all_list = []
    for index in range(0, size):
        # Replace missing values with "" so rows join cleanly with tabs.
        # NOTE(review): `== None` on a pandas cell likely misses NaN values;
        # `pd.isna(...)` / `is None` would be the usual check — confirm.
        basic_data = [
            "" if data.iloc[index][field] == None else data.iloc[index][field]
            for field in StockCompanyData.get_fields()
        ]
        all_list.append(basic_data)
    redis_conn = get_redis_conn(pool_redis)
    conn = pymysql.connect(user=mysql_config["user"],
                           password=mysql_config["password"],
                           database=mysql_config["database"],
                           charset=mysql_config["charset"])
    cursor = conn.cursor()
    for basic_data in all_list:
        # Tab-joined row is both the Redis cache value and the change marker.
        update_basic_data = "\t".join(basic_data)
        pre_basic_data = redis_conn.hget("stock_basic", basic_data[0])
        # Skip rows whose cached representation is unchanged.
        if pre_basic_data == update_basic_data:
            continue
        redis_conn.hset("stock_basic", basic_data[0], update_basic_data)
        stock_basic_table = Table("stock_basic")
        # Upsert the changed row.
        # NOTE(review): the Values(...) tuple below omits `name` although the
        # column tuple includes it, and Values() is being passed many fields
        # at once — verify this renders the intended ON DUPLICATE KEY UPDATE.
        sql = MySQLQuery.into(stock_basic_table).insert(
            basic_data).on_duplicate_key_update(
                Tuple(stock_basic_table.symbol, stock_basic_table.name,
                      stock_basic_table.area, stock_basic_table.industry,
                      stock_basic_table.fullname, stock_basic_table.enname,
                      stock_basic_table.market, stock_basic_table.exchange,
                      stock_basic_table.curr_type,
                      stock_basic_table.list_status,
                      stock_basic_table.list_date,
                      stock_basic_table.delist_date,
                      stock_basic_table.is_hs),
                Tuple(
                    Values(stock_basic_table.symbol, stock_basic_table.area,
                           stock_basic_table.industry,
                           stock_basic_table.fullname,
                           stock_basic_table.enname, stock_basic_table.market,
                           stock_basic_table.exchange,
                           stock_basic_table.curr_type,
                           stock_basic_table.list_status,
                           stock_basic_table.list_date,
                           stock_basic_table.delist_date,
                           stock_basic_table.is_hs)))
        cursor.execute(sql, )
        # Commit per row so a later failure keeps earlier upserts.
        conn.commit()
    cursor.close()
    conn.close()
    # Write the complete (not just changed) list to a date-stamped log file.
    basic_data_path = "%s/stock_basic_%s.log" % (
        basic_data_config["stock_basic_path"],
        time.strftime('%Y-%m-%d', time.localtime(time.time())))
    with open(basic_data_path, "w+") as f:
        for basic_data in all_list:
            f.write("\t".join(basic_data))
            f.write("\n")
    return