class mysqlPipeLine(object):
    """Scrapy pipeline that stores 4K-picture items into MySQL."""

    # Connection and cursor are created in open_spider, torn down in close_spider.
    conn = None
    cursor = None

    def open_spider(self, spider):
        print('开始爬虫并连接数据库')
        self.conn = Connect(host='127.0.0.1',
                            port=3306,
                            user='******',
                            password='******',
                            db='my_django_project_2',
                            charset='utf8')
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        img_src = item['img_src']
        img_path = img_src.split('/')[-1]  # file name portion of the URL
        # Parameterized query: the original interpolated values inside
        # hand-written double quotes, which broke on values containing
        # quotes and allowed SQL injection.
        sql = 'insert into pic_4k(img_name,img_size,img_path,img_cls) values(%s,%s,%s,%s)'
        try:
            self.cursor.execute(
                sql, (item['img_name'], item['img_size'], img_path, '4k动物'))
            self.conn.commit()
        except Exception as e:
            print('向数据库中插入数据异常:', e)
            self.conn.rollback()
        return item

    def close_spider(self, spider):
        print('结束爬虫并关闭数据库连接')
        self.cursor.close()
        self.conn.close()
def stock(t, c, v=400, z=0.0):
    """Run the dadan-zhanbi analysis for one stock and persist the rows.

    :param t: stock identifier passed to PluginStock (semantics defined by
        PluginStock.set_param — confirm against that class)
    :param c: second analysis parameter forwarded to set_param
    :param v: threshold parameter, default 400
    :param z: threshold parameter, default 0.0
    """
    stk = PluginStock()
    stk.set_param(t, c, v, z)
    results = stk.anyl_dadan_zhanbi()
    connection = Connect(host='103.235.232.114',
                         port=3306,
                         user='******',
                         password='******',
                         db='stock',
                         charset='utf8mb4',
                         cursorclass=cursors.DictCursor)
    # try/finally so the connection is closed even when the insert fails
    # (the original leaked it on any exception).
    try:
        with connection.cursor() as cursor:
            sql = "INSERT INTO rizhanbi(code,zhanbi,timedate) values(%s,%s,%s)"
            cursor.executemany(sql, results)
        connection.commit()
    finally:
        connection.close()
    # if result and (not len(result)==0):
    #     r = redis.Redis(host='103.235.232.114', port=6379, decode_responses=True,password='******')
    #     r.lpush(t,c)

# @app.task(base=CallbackTask)
# def multiply(x,y):
#     return x * y
def add(match):
    """Add the stock captured by *match* to the focus list.

    Returns a short Chinese status string: already added, or success.
    """
    code = match.group(1)
    print('code:', code)
    conn = Connect(host='localhost', port=3306, database='stock_db',
                   user='******', password='******', charset='utf8')
    cur = conn.cursor()
    sql_str = ''' select * from focus where info_id in (select id from info where code = %s); '''
    cur.execute(sql_str, (code, ))
    ret = cur.fetchone()
    print('ret:', ret)
    if ret:
        body = '添加过'
    else:
        sql_str = '''insert into focus (info_id) select id from info where code = %s; '''
        cur.execute(sql_str, (code, ))
        conn.commit()
        body = '成功'
    # Bug fix: the original had `cur.connection`, a no-op attribute access —
    # the cursor was never closed.
    cur.close()
    conn.close()
    return body
def insert_whmcs_ipv4(insert: [(IPv4Address, str, IPv4Address, int)], interface: str, db: Connect,
                      debug: bool = False, verbose: bool = False):
    """
    This function insert given IPs and MACs to WHMCS
    :param insert: The list of IPs, MACs to insert; the FIRST tuple holds the
        gateway address and is consumed (the caller's list is mutated, kept
        for backward compatibility)
    :type insert: [(IPv4Address, str, IPv4Address, int)]
    :param interface: The interface of IPs ("vlanN" yields tag N, else NULL)
    :type interface: str
    :param db: The database connection of WHMCS
    :type db: pymysql.Connect
    :param debug: Disable commit on database
    :type debug: bool
    :param verbose: Print actions on database
    :type verbose: bool
    """
    cursor = db.cursor()

    # Get gateway (first entry), then drop it from the work list.
    gateway = insert[0][0]
    del insert[0]

    # Get vlan if given
    if interface[:4] == "vlan":
        try:
            vlan = int(interface[4:])
        except ValueError:
            raise ValueError("Invalid vlan !")
    else:
        vlan = None  # None is sent as SQL NULL by the driver

    # Parameterized statement: the original built f-string SQL from the
    # input values, which was vulnerable to SQL injection.
    cmd = "INSERT INTO mg_proxmox_addon_ip (ip, type, mac_address, subnet_mask, cidr, sid, gateway, tag) " \
          "VALUES (%s, 'IPv4', %s, %s, %s, 0, %s, %s)"

    # For every IP to insert (entries without a MAC are skipped, as before)
    for i in insert:
        if i[1]:
            args = (str(i[0]), i[1], str(i[2]), i[3], str(gateway), vlan)
            try:
                cursor.execute(cmd, args)
            except Exception:
                print(cmd, args, file=stderr)
                raise
            if debug or verbose:
                print(cmd, args)
    cursor.close()

    # Commit to the DB (skipped entirely in debug mode)
    if not debug:
        print("Commit to DB...")
        db.commit()
        print("Commited to DB")
def db_cursor(commit=False):
    """Yield a cursor on a fresh DB connection; always close the connection.

    When *commit* is true the transaction is committed after the caller's
    block finishes without raising.
    """
    connection = None
    try:
        connection = Connect(host=DBHOST, user=DBUSER, password=DBPASS, db=DB)
        with connection.cursor() as cursor:
            yield cursor
        if commit:
            connection.commit()
    finally:
        if connection is not None:
            connection.close()
def del_foucs(ret):
    """Delete a focus entry by its id (original docstring wrongly said "add").

    Raises ValueError if the captured id is not numeric (preserved from the
    original int() conversion).
    """
    focus_id = ret.group(1)  # focus row id captured from the URL
    conn = Connect(host='localhost', port=3306, user='******',
                   password='******', database='stock_db', charset='utf8')
    cursor = conn.cursor()
    # Parameterized query instead of %-interpolation into the SQL string.
    res = cursor.execute('delete from focus where id = %s', (int(focus_id),))
    if res:
        msg = '删除成功!'
    else:
        msg = '删除失败!'
    conn.commit()
    cursor.close()
    conn.close()
    return msg
def delete(match):
    """Remove the focus row for the stock code captured by *match*."""
    stock_code = match.group(1)
    connection = Connect(host='localhost', port=3306, database='stock_db',
                         user='******', password='******', charset='utf8')
    cursor = connection.cursor()
    delete_sql = ''' delete from focus where info_id = (select id from info where code = %s)'''
    cursor.execute(delete_sql, (stock_code,))
    connection.commit()
    cursor.close()
    connection.close()
    return 'OK'
class QcwyMysqlPipeline(object):
    """Scrapy pipeline persisting 51job items into a MySQL table."""

    def __init__(self, host, database, port, user, password):
        self.host = host
        self.database = database
        self.table_name = '51job'
        self.port = port
        self.user = user
        self.password = password

    @classmethod
    def from_crawler(cls, crawler):
        # Build the pipeline from the project's scrapy settings.
        return cls(
            host=crawler.settings.get('MYSQL_HOST'),
            database=crawler.settings.get('MYSQL_DATABASE'),
            port=crawler.settings.get('MYSQL_PORT'),
            user=crawler.settings.get('MYSQL_USER'),
            password=crawler.settings.get('MYSQL_PASSWORD'),
        )

    def open_spider(self, spider):
        self.db = Connect(
            host=self.host,
            database=self.database,
            port=self.port,
            user=self.user,
            password=self.password,
            charset='utf8',
        )
        self.cursor = self.db.cursor()

    def process_item(self, item, spider):
        data = dict(item)
        keys = ','.join(data.keys())
        # Parameterized insert: the original wrapped each value in literal
        # double quotes, which broke on values containing quotes and was
        # vulnerable to SQL injection.
        placeholders = ','.join(['%s'] * len(data))
        sql = 'insert into %s(%s) values (%s)' % (self.table_name, keys, placeholders)
        try:
            self.cursor.execute(sql, list(data.values()))
            self.db.commit()
        except Exception as e:
            self.db.rollback()
            print('异常错误是:', e)
        # Return the item so downstream pipelines still receive it.
        return item

    def close_spider(self, spider):
        self.cursor.close()
        self.db.close()
def update(ret):
    """Update the note text of a focused stock.

    Group 1 of *ret* is the focus row id, group 2 the URL-encoded note.
    Returns a Chinese status string.
    """
    stock_id = ret.group(1)
    # The browser URL-encodes special characters, so decode the note first.
    note = urllib.parse.unquote(ret.group(2), encoding='utf8')
    conn = Connect(host='localhost', port=3306, user='******',
                   password='******', database='stock_db', charset='utf8')
    cursor = conn.cursor()
    # Parameterized query: the original interpolated the note inside hand
    # written double quotes, breaking on quotes and enabling injection.
    sql = 'update focus set note_info = %s where id = %s;'
    print(sql)
    res = cursor.execute(sql, (note, stock_id))
    if res:
        msg = '修改成功!'
    else:
        msg = '修改失败!'
    cursor.close()
    conn.commit()
    conn.close()
    return msg
class ConnMysql(object):
    """Small MySQL helper that serializes writes through a threading.Lock."""

    # ===================数据库连接===================
    def __init__(self, host='localhost', port=3306, user='******',
                 password='******', database="allip", charset='utf8'):
        self.lock = Lock()
        try:
            self.conn = Connect(host=host, port=port, user=user,
                                password=password, database=database,
                                charset=charset)
            self.cursor = self.conn.cursor()
        except Exception as e:
            # Connection failures are reported but not raised (original
            # behavior); conn/cursor stay unset in that case.
            print("数据库连接异常:{}".format(e))
            print("请检查您是否使用了默认的数据库参数...")

    # ===================保存数据===================
    def exe(self, sql_language):
        """Execute one SQL statement under the lock; rollback on failure."""
        # `with self.lock` acquires before the try body runs; the original
        # acquired inside the try, so a failed acquire would attempt to
        # release an unheld lock in its finally block.
        with self.lock:
            try:
                self.cursor.execute(sql_language)
                self.conn.commit()
            except Exception as e:
                self.conn.rollback()
                print("执行sql语句失败...{}".format(e))

    def close(self):
        """Close cursor and connection."""
        self.cursor.close()
        self.conn.close()
def update_commit(match):
    """Persist an updated (URL-decoded) note for the stock code in *match*."""
    stock_code = match.group(1)
    decoded_note = unquote(match.group(2))
    connection = Connect(host='localhost', port=3306, database='stock_db',
                         user='******', password='******', charset='utf8')
    cursor = connection.cursor()
    update_sql = ''' update focus set note_info = %s where info_id = (select id from info where code = %s);'''
    cursor.execute(update_sql, (decoded_note, stock_code))
    connection.commit()
    cursor.close()
    connection.close()
    return 'OK'
class PythonJobMySqlPipeline(object):
    """Scrapy pipeline that stores crawled job items into MySQL."""

    def open_spider(self, spider):
        print("爬虫开始了mysql------------------------------------------")
        self.client = Connect(host=MYSQL_HOST,
                              user=MYSQL_USER,
                              password=MYSQL_PASSWORD,
                              database=MYSQL_DBNAME,
                              port=MYSQL_PORT,
                              charset='utf8')
        # Fixed the misspelled attribute name (was `self.sursor`).
        self.cursor = self.client.cursor()

    def close_spider(self, spider):
        # Close the cursor and connection, releasing DB resources.
        self.cursor.close()
        self.client.close()

    def process_item(self, item, spider):
        s = dict(item)
        parms = [
            s["url"], s["title"], s["location"], s["company_name"],
            s["salary"], s["company_info"], s["experience"], s["job_info"],
            s["address"], s["crawled"], s["spider"],
        ]
        sql = "INSERT INTO job_items(url,title,location,company_name,salary,company_info,experience,job_info,address,crawled,spider) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        self.cursor.execute(sql, parms)
        # Commit each item's transaction immediately.
        self.client.commit()
        return item
def add_foucs(ret):
    """Add a stock (by code) to the focus list.

    Returns a Chinese status string: unknown stock, already focused,
    or added successfully.
    """
    stock_code = ret.group(1)  # stock code captured from the URL
    conn = Connect(host='localhost', port=3306, user='******',
                   password='******', database='stock_db', charset='utf8')
    cursor = conn.cursor()
    # try/finally so the cursor/connection are closed on the early returns
    # too — the original leaked them when the stock was missing or already
    # focused.
    try:
        # Does the stock exist at all?
        cursor.execute('select 1 from info where code = %s limit 1;',
                       (stock_code,))
        if not cursor.fetchone():
            return '不存在对应的股票信息!'
        # Is it already in the focus list?
        cursor.execute('select * from focus inner join info on focus.info_id = info.id having code = %s;',
                       (stock_code,))
        if cursor.fetchone():
            return '此股票已在关注列表,请勿重复添加!'
        # Not focused yet: add it.
        cursor.execute('insert into focus (info_id) select id from info where code = %s;',
                       (stock_code,))
        conn.commit()
        return '添加成功!'
    finally:
        cursor.close()
        conn.close()
def remove_whmcs_ipv4(remove: [(IPv4Address, str, IPv4Address, int)], db: Connect,
                      debug: bool = False, verbose: bool = False):
    """
    This function remove IPv4 entries from the WHMCS addon table
    :param remove: The list of IPs, MACs to remove
    :type remove: [(IPv4Address, str, IPv4Address, int)]
    :param db: The database connection of WHMCS
    :type db: pymysql.Connect
    :param debug: Disable commit on database
    :type debug: bool
    :param verbose: Print each command on router
    :type verbose: bool
    """
    cursor = db.cursor()
    # Parameterized DELETE: the original embedded the IP in an f-string,
    # which was vulnerable to SQL injection.
    cmd = "DELETE FROM mg_proxmox_addon_ip WHERE ip = %s"
    for i in remove:
        try:
            cursor.execute(cmd, (str(i[0]),))
        except Exception:
            print(cmd, i[0], file=stderr)
            raise
        if debug or verbose:
            print(cmd, i[0])
    cursor.close()

    # Commit to the DB (skipped entirely in debug mode)
    if not debug:
        print("Commit to DB...")
        db.commit()
        print("Commited to DB")
class BhavDB:
    """Persistence layer for NSE bhavcopy (daily price) data.

    Wraps a single DictCursor connection to the 'bhavdata' database and
    provides helpers to create per-year `bhavcopy_YYYY` tables, insert
    rows, record exchange holidays, and round-trip table dumps through
    zip files for git storage.

    NOTE(review): source was recovered from a whitespace-collapsed file;
    nesting was reconstructed from syntax — confirm against the original
    layout where flagged below.
    """

    def __init__(self, mysqldb='bhavdata'):
        # Connect, cache the list of existing bhavcopy_* tables, open the
        # error log file, and build an abbreviation -> month-number map.
        try:
            self.__connection = Connect(host='localhost',
                                        user='******',
                                        password='',
                                        db=mysqldb,
                                        charset='utf8mb4',
                                        cursorclass=cursors.DictCursor)
            with self.__connection.cursor() as cursor:
                tables_sql = "SELECT table_name FROM information_schema.tables where table_schema = 'bhavdata';"
                cursor.execute(tables_sql)
                self._bhavcopy_tables = [
                    t['table_name'] for t in cursor.fetchall()
                ]
            if not path.exists("./logs"):
                os.mkdir("./logs")
            self._logger = open("./logs/bhav_db.log", "w")
            # 'JAN' -> 1, 'FEB' -> 2, ... (calendar.month_abbr[0] is '')
            self._month_dict = {
                v.upper(): k for k, v in enumerate(calendar.month_abbr)
            }
        except OperationalError as op_err:
            raise DBConnectionError(op_err)

    def __del__(self):
        # Best-effort commit/close on garbage collection; exceptions are
        # deliberately swallowed because __del__ may run at shutdown.
        try:
            self.__connection.commit()
            self.__connection.close()
            self._logger.close()
        except Exception as err:
            pass

    # get the first nse bhav date in recorded history
    # return: date
    @property
    def the_first_date(self) -> date:
        """Earliest timestamp stored, read from the oldest bhavcopy table."""
        with self.__connection.cursor() as cursor:
            sql_statement = "SELECT min(table_name) first_year_table FROM information_schema.tables where table_name like 'bhavcopy_%';"
            cursor.execute(sql_statement)
            result = cursor.fetchone()
            sql_statement = "select min(timestamp) the_first_date from {}".format(
                result['first_year_table'])
            cursor.execute(sql_statement)
            result = cursor.fetchone()
            return result['the_first_date']

    @property
    def last_saved_date(self) -> date:
        """Latest timestamp stored, read from the newest bhavcopy table.

        (The column alias 'first_year_table' is reused from the_first_date
        even though it holds the max here.)
        """
        with self.__connection.cursor() as cursor:
            sql_statement = "SELECT max(table_name) first_year_table FROM information_schema.tables where table_name like 'bhavcopy_%';"
            cursor.execute(sql_statement)
            result = cursor.fetchone()
            sql_statement = "select max(timestamp) last_saved_date from {}".format(
                result['first_year_table'])
            cursor.execute(sql_statement)
            result = cursor.fetchone()
            return result['last_saved_date']

    @property
    def unsaved_dates_till_today(self) -> list:
        """Walk weekdays from last_saved_date up to today, printing each.

        NOTE(review): `unsaved_dates` is built but never returned, so this
        property always evaluates to None despite the `-> list` annotation;
        the duplicated `+= timedelta(1)` also looks suspect — confirm the
        intended behavior before relying on this.
        """
        unsaved_dates = []
        unsaved_date: date = self.last_saved_date
        while (unsaved_date <= date.today()):
            if unsaved_date.strftime("%a").upper() not in ['SAT', 'SUN']:
                print(unsaved_date.strftime("%d-%m-%Y"))
                unsaved_dates.append(unsaved_date)
                unsaved_date += timedelta(1)
            unsaved_date += timedelta(1)

    def _log_err(self, row: dict, err: Exception, zip_file_name: str):
        # Append a formatted error record (error, row identity, source zip,
        # and a '+' separator line) to the log file.
        self._logger.write("Error: {}\n".format(str(err)))
        self._logger.write(
            "For: (Symbol: {}; Series: {}; Timestamp: {})\n".format(
                row['SYMBOL'], row['SERIES'], row['TIMESTAMP']))
        self._logger.write("In: ({})\n".format(path.basename(zip_file_name)))
        self._logger.write("+" * max([
            len("Record: Symbol: {}; Series: {}; Timestamp: {}".format(
                row['SYMBOL'], row['SERIES'], row['TIMESTAMP'])),
            len(str(err)),
            len(os.path.basename(zip_file_name))
        ]) + "\n")

    # we will say that data by year
    # this is for github purposes not for production
    # so that we don't have to upload one humongous file every time we push updates to github
    # only the most recent year table is effected.
    # previous year tables are completely historic data so they won't every change once that year is done
    # PRIMARY KEY: This is on the symbol, series and timestamp fields since this combination is unique
    def _create_year_table(self, year: int):
        """Create bhavcopy_<year> if missing; post-2011 tables also carry
        totaltrades and isin columns."""
        table_name = "bhavcopy_{}".format(year)
        create_table_sql = "CREATE table " + " if not exists " + table_name + "("
        create_table_sql += "symbol varchar(15),"
        create_table_sql += "series char(2),"
        create_table_sql += "open DECIMAL(8,2),"
        create_table_sql += "high DECIMAL(8,2),"
        create_table_sql += "low DECIMAL(8,2),"
        create_table_sql += "close DECIMAL(8,2),"
        create_table_sql += "last DECIMAL(8,2),"
        create_table_sql += "prevclose DECIMAL(8,2),"
        create_table_sql += "tottrdqty int unsigned,"
        create_table_sql += "tottrdval bigint unsigned,"
        if year >= 2011:
            # NSE added these fields to bhavcopies from 2011 onward.
            create_table_sql += "totaltrades mediumint unsigned default null,"
            create_table_sql += "isin char(12) default null,"
        create_table_sql += "timestamp date,"
        create_table_sql += "primary key(symbol, series, timestamp)"
        create_table_sql += ");"
        with self.__connection.cursor() as cursor:
            cursor.execute(create_table_sql)

    def keep_only_eq_data(self):
        """Rebuild every table keeping only series='EQ' rows and dropping
        the series column (via a temporary copy of each table)."""
        sql = "select column_name, table_name from information_schema.columns where table_schema = 'bhavdata';"
        with self.__connection.cursor() as cursor:
            cursor.execute(sql)
            # Map table -> list of its columns except 'series'.
            tables = {}
            for row in cursor.fetchall():
                if tables.get(row['table_name'], 0) == 0:
                    columns = []
                else:
                    columns = tables[row['table_name']]
                if row['column_name'] != 'series':
                    columns.append(row['column_name'])
                tables[row['table_name']] = columns
            for table in tables:
                print("doing {}".format(table))
                cursor.execute(
                    "CREATE TEMPORARY TABLE {table}_temp select {columns} from {table} where series = 'EQ';"
                    .format(table=table, columns=",".join(tables[table])))
                cursor.execute("drop table {};".format(table))
                cursor.execute(
                    "CREATE TABLE {table} select {columns} from {table}_temp;".
                    format(table=table, columns=",".join(tables[table])))
                cursor.execute(
                    "ALTER TABLE {} ADD PRIMARY KEY(symbol,timestamp);".format(
                        table))
                cursor.execute("drop table {}_temp;".format(table))
                print("done {}!!".format(table))

    def _get_row_date(self, timestamp) -> date:
        # Parse a 'DD-MON-YYYY' bhavcopy timestamp into a date object.
        time_parts = timestamp.split("-")
        year = int(time_parts[2])
        month = int(self._month_dict[time_parts[1]])
        day = int(time_parts[0])
        return date(year, month, day)

    @property
    def bhav_count_by_year(self):
        """Yield (year-string, row-count) for every bhavcopy table."""
        for bhavcopy_table in self._bhavcopy_tables:
            with self.__connection.cursor() as cursor:
                cursor.execute(
                    "select count(*) bhavcount from {}".format(bhavcopy_table))
                count_result = cursor.fetchone()
                result = search("\d{4}", bhavcopy_table)
                yield result.group(), count_result['bhavcount']

    def insert_bhav_row(self, row: dict, zip_file_name: str):
        """Insert one bhavcopy CSV row, creating the year table on demand.

        Duplicate-key errors for the same (symbol, series, date) are
        silently ignored; other errors are written to the log file.
        """
        row_date: date = self._get_row_date(row['TIMESTAMP'])
        bhavcopy_table = "bhavcopy_{}".format(row_date.year)
        # Build column and value lists from the CSV row (skip blank headers).
        field_sql_list: str = ""
        value_sql_list: str = ""
        for field_name, value in row.items():
            if field_name.strip() != "":
                field_sql_list += field_name + ", "
                if field_name == 'TIMESTAMP':
                    value_sql_list += "'" + str(row_date) + "', "
                else:
                    value_sql_list += "'" + value + "', "
        field_sql_list = field_sql_list.strip(", ")
        value_sql_list = value_sql_list.strip(", ")
        sql_insert = "insert into {table_name} ({field_list}) values({value_list})".format(
            table_name=bhavcopy_table,
            field_list=field_sql_list,
            value_list=value_sql_list)
        with self.__connection.cursor() as cursor:
            try:
                if bhavcopy_table not in self._bhavcopy_tables:
                    self._create_year_table(row_date.year)
                    self._bhavcopy_tables.append(bhavcopy_table)
                cursor.execute(sql_insert)
            except IntegrityError as integrity_err:
                # IMPORTANT: Rather than check, we're just going to let the db fail on duplicates
                # and then not log the duplicate error
                if "(1062, \"Duplicate entry '" + row['SYMBOL'] + "-" + row[
                        'SERIES'] + "-" + row_date.strftime(
                            "%Y-%m-%d") + "\' for key 'PRIMARY'\")" in str(
                                integrity_err):
                    pass
                else:
                    self._log_err(row, integrity_err, zip_file_name)
            except BadTimestampYearError as bad_timestamp_year_err:
                self._log_err(row, bad_timestamp_year_err, zip_file_name)
            except DataError as data_err:
                self._log_err(row, data_err, zip_file_name)
            except InternalError as interal_err:
                self._log_err(row, interal_err, zip_file_name)

    def insert_holiday_data(self, row: dict):
        """Insert one holiday record; duplicate dates are ignored."""
        try:
            with self.__connection.cursor() as cursor:
                cursor.execute(
                    "insert into nse_holidays_temp (`holiday`, `timestamp`) values('{}', '{}')"
                    .format(row['reason'], row['timestamp']))
        except IntegrityError as integrity_err:
            # IMPORTANT: In the off chance that two holidays fall on the same day,
            # we'll just take the first and let the second fall thru
            if "(1062, \"Duplicate entry '{}\' for key 'PRIMARY'\")".format(
                    row['timestamp']) in str(integrity_err):
                pass

    def holidays_by_year(self, year):
        """Return all holiday rows recorded for the given year."""
        with self.__connection.cursor() as cursor:
            cursor.execute(
                "select * from nse_holidays where year(timestamp) = {};".
                format(year))
            return cursor.fetchall()

    @staticmethod
    def _extract_sql_to_bhavdata_dir(zip_file_name) -> ZipInfo:
        # Extract the (single) member of a sql zip into the MariaDB data
        # directory and return its ZipInfo.
        bhavcopy_sql_zip = ZipFile(zip_file_name)
        file_zip_info: ZipInfo = bhavcopy_sql_zip.filelist[0]
        print(file_zip_info.filename)
        bhavcopy_sql_zip.extract(file_zip_info.filename,
                                 "mariadb.bhav/data/bhavdata")
        bhavcopy_sql_zip.close()
        return file_zip_info

    def prepare_data_for_git(self):
        """Dump changed bhavcopy tables to zipped .sql files for git.

        Each table is compared against its previous zip dump (loaded into a
        scratch table) and re-dumped only when the two differ.
        """
        with self.__connection.cursor() as cursor:
            # Map table name -> its existing zip dump, if any.
            bhavcopy_zip_dict = {
                search("(bhavcopy_\d{4})",
                       bhavcopy_sql_zip_file).groups()[0]:
                bhavcopy_sql_zip_file
                for bhavcopy_sql_zip_file in glob.glob(
                    "bhavcopy.sql.zip/*.sql.zip")
            }
            # let's create two temp year tables - one for pre-2011 and one for post (after total trades was added)
            self._create_year_table(1000)
            self._create_year_table(
                3000
            )  # of course, we're assuming this prog will not be used in year 3000 :)
            for bhavcopy_table in self._bhavcopy_tables:
                print("Doing: {}".format(bhavcopy_table))
                try:
                    # create a temp 1000 year table
                    temp_year_table = None
                    result = search("\d{4}", bhavcopy_table)
                    if int(result.group()) >= 2011:
                        temp_year_table = "bhavcopy_{}".format(3000)
                    else:
                        temp_year_table = "bhavcopy_{}".format(1000)
                    bhavcopy_zip = bhavcopy_zip_dict.get(bhavcopy_table, None)
                    make_zip = False
                    if bhavcopy_zip:
                        # Load the old dump into the scratch table and diff it
                        # against the live table by (symbol, timestamp).
                        file_zip_info: ZipInfo = self._extract_sql_to_bhavdata_dir(
                            bhavcopy_zip_dict[bhavcopy_table])
                        cursor.execute(
                            "truncate table {};".format(temp_year_table))
                        cursor.execute(
                            "load data infile '{}' into table {};".format(
                                file_zip_info.filename, temp_year_table))
                        cursor.execute("SELECT symbol, timestamp FROM " \
                            "(SELECT symbol, timestamp FROM {} UNION ALL SELECT symbol, timestamp FROM {}) tbl " \
                            "GROUP BY symbol, timestamp HAVING count(*) = 1 ORDER BY symbol, timestamp;".format(bhavcopy_table, temp_year_table))
                        table_diff = cursor.fetchone()
                        if table_diff:
                            make_zip = True
                    else:
                        make_zip = True
                    if make_zip:
                        # Dump the live table and zip the resulting .sql file.
                        cursor.execute(
                            "select * into outfile '../../bhavcopy.sql.zip/{0}.sql' from {0};"
                            .format(bhavcopy_table))
                        with ZipFile(
                                "bhavcopy.sql.zip/{0}.sql.zip".format(
                                    bhavcopy_table), 'w',
                                ZIP_DEFLATED) as sql_zip:
                            sql_zip.write(
                                "bhavcopy.sql.zip/{0}.sql".format(
                                    bhavcopy_table),
                                "{}.sql".format(bhavcopy_table))
                        os.remove(
                            "bhavcopy.sql.zip/{0}.sql".format(bhavcopy_table))
                except InternalError as int_err:
                    print(int_err)
            # drop the temp year tables
            cursor.execute("drop table bhavcopy_{};".format(1000))
            cursor.execute("drop table bhavcopy_{};".format(3000))
            # zip back-up of nse_holidays table
            # cursor.execute(
            #     "select * into outfile '../../bhavcopy.sql.zip/nse_holidays.sql' from nse_holidays;")

    def get_data_of_git(self):
        """Rebuild the bhavdata database from the zipped .sql dumps."""
        with self.__connection.cursor() as cursor:
            try:
                cursor.execute("create database bhavdata;")
                cursor.execute("use bhavdata;")
                bhavcopy_sql_zip_files = glob.glob("bhavcopy.sql.zip/*.zip")
                for bhavcopy_sql_zip_file in bhavcopy_sql_zip_files:
                    file_zip_info: ZipInfo = BhavDB._extract_sql_to_bhavdata_dir(
                        bhavcopy_sql_zip_file)
                    (table_name,
                     year) = search("(bhavcopy_(\d{4}))",
                                    file_zip_info.filename).groups()
                    self._create_year_table(int(year))
                    cursor.execute(
                        "load data infile '{}' into table {};".format(
                            file_zip_info.filename, table_name))
                    os.remove("mariadb.bhav/data/bhavdata/{}".format(
                        file_zip_info.filename))
            except ProgrammingError as prog_err:
                print(prog_err)

    @staticmethod
    def _is_holiday(day, cursor) -> bool:
        # Weekends count as holidays; otherwise look the date up in the
        # nse_holidays_temp table via the supplied cursor.
        if day.strftime("%a").upper() in ['SAT', 'SUN']:
            return True
        else:
            cursor.execute(
                "select count(timestamp) holiday_count from nse_holidays_temp where timestamp = '{}'"
                .format(day))
            # x = cursor.fetchone()
            return int(cursor.fetchone()['holiday_count']) == 1

    @property
    def no_bhav_days(self):
        """Scan every stored date and log weekdays with no bhav data that
        are not recorded holidays.

        NOTE(review): `count` is printed at the end but never incremented,
        and nothing is returned — looks like leftover debugging; confirm.
        """
        count = 0
        day = self.the_first_date
        with self.__connection.cursor() as cursor:
            bhav_days = []
            for bhavcopy_table in self._bhavcopy_tables:
                result = search("\d{4}", bhavcopy_table)
                cursor.execute(
                    "select distinct(timestamp) as bhavdate from {};".format(
                        bhavcopy_table))
                for row in cursor.fetchall():
                    bhav_days.append(row['bhavdate'])
            while self.the_first_date <= day <= self.last_saved_date:
                print(day)
                if day not in bhav_days and not BhavDB._is_holiday(
                        day, cursor):
                    print("a real holiday")
                    self._logger.write(
                        day.strftime("%a-%d-%b-%Y").upper() + "\n")
                day = day + timedelta(1)
            print("x" * 30)
            print(count)
class Resource(SQLResource): # MySQL resource
    """pmnc SQL resource adapter for MySQL.

    Executes parameterized SQL by first binding each value to a MySQL user
    variable (SET @name=value) and then substituting @name placeholders
    into the statement text. Runs with autocommit off; transaction
    boundaries are driven by commit()/rollback().

    NOTE(review): source was recovered from a whitespace-collapsed file;
    nesting was reconstructed from syntax.
    """

    @typecheck
    def __init__(self, name: str, *,
                 decimal_precision: (int, int),
                 server_address: (str, int),
                 connect_timeout: float,
                 database: str,
                 username: str,
                 password: str,
                 sql_mode: optional(str),
                 charset: optional(str)):
        SQLResource.__init__(self, name, decimal_precision = decimal_precision)
        self._host, self._port = server_address
        self._connect_timeout = connect_timeout
        self._database = database
        self._username = username
        self._password = password
        self._sql_mode = sql_mode
        self._charset = charset or "utf8"  # fall back to utf8 when not given

    ###################################

    def connect(self):
        """Open the MySQL connection and switch to explicit transactions."""
        SQLResource.connect(self)
        self._connection = Connect(host = self._host, port = self._port,
                                   connect_timeout = self._connect_timeout,
                                   db = self._database, user = self._username,
                                   passwd = self._password, sql_mode = self._sql_mode,
                                   charset = self._charset)
        try:
            self._connection.autocommit(False)
        except:
            # don't leak the connection if autocommit setup fails
            self._connection.close()
            raise

    ###################################

    def _execute_sql(self, sql, params):
        """Execute one statement; return a list of dict records on success.

        Parameters are pre-escaped strings (see the _py_to_sql_* methods)
        bound as MySQL session variables before the statement runs.
        """
        try:
            # Rendered "@name = value" pairs (for logging) and the
            # {name} -> @name substitution map for the statement text.
            param_list = ", ".join("@{0:s} = {1:s}".format(n, v) for n, v in params.items())
            at_params = { n: "@{0:s}".format(n) for n in params.keys() }
            sql = sql.format(**at_params)
        except:
            ResourceError.rethrow(recoverable = True, terminal = False)
        cursor = self._connection.cursor()
        try:
            # Bind every parameter as a session variable first.
            for n, v in params.items():
                cursor.execute("SET @{0:s}={1:s}".format(n, v))
            pmnc.log.info(">> {0:s}".format(sql))
            if param_list:
                if pmnc.log.debug:
                    pmnc.log.debug("-- {0:s} -- ({1:s})".format(sql, param_list))
            records = []
            try:
                cursor.execute(sql)
                rowcount = cursor.rowcount
                if rowcount >= 0:
                    pmnc.log.info("<< OK, {0:d} record(s)".format(rowcount))
                    if rowcount > 0 and cursor.description:
                        # Convert each result tuple into a column->value dict.
                        column_names = [ t[0] for t in cursor.description ]
                        for record in cursor.fetchall():
                            records.append(dict(zip(column_names, record)))
                else:
                    pmnc.log.info("<< OK")
            except MySQL_Error as e:
                code, message = e.args[0].args
                pmnc.log.warning("<< {0:s}{1:s} !! MySQL_Error(\"[{2:d}] {3:s}\") in {4:s}".\
                                 format(sql, " -- ({0:s})".format(param_list) if param_list else "",
                                        code, message, trace_string()))
                SQLResourceError.rethrow(recoverable = True, code = code,
                                         description = message) # note that there is no state
            except Exception:
                pmnc.log.warning("<< {0:s}{1:s} !! {2:s}".\
                                 format(sql, " -- ({0:s})".format(param_list) if param_list else "",
                                        exc_string()))
                ResourceError.rethrow(recoverable = True)
            else:
                return records
        finally:
            cursor.close()

    ###################################

    def commit(self):
        # Commit the current transaction.
        self._connection.commit()

    ###################################

    def rollback(self):
        # Roll back the current transaction.
        self._connection.rollback()

    ###################################

    def disconnect(self):
        # Close the connection; failures are logged but never propagated.
        try:
            self._connection.close()
        except:
            pmnc.log.error(exc_string()) # log and ignore
        finally:
            SQLResource.disconnect(self)

    ###################################

    # extra Python types this resource can marshal beyond the base class
    _supported_types = SQLResource._supported_types | { float, set, date, time }

    # Python -> SQL literal converters; most defer to the base class and
    # then escape through the live connection.
    def _py_to_sql_NoneType(self, v):
        return self._connection.escape(SQLResource._py_to_sql_NoneType(self, v))

    def _py_to_sql_int(self, v):
        return self._connection.escape(SQLResource._py_to_sql_int(self, v))

    def _py_to_sql_Decimal(self, v):
        return self._connection.escape(SQLResource._py_to_sql_Decimal(self, v))

    def _py_to_sql_bool(self, v):
        return self._connection.escape(SQLResource._py_to_sql_bool(self, v))

    def _py_to_sql_datetime(self, v):
        return self._connection.escape(SQLResource._py_to_sql_datetime(self, v))

    def _py_to_sql_str(self, v):
        return self._connection.escape(SQLResource._py_to_sql_str(self, v))

    def _py_to_sql_bytes(self, v):
        # binary data travels as a hex literal X'...'
        return "X'{0:s}'".format(b2a_hex(v).decode("ascii"))

    def _py_to_sql_float(self, v):
        return self._connection.escape(v)

    def _py_to_sql_set(self, v):
        # SET literal: comma-joined escaped string members (quotes stripped)
        return "'{0:s}'".format(",".join(self._connection.escape(s)[1:-1]
                                         for s in v if isinstance(s, str)))

    def _py_to_sql_date(self, v):
        return self._connection.escape(v)

    def _py_to_sql_time(self, v):
        return self._connection.escape(v)

    ###################################

    # SQL -> Python converters; these types come back in usable form already.
    def _sql_to_py_float(self, v):
        return v

    def _sql_to_py_set(self, v):
        return v

    def _sql_to_py_date(self, v):
        return v

    def _sql_to_py_time(self, v):
        return v

    def _sql_to_py_timedelta(self, v): # PyMySQL represents TIME as timedelta
        return time(hour = v.seconds // 3600,
                    minute = (v.seconds % 3600) // 60,
                    second = v.seconds % 60)
# Demo script: rebuild the testpeopledb database and exercise basic queries.
conn = Connect(host='localhost', user='******', passwd='ssywan933900.')
curs = conn.cursor()
try:
    curs.execute('drop database testpeopledb')
except Exception:
    # best-effort: the database may not exist yet (original used a bare
    # except, which also swallowed KeyboardInterrupt/SystemExit)
    pass
curs.execute('create database testpeopledb')
curs.execute('use testpeopledb')
curs.execute('create table people (name char(30), job char(10), pay int(10))')
# One executemany instead of three identical parameterized executes.
curs.executemany('insert people values (%s, %s, %s)',
                 [('Bob', 'dev', 50000),
                  ('Sue', 'dev', 60000),
                  ('Ann', 'mgr', 40000)])
curs.execute('select * from people')
for row in curs.fetchall():
    print(row)
curs.execute('select * from people where name = %s', ('Bob', ))
print(curs.description)
colnames = [desc[0] for desc in curs.description]
while True:
    print('-' * 30)
    row = curs.fetchone()
    if not row:
        break
    for (name, value) in zip(colnames, row):
        print('%s => %s' % (name, value))
conn.commit()
# Close the connection (the original leaked it).
conn.close()