"""
Press Enter on an empty line to exit.
Server side: looks up words,
the words are queried from the dict database,
and the query result is returned to the client.
"""
from socket import *
import pymysql

args = {
    "host": "localhost",
    "port": 3306,
    "user": "******",
    "password": "******",
    "database": "dict",
    "charset": "utf8"
}

db = pymysql.Connect(**args)
cur = db.cursor()

# create a udp socket
udp_sock = socket(AF_INET, SOCK_DGRAM)
# bind the address
udp_sock.bind(("0.0.0.0", 8888))

while True:
    # receive data; recvfrom blocks until data arrives
    data, addr = udp_sock.recvfrom(1024)
    print("接收到word:", data.decode())  # data --> bytes
    # look up the word's meaning
    sql = "select mean from words where word=%s;"  # placeholders must all be %s for the sql statement to be built correctly
    cur.execute(sql, [data.decode()])
    res = cur.fetchone()  # returns a tuple
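    # The docstring says the server should return the query result to the
    # client, but the original snippet stops after fetchone(). The lines below
    # are only a hedged sketch of that reply step: they assume the loop
    # continues with the same udp_sock, res and addr, and that a missing word
    # gets a simple fallback message.
    if res:
        reply = res[0]  # first column of the fetched row: the word's meaning
    else:
        reply = "Not found"
    udp_sock.sendto(reply.encode(), addr)  # send the result back to the client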
def parse(self, response):
    parse_url = response.url  # url of the page being parsed
    bookId = parse_url.split('_')[1].strip('/')
    conn = pymysql.Connect(host=self.host,
                           port=self.port,
                           user=self.user,
                           password=self.password,
                           db=self.dbname,
                           charset=self.dbcharset)
    self.conn = conn
    cursor = conn.cursor()
    # parse the novel page
    novel_name = response.xpath('//div[@id="info"]/h1/text()').extract_first()
    novel_author = response.xpath('//div[@id="info"]/p[1]/text()').extract_first().split(':')[1]
    novel_img = 'https://www.xinxs.la' + response.xpath('//div[@id="fmimg"]/img/@src').extract_first()
    novel_info = response.xpath('//div[@id="intro"]/text()').extract_first().strip()
    novel_lastupt = response.xpath('//div[@id="info"]/p[3]/text()').extract_first().split(':')[1]
    novel_type = response.xpath('//meta[@property="og:novel:category"]/@content').extract_first()
    chapter_list = response.xpath('//div[@id="list"]/dl/dd/a/@href')
    chapters = []
    # only keep hrefs that contain a slash, skip the rest
    for chapter in chapter_list:
        chapter_str = chapter.extract()
        if '/' not in chapter_str:
            continue
        chapters.append(chapter_str)
    type_dict = {
        '武侠仙侠': '5',
        '玄幻奇幻': '4',
        '都市言情': '9',
        '历史军事': '6',
        '网游竞技': '21',
        '科幻灵异': '8',
        '女频频道': '9'
    }
    try:
        label = type_dict[novel_type]
    except:
        label = '4'
    try:
        updated = int(time.mktime(time.strptime(novel_lastupt, "%d/%m/%Y %H:%M:%S %p")))
    except:
        updated = int(time.mktime(time.strptime(novel_lastupt, "%m/%d/%Y %H:%M:%S %p")))
    # check whether the novel already exists in the database
    sql_find = "select id from novels where name='%s' and bookId='%s' and novel_web=4;"
    cursor.execute(sql_find % (novel_name, bookId))
    fin = cursor.fetchone()
    if not fin:
        # new novel: collect it
        words = 0
        now_time = datetime.now()
        now_time = now_time.strftime("%Y-%m-%d %H:%M:%S")
        setargs(novel_author, [], '都市', conn)  # add the author to the author table
        sql = "select id from author where name='%s';"
        cursor.execute(sql % novel_author)
        try:
            authorId = cursor.fetchone()[0]  # author id
        except Exception as e:
            authorId = 170
            print(e)
        sql = "insert into novels(name,cover,summary,label,state,words,created,updated,authorId,target,score,bookId,addtime,novel_web,updatetime) values('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s');"
        cursor.execute(sql % (novel_name, novel_img, novel_info, label, '1', words, '0',
                              updated, authorId, '', '9.5', bookId, now_time, '4', now_time))
        conn.commit()
        # get the novel id
        sql = "select id from novels where bookId='%s' and name='%s';"
        cursor.execute(sql % (bookId, novel_name))
        novelId = cursor.fetchone()[0]
        try:
            # add the novel name to the elasticsearch index
            self.es.index(index="novel-index",
                          id=novelId,
                          body={"title": novel_name, "timestamp": datetime.now()})
        except Exception as e:
            print('%s-----------%s' % (now_time, e))
        chapter_count = 0  # total number of chapters
        for chapter in chapters:
            chapter_count += 1
            # build the chapter url, e.g. http://www.xbiquge.la/19/19523/10080224.html
            url = 'https://www.xinxs.la' + chapter
            yield scrapy.Request(url=url,
                                 callback=self.parse_chaper,
                                 meta={'novelId': novelId, 'bookId': bookId, 'chapterId': chapter_count})
    else:
        # existing novel: update it
        novelId = fin[0]
        now_time = datetime.now()
        now_time = now_time.strftime("%Y-%m-%d %H:%M:%S")
        chapter_count = 0  # total number of chapters
        for chapter in chapters:
            chapter_count += 1
            # check whether the chapter already exists
            sql_find = "select id from chapters where novelId='%s' and chapterId='%s';"
            chapter_url = chapter
            chapterId = chapter_count
            # only fetch chapters whose novelId/chapterId is not yet in the database
            if not cursor.execute(sql_find % (novelId, chapterId)):
                # refresh the novel's update time in the novels table
                sql_update = "update novels set updated='%s',updatetime='%s' where id='%s';"
                cursor.execute(sql_update % (updated, now_time, novelId))
                conn.commit()
                # build the chapter url, e.g. http://www.xbiquge.la/19/19523/10080224.html
                url = 'https://www.xinxs.la' + chapter_url
                yield scrapy.Request(url=url,
                                     callback=self.parse_chaper,
                                     meta={'novelId': novelId, 'bookId': bookId, 'chapterId': chapter_count})
    sql = "update novels set chaptercount= '%s' where id='%s';"
    cursor.execute(sql % (chapter_count, novelId))
    conn.commit()
        if rst:
            return rst
        else:
            print('1')
            return
    except:
        f = open("wxmail.txt", 'a')
        traceback.print_exc(file=f)
        f.flush()
        f.close()
    finally:
        cur.close()


if __name__ == '__main__':
    conn = pymysql.Connect(host='rm-bp1nxe7521rz0v77go.mysql.rds.aliyuncs.com',
                           user='******', passwd='mytijian', db='mytijian_dw',
                           port=3306, charset='utf8')
    dt = getwxdata(conn)
    plus = getSum(conn)
    conn.close()
    tb_head = '<tr align="left" style="color:red"><td></td><td>体检中心</td><td>新关注</td><td>取消关注</td><td>总关注</td><td>新绑定</td><td>总绑定</td>' \
              '<td>其他</td><td>搜索</td><td>分享</td><td>扫码</td><td>支付</td></tr>'
    total_str = '<tr align="left" style="color:red"><td></td><td>合计:{}</td><td>{}</td><td>{}</td><td>{}</td><td>{}</td><td>{}</td>' \
                '<td>{}</td><td>{}</td><td>{}</td><td>{}</td><td>{}</td></tr>'
    tb_body = ''
    for i in dt:
        # only show rows whose total follower count is >= 500
        if i[4] < 500:
            break
        par = ''
        tmp = '<td>{}</td>'.format(str(dt.index(i) + 1))
        for j in range(1, len(i)):
import pymysql

conn = pymysql.Connect(
    host='10.35.165.217',  # host where MySQL is running
    port=3306,             # MySQL's default port
    user='******',         # user name for the MySQL connection
    passwd='123456',       # password for the MySQL connection
    db='mydb',             # database to use
    charset='utf8',        # connection encoding
)
cursor = conn.cursor()  # get a cursor object
sql = "select stuid, stuname, sex, score from student where stuid = %d"
cursor.execute(sql % (1, ))  # cursor.execute(sql % 1) also works
result = cursor.fetchone()  # fetch one row; returns a tuple whose elements are the record's fields
print("查询到的结果是:", result)
print("学号:", result[0], "姓名:", result[1], "性别:", result[2], "成绩:", result[3])
cursor.close()
conn.close()
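# A hedged alternative to the %-formatting above (not from the original
# snippet): let pymysql bind the value itself. It assumes the same student
# table and an open connection/cursor, i.e. it would run before the
# cursor.close()/conn.close() calls above. Note the placeholder is %s even
# for numeric columns when the driver does the binding.
param_sql = "select stuid, stuname, sex, score from student where stuid = %s"
cursor.execute(param_sql, (1,))  # the driver escapes and substitutes the value
print("parameterized query result:", cursor.fetchone())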
import requests
from bs4 import BeautifulSoup
import json
import pymysql

headers = {
    "User-Agent": "Mozilla/5.0 (Linux; Android 5.1.1; nxt-al10 Build/LYZ28N) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/39.0.0.0 Mobile Safari/537.36 sinablog-android/5.3.2 (Android 5.1.1; zh_CN; huawei nxt-al10/nxt-al10)",
    "Content-Type": "application/x-www-form-urlencoded; charset=utf-8"
}

conn = pymysql.Connect(host='localhost', port=3306, user='******', passwd='123456', db='spider', charset='utf8mb4')


def start():
    next_page(1)
    conn.close()


def next_page(page):
    r = requests.get(url='https://igg-games.com/page/' + str(page), data=None, headers=headers)
    soup = BeautifulSoup(r.text, 'html.parser')
    articles = soup.find_all("article")
    items = list(map(convert_item, articles))
# Import library
from main import bot, dp
from aiogram import types
from aiogram.types import Message
from config import admin_id
import pymysql
from config import dbHost, dbUser, dbPassword, dbName

# DB connection
connection = pymysql.Connect(host=dbHost, user=dbUser, password=dbPassword, database=dbName)

# Keyboard
keyboard_markup = types.ReplyKeyboardMarkup(row_width=3)
arr_keyboard = ['btn1', 'btn2']


# Send message to admin
async def send_to_admin(dp):
    await bot.send_message(
        chat_id=admin_id,
        text="Добро пожаловать, Господь Бог. Напиши /start, чтобы посмотреть мои навыки"
    )


# Start bot function
@dp.message_handler(commands=['start'])
async def send_welcome(message: types.Message):
    keyboard_markup.add(*(types.KeyboardButton(text) for text in arr_keyboard))
    await message.answer(text='Привет! Потестим кнопки', reply_markup=keyboard_markup)
from sys import argv

import pymysql  # needed for pymysql.Connect below

mysql_config = {
    'host': 'cashlending-readonly.c3dsrzz0nv8o.ap-southeast-1.rds.amazonaws.com',
    'port': 3306,
    'user': '******',
    'password': '******',
    # 'db': 'cashlending',
    'charset': 'utf8'
}

data_begin = int(argv[1])
data_end = int(argv[2])

con = pymysql.Connect(**mysql_config)

application_sql = 'select lai.id,\
    lai.member_id,\
    laix.longitude,\
    lai.geographical_location, \
    loi.status \
    from cashlending.loan_application_info lai left join \
    cashlending.loan_order_info loi \
    on lai.id = loi.application_id \
    left join cashlending.loan_application_info_ext laix \
    on lai.id = laix.application_id \
    where lai.is_older = 0 and lai.application_time > {} \
    and lai.application_time < {}'
td_sql = 'select application_id,td_device_id,td_wifiip,td_wifimac \
import pymysql

sql = 'select * from my.article'
conn = pymysql.Connect(host='192.168.56.101', port=3306, user='******', password='******', charset='utf8')
cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)
cursor1 = conn.cursor(cursor=pymysql.cursors.DictCursor)
cursor.execute(sql)
rows = cursor.fetchall()
texts = []
titles = []
tags = []
for item in rows:
    text = item['content']
    title = item['title']
    tags.append('金融')
    texts.append(text)
    titles.append(title)
    # print(title,'\t',text)

sql1 = "insert ignore into renote.note_content(content, tag, creater_id, shared, `type`) VALUES (%s,'金融',8,1,'text')"
sql2 = "insert ignore into renote.note(nbook_id, nc_id, `delete`, note_name, gmt_create, gmt_modified)" \
       " VALUES (13,%s,0,%s,now(),now())"
for text, title in zip(texts, titles):
    cursor.execute(sql1, (text,))  # single-element tuple so pymysql binds it as one parameter
    id = cursor.lastrowid
def connect(self):
    db = pymysql.Connect(host=DB_HOST, user=DB_USER, password=DB_PASSWD, database=DB_NAME, port=DB_PORT, charset='utf8')
    return db
from ks_02_open_redpacket_more import login, http_request
import pymysql

time_api = 'https://test_gateway.guochuangyuanhe.com/api/v1/nest/timeInfo/queryNestTimeInfo'
db = pymysql.Connect(host='172.18.228.112', user='root', password='3nOI9ca45%$#8Gm7EH', database='gcyh_nest')
cursor = db.cursor()
cursor.execute(
    'select id,longitude,latitude from nest_location where id between 249001 and 249003'
)
location_id = cursor.fetchall()


def get_frist_time(token):
    for i in location_id:
        data = {
            "nestLocationId": i[0],
            "locationLatitude": str(i[1]),
            "locationLongitude": str(i[2])
        }
        print(data)
        r = http_request(api=time_api, method='post', token=token, **data)
        print(r)
        aa = r.get('response').get('current').get('nestTimeInfoId')
        print(aa)
        return aa


if __name__ == '__main__':
    phone = (a for a in range(19931997001, 19931997003))
    pass_word = 123456
    for mobie in phone:
import requests
import json
import pymysql
import time
from datetime import datetime

# conn = pymysql.connect(user='******', password='******', database='b50_demo', charset='utf8')
conn = pymysql.Connect(host="139.196.160.147", port=3306, user='******', password='******', database='B50', charset='utf8')

SAMPLING_DELAY = 20
MODIAN_DELAY = 0.5


def update_modian():
    """
    Description:
        Sample and resolve detailed information for each given modian funding project.
        Resolved fields include project name, project id, real-time amount, start time, etc.
        For each fan club, sample every project it has issued.
        The resolved fields are inserted into the database, or updated if the project already exists there.
    Parameter: none
    Author: Lu.Biq Pan
    Version: 1.0
    Date: September 2019
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
import threading
import requests
from lxml import etree
import pymysql

# download the data with a process pool
pool = ProcessPoolExecutor(4)
mysql_conn = pymysql.Connect(host='127.0.0.1', user='root', password='bc123', db='tengxun', port=3306, charset='utf8')
cursor = mysql_conn.cursor()


def tulin(page):
    # for page in range(1,60):
    url = 'http://www.ituring.com.cn/book?tab=book&sort=hot&page={}'.format(str(page))
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
    }
    response = requests.get(url, headers=headers)
    content = response.text
    html = etree.HTML(content)
    x_url = html.xpath('//div[@class="book-info"]/h4/a/@href')
    get_datail(x_url)
def __init__(self, MYSQLHOST, MYSQLPORT, MYSQL_DB, MYSQLUSER, MYSQLPWD):
    self.client = pymysql.Connect(host=MYSQLHOST, port=MYSQLPORT, database=MYSQL_DB, user=MYSQLUSER, password=MYSQLPWD, charset='utf8')
    self.cur = self.client.cursor()
def saving():
    import pymysql
    conn = pymysql.Connect(host='')
    cur = conn.cursor()
def open_spider(self, spider):
    print('爬虫开始》》》》')
    self.conn = pymysql.Connect(host='192.168.1.210', user='******', passwd='zhangxing888', db='ktcx_buschance', port=3306, charset='utf8')
    self.cur = self.conn.cursor()  # get a cursor
def get_con():
    return pymysql.Connect(host=db_host, user=db_user, password=db_password, database=db_data_base)
def connet_con(self):
    self.con = pymysql.Connect(host='localhost', user='******', password='******', database='books', autocommit=True)
def open_spider(self, spider):
    self.conn = pymysql.Connect(host='127.0.0.1', port=3306, user='******', password='******', db='qiubai', charset='utf8')
#!/usr/bin/python3.6
# -*- coding: utf-8 -*-
# @Author : 洋燚
# @Email : [email protected]
import pymysql

connect = pymysql.Connect(host='192.168.1.124', port=3306, user='******', passwd='16899168', db='test_elements', charset='utf8')
cursor = connect.cursor()

# create the table
sqlcreate = """CREATE TABLE EMPLOYEE (
    FIRST_NAME CHAR(20) NOT NULL,
    LAST_NAME CHAR(20),
    AGE INT,
    SEX CHAR(1),
    INCOME FLOAT
)"""
cursor.execute(sqlcreate)
connect.commit()

# sql = "INSERT INTO trade (name, account, saving) VALUES ( '%s', '%s', %.2f )"
# data = ('雷军', '13512345678', 10000)
# cursor.execute(sql % data)
# connect.commit()
print('成功插入', cursor.rowcount, '条数据')
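# A hedged sketch (not in the original snippet): the commented-out insert above
# targets a different `trade` table, so this applies the same idea to the
# EMPLOYEE table just created, using driver-side parameter binding; the sample
# row is made up for illustration.
insert_sql = ("INSERT INTO EMPLOYEE (FIRST_NAME, LAST_NAME, AGE, SEX, INCOME) "
              "VALUES (%s, %s, %s, %s, %s)")
cursor.execute(insert_sql, ('San', 'Zhang', 25, 'M', 8000.0))
connect.commit()
print('Inserted', cursor.rowcount, 'row(s) into EMPLOYEE')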
import hashlib
import pymysql

import settings

conn = pymysql.Connect(**settings.parameters)
cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)

username = input("请输入用户名:")
password = input("请输入密码:")
password = hashlib.sha1(password.encode('utf8')).hexdigest()
sql = "select username,password from user where username=%s and password=%s"
res = cursor.execute(sql, [username, password])
if res:
    print('登陆成功')

cursor.close()
conn.close()
# jdbc:mysql://127.0.0.1:3306/biyanzhi
import pymysql

db = pymysql.Connect(host='localhost', port=3306, user='******', passwd='root', db='test', charset='utf8')
cursor = db.cursor()


def queryInfo(user_id):
    sql = "select * from user WHERE user_number='%s'"
    data = (user_id)
    cursor.execute(sql % data)
    result = cursor.fetchone()
    return result[1]

# # close the database connection
# db.close()
import pymysql

db = pymysql.Connect(host='localhost', user='root', password='17600117243', database='test_db')
cursor = db.cursor()

# SQL UPDATE statement
sql = "UPDATE EMPLOYEE SET AGE = AGE + 1 WHERE SEX = '%c'" % ('M')

try:
    # execute the SQL statement
    cursor.execute(sql)
    # commit the change to the database
    db.commit()
except:
    # roll back on error
    db.rollback()

# close the database connection
db.close()
# c.execute('UPDATE book SET price=? WHERE id=?', (1000, 1))
# c.execute('DELETE FROM book WHERE id=2')
# conn.commit()
# conn.close()

# # retrieve one record
# c.execute('SELECT name FROM category ORDER BY sort')
# print(c.fetchone())
# print(c.fetchone())
#
# # retrieve all records as a list
# c.execute('SELECT * FROM book WHERE book.category=1')
# print(c.fetchall())
#
# # iterate through the records
# for row in c.execute('SELECT name, price FROM book ORDER BY sort'):
#     print(row)

import pymysql

conn = pymysql.Connect(host='127.0.0.1', port=3306, user='******', passwd='root', db='imooc', charset='utf8')
cursor = conn.cursor()
print(conn)
print(cursor)
cursor.close()
conn.close()
# coding=utf-8
import pymysql

db = pymysql.Connect(
    host="192.168.5.99",
    port=3306,
    user="******",
    password="******",
    database="cms",
    charset='utf8',
)
cur = db.cursor()  # get a cursor

try:
    cur.execute("update user set blance = blance - 100 where name = 'zero';")
    cur.execute("update user set blance = blance + 100 where name = 'lisi';")
    db.commit()  # commit
except:
    db.rollback()  # roll back

cur.close()  # close the cursor
db.close()   # close the database connection
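# A hedged variant of the transfer above, not from the original snippet: it
# assumes the connection is still open (i.e. it would run before the close()
# calls above), uses the cursor as a context manager so it is closed
# automatically, and lets pymysql bind the amount and account names. The
# misspelled column name `blance` is kept because that is what the original
# schema uses.
amount, payer, payee = 100, 'zero', 'lisi'
try:
    with db.cursor() as cur2:
        cur2.execute("update user set blance = blance - %s where name = %s", (amount, payer))
        cur2.execute("update user set blance = blance + %s where name = %s", (amount, payee))
    db.commit()
except Exception:
    db.rollback()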
def __init__(self, *args, **kwargs):
    super(Application, self).__init__(*args, **kwargs)
    self.db = pymysql.Connect(**config.mysql_options)
    self.redis = redis.StrictRedis(**config.redis_options)
    print(self.redis)
# import mysql.connector
import pymysql

config = {
    'host': 'localhost',
    'port': 3306,
    'database': 'LojaDB',
    'user': '******',
    'password': '******'
}

# dbConexao = mysql.connector.Connect(**config)
dbConexao = pymysql.Connect(**config)
cursor = dbConexao.cursor()

comando = 'SELECT * FROM Produtos'
cursor.execute(comando)

registros = None
registros = cursor.fetchall()

for registro in registros:
    print("Codigo: ", registro[0])
    print("Nome : ", registro[1])
    print("Valor : ", registro[2])
    print("\n")

cursor.close()
dbConexao.close()
import requests
import re
import time
import pandas as pd
# import requests
import csv
import numpy as np
import math
import pymysql
import json

connect = pymysql.Connect(host='*****', port=3306, user='******', passwd='****', db='meiti', use_unicode=1, charset='utf8')
# from urllib.parse import urlencode


def get_url():
    np.set_printoptions(suppress=True)
    csv_file = csv.reader(open('renmingwang.csv', 'r'))
    urls = []
    for stu in csv_file:
        name = stu[2]
        id1 = stu[6]
        time1 = stu[7]
        text = stu[8]
# Import the module used to work with a MySQL database from Python
# - Installation:
#   pip install pymysql
import pymysql

# Database handling
# Python can work with modules for many different database vendors
# (install the module that matches your database vendor).

# Database programming workflow

# 1. Create a database connection object
conn = pymysql.Connect(host='localhost', port=3306, user='******', passwd='1234', db='sakila')

# 2. Create an object that can run queries against the database server
#    (created from the connection object)
cursor = conn.cursor()

# 3. Write the SQL statement
#    - written as a string
#    - most database modules allow the trailing ; to be omitted
sql = """
select *
from rental
"""
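# Hedged continuation of the numbered workflow in the comments above; the
# original snippet stops after writing the SQL string, so the step numbers
# below are only a sketch of how it would typically continue. `rental` is a
# table of the sakila sample database named in the connection.
# 4. Execute the SQL statement through the cursor object
cursor.execute(sql)
# 5. Fetch the results (fetchall returns every remaining row)
rows = cursor.fetchall()
print(len(rows), 'rows fetched from rental')
# 6. Close the cursor and the connection when finished
cursor.close()
conn.close()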
'''
Created on May 1, 2019
# -*- coding:utf-8 -*-
@author: Administrator
'''
import pymysql
import jqdatasdk as jq

# jqdata authentication
jq.auth('13401179853', 'king179853')

# open the connection and get a cursor
connect = pymysql.Connect(host='localhost', port=3306, user='******', passwd='123456', db='xg', charset='utf8')
cursor = connect.cursor()

# fetch circulating market cap and net profit
date_list = []
sql = "SELECT date FROM rqb order by id "
cursor.execute(sql)
for row in cursor.fetchall():
    date_list.append(row)

for k in range(0, len(date_list)):
    df_volandincome = jq.get_fundamentals(jq.query(
        jq.valuation.code, jq.valuation.circulating_market_cap,
        jq.valuation.pe_ratio, jq.income.total_operating_revenue,
        jq.income.np_parent_company_owners),
def __enter__(self):
    if self.conn is None:  # the database connection may have been closed
        self.conn = pymysql.Connect(**DB_CONFIG)
    return self.conn.cursor(cursor=DictCursor)
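# A hedged companion __exit__ sketch, not part of the original snippet: it
# assumes the class keeps the connection in self.conn (as __enter__ above does)
# and that work should be committed on success and rolled back on error.
def __exit__(self, exc_type, exc_val, exc_tb):
    if exc_type is None:
        self.conn.commit()    # no exception inside the with-block: keep the changes
    else:
        self.conn.rollback()  # an exception occurred: undo uncommitted changes
    return False              # do not suppress the exception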