def get_session(name, password):
    """Log in to Weibo with the given account and persist the session cookies.

    On success the cookie jar (plus login time and the proxy used) is
    serialized to the local ``cookie`` file and the live session is returned;
    on failure ``None`` is returned.
    """
    proxy = getIP("")
    redirect_url, session = do_login(name, password, proxy)
    if redirect_url != '':
        login_info = session.get(redirect_url, headers=headers, proxies=proxy).text
        # A "uniqueid" field in the redirect payload indicates a live login.
        match = re.search(r'"uniqueid":"(.*)",', login_info)
        if match and match.group(1):
            # Touch a profile page once to warm/validate the cookies.
            session.get('http://weibo.com/2671109275/about',
                        headers=headers, proxies=proxy)
            print('Login successful! The login account is {}'.format(name))
            serialized = json.dumps({
                'cookies': session.cookies.get_dict(),
                'loginTime': datetime.datetime.now().timestamp(),
                'proxy': proxy['http'],
            })
            with open('cookie', 'w') as cookie_file:
                cookie_file.write(serialized)
            return session
    print('login failed for {}'.format(name))
    return None
def __init__(self, account=None, ip_address=None, mac_address=None, w3=None, client_number: int = None): ''' Default constructor of the interface class, with parameters that make the code much cleaner in our client API . For now the only parameter is the client number which identifies which of the default accounts provided by ganache-cli the client sets as its default one {1 - 9} since 0 is for the server and owner of the contract ''' # In case there's no w3 given we initialize to the default server if w3 is None: self.w3 = Web3(HTTPProvider("http://localhost:8545")) #self.w3 = Web3(HTTPProvider("http://192.168.0.29:8545")) else: self.w3 = w3 # Regular register (not from proxy) if ip_address is None and mac_address is None: self.IP = utils.getIP() self.MAC = utils.getMAC() else: self.IP = ip_address self.MAC = mac_address self.contract = self._getContract(self.w3) #self.contract = self._load_contract(self.w3) # The ConciseContract class is much better for reaidng variables straight away. # If our goal is not transact something, we'll use the Concise version of the contract self.contractConcise = ConciseContract(self.contract) # set the default account for the client, if it's None we'll register and we'll get a new one # Only for tests if client_number is not None: self.w3.eth.defaultAccount = self.w3.eth.accounts[client_number] self.account = self.w3.eth.defaultAccount else: # Usamos el w3 del proxy en ese caso # The client's own address is in the account variable self.account = account # The default account is however the one of the server # DATA FOR LEASES: # The dictionary is of the shape { grant_id : amount } so for the total memory we # just add the values self.remoteStorage = {} # The dictionary is of the shape { grant_id : amount } where amount is a number from # 0 to 100. 100 would mean that the amount is equal to all the available storage on # the remote server self.remoteCPU = {} # Load the reservations self._load_reservations()
def main(rds):
    """Pop Douban detail-page URLs from the redis set 'dbds', fetch and parse them.

    Rotates the proxy IP whenever a request fails or the anti-spider page is
    returned; parsed rows are appended to a CSV, failing URLs are logged to
    text files. Exits the process when no proxy IPs remain.

    :param rds: a redis client exposing spop/sadd
    """
    ip = getIP()
    while True:
        detailUrl = rds.spop('dbds')
        if not detailUrl:
            # BUGFIX: stop immediately — the original only set a flag and
            # still issued requests.get(None) for this iteration.
            break
        try:
            res = requests.get(url=detailUrl, proxies={'https': ip}, verify=False)
        except Exception as e:
            # Put the URL back and switch to a fresh proxy before retrying.
            rds.sadd('dbds', detailUrl)
            ip = getIP()
            if not ip:
                sys.exit('IP用完了')
            print(f'请求出错,错误原因:\n{e}已更换IP:{ip}')
            logging.info(f'请求出错,错误原因:[{e}],链接:{detailUrl}')
            continue
        if '检测到有异常' in res.text:
            ip = getIP()
            if not ip:
                sys.exit('IP用完了')
            print('检测到IP有异常,已更换IP:', ip)
            rds.sadd('dbds', detailUrl)
            # BUGFIX: retry with the new IP instead of falling through and
            # feeding the anti-spider block page to the parser.
            continue
        if '页面不存在' in res.text:
            continue
        try:
            result = dbdsParser(detailUrl, res.text)
        except Exception:
            # Narrowed from a bare except: so KeyboardInterrupt/SystemExit
            # still propagate.
            writeurl2txt('data/解析错误的URL.txt', detailUrl)
        else:
            write2csv('data/豆瓣读书1030_2.csv', result)
            writeurl2txt('data/豆瓣读书存在的7位数URL.txt', detailUrl)
def getAllUrlUseTag(rds):
    """Pop Douban tag-listing URLs from redis set 'dbfl', collect book links.

    Each listing page is scraped for '#subject_list .nbg' anchors; their hrefs
    are written to a text file and pushed into the 'dbds' set for the detail
    crawler. The loop ends when 'dbfl' is empty. Rotates the proxy IP on
    request failure or anti-spider detection.

    :param rds: a redis client exposing spop/sadd/scard
    """
    ip = getIP()
    flag = 1
    while flag:
        url = rds.spop('dbfl')
        try:
            res = requests.get(
                url=url,
                verify=False,
                proxies={'https': ip},
                cookies={
                    'cookies': 'bid=TX46Fh960Io; gr_user_id=9472f59e-3423-469c-a898-4d7be0efe16f; _vwo_uuid_v2=D945973C56E9DE5A89F4A407FF5B9F65B|8193048ef938ca0f9e21e82b5744da7a; __yadk_uid=IPSJiIkXJpASML3BRiVvfPmTQxziqRaY; viewed="2230208_25849649_1019210_6849293_6849290_20365152_2060130_6885810_25780889_3315384"; ct=y; ps=y; push_noty_num=0; push_doumail_num=0; dbcl2="179755333:lBCXZdA+b1Y"; __utmv=30149280.17975; ck=Ybkc; __utmc=30149280; __utmz=30149280.1539673041.4.2.utmcsr=baidu|utmccn=(organic)|utmcmd=organic; __utmc=81379588; __utmz=81379588.1539673041.4.2.utmcsr=baidu|utmccn=(organic)|utmcmd=organic; gr_cs1_ffc94504-020a-4b55-a144-fc8e796f6f1c=user_id%3A1; _pk_ref.100001.3ac3=%5B%22%22%2C%22%22%2C1539679774%2C%22https%3A%2F%2Fwww.baidu.com%2Flink%3Furl%3DsuTcGShpmJjLainnnS6EuguD_DelMI8XRcQh3k6YmQ-S9Wsyxf3kOfuoYJfimrjL%26wd%3D%26eqid%3De2bd69540001c29e000000065bc58bc9%22%5D; _pk_ses.100001.3ac3=*; __utma=30149280.322353021.1539312805.1539677732.1539679774.6; __utma=81379588.2102712258.1539312976.1539677732.1539679774.6; ap_v=0,6.0; gr_session_id_22c937bbd8ebd703f2d8e9445f7dfd03=cf00eb62-9699-4cb3-a2cf-477014a9081e; gr_cs1_cf00eb62-9699-4cb3-a2cf-477014a9081e=user_id%3A1; gr_session_id_22c937bbd8ebd703f2d8e9445f7dfd03_cf00eb62-9699-4cb3-a2cf-477014a9081e=true; __utmb=81379588.10.10.1539679774; _pk_id.100001.3ac3=d01456c0712c87d8.1539312977.6.1539681674.1539677742.; douban-fav-remind=1; __utmb=30149280.94.4.1539681799685'
                })
        except Exception:
            # Narrowed from a bare except: request failed — rotate the proxy,
            # requeue the URL, and retry.
            ip = getIP()
            print(ip)
            rds.sadd('dbfl', url)
            continue
        if '检测到有异常请求' in res.text:
            print('检测到有异常请求')
            ip = getIP()
            print(ip)
            rds.sadd('dbfl', url)
            continue
        # Throttle a little between successful listing requests.
        time.sleep(0.8)
        soup = BeautifulSoup(res.text, 'html.parser')
        a_tags = soup.select('#subject_list .nbg')
        for tag in a_tags:
            href = tag.attrs.get('href', '')
            writeurl2txt('豆瓣读书书籍URL.txt', href)
            rds.sadd('dbds', href)
        # Loop terminates when the tag-URL set is exhausted (scard == 0).
        leftNums = rds.scard('dbfl')
        print('rds剩余:', leftNums)
        flag = leftNums
def __init__(self): """init """ # run self.keyword = keyword = CONF['device'].get('keyword', 'usb') self.device = findDevice(keyword) self.cardnumber = '' # request getip_ip = CONF['url'].get('getip_ip', '127.0.0.1') getip_port = CONF['url'].get('getip_port', 80) self.my_ip = getIP((getip_ip, getip_port)) self.url = CONF['url'].get('url', None) self.log = logging.getLogger('main') # heartbeat self.interval = CONF['interval'].get('heartbeat', 30)
def GetCertificateProperty(config):
    """Add info used for ETCD into config.

    Populates the SSL SAN entries (DNS names and IPs) that the certificate
    templates for the kube-apiserver, its aggregator, and etcd expect:
    apiserver_ssl_dns / apiserver_ssl_ip, apiserver_names_ssl_aggregator /
    master_ip_ssl_aggregator, and etcd_ssl_dns / etcd_ssl_ip.

    :param config: cluster config dict; mutated in place and returned
    """
    # Raw string (FIX): "\d" and "\." in a plain literal are invalid string
    # escapes and warn on modern Python.
    ippattern = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")

    masterips, masterdns = [], []
    for value in config["kubernetes_master_node"]:
        if ippattern.match(value):
            masterips.append(value)
        else:
            masterdns.append(value)

    # DNS entries start at index 5 (indices below are reserved by the openssl
    # template); IP entries are numbered from 0.
    config["apiserver_ssl_dns"] = "\n".join(
        "DNS." + str(i + 5) + " = " + dns for i, dns in enumerate(masterdns))
    config["apiserver_ssl_ip"] = "\n".join(
        "IP.{} = {}".format(i, sslip)
        for i, sslip in enumerate([config["api-server-ip"]] +
                                  config["ssl_localhost_ips"] + masterips))

    # kube-apiserver aggregator uses easyrsa to generate crt files; it takes a
    # flat list of master names and does not care whether each one is a DNS
    # name or an IP.
    config["apiserver_names_ssl_aggregator"] = ",".join(
        "DNS:" + name for name in config["kubernetes_master_node"])
    # TODO(harry): this only works for single master; with multiple masters we
    # need a reserved static IP to be used here and for the whole cluster.
    config["master_ip_ssl_aggregator"] = utils.getIP(
        config["kubernetes_master_node"][0])

    etcdips, etcddns = [], []
    for value in config["etcd_node"]:
        if ippattern.match(value):
            etcdips.append(value)
        else:
            etcddns.append(value)

    config["etcd_ssl_dns"] = "\n".join(
        "DNS." + str(i + 5) + " = " + dns for i, dns in enumerate(etcddns))
    config["etcd_ssl_ip"] = "\n".join(
        "IP.{} = {}".format(i, sslip)
        for i, sslip in enumerate(config["ssl_localhost_ips"] + etcdips))
    return config
def preprocess(self, request):
    """Log the client IP and requested path for an incoming request."""
    ip = getIP(request)
    url = request.path
    # FIX: parenthesized single-argument print — identical output on
    # Python 2, and also valid on Python 3 (the statement form was 2-only).
    print("request from %s to path %s" % (ip, url))
def GET_test(self, request):
    """Handle GET on the test endpoint: log the request, set a 200 status,
    and echo the caller's IP in the response body."""
    self.preprocess(request)
    client_ip = getIP(request)
    request.setResponseCode(200)
    return "request from ip %s" % client_ip
import os
import json
import socket
import utils
import time

# Dynamic-DNS-style registration daemon: every minute, if this host's IP has
# changed, send a 'register' message with the new IP and our domain name.

with open('name.conf', 'r') as f:
    domainname = f.read()

# (host, port) of the remote registration server.
conn = ('47.103.45.126', 23333)

current_ip = ""
while True:
    # FIX: fetch the IP exactly once per cycle. The original called
    # utils.getIP() separately for the comparison, the assignment, and the
    # payload, so a change between calls could register a different IP than
    # the one compared.
    new_ip = utils.getIP()
    if current_ip != new_ip:
        current_ip = new_ip
        print(
            utils.send(
                conn,
                json.dumps({
                    'type': 'register',
                    'params': {
                        'ip': current_ip,
                        'domainname': domainname
                    }
                })))
    time.sleep(60)