def runUpdateDaemon(restartTimerFunc):
    """Periodic daemon tick: push friend online/offline status updates.

    For every user with a connected daemon socket, compare the friends who are
    online right now against the set recorded on the previous tick, send one
    packed status record per change (flag 0 = went offline, 1 = came online),
    then re-arm the timer via restartTimerFunc.

    :param restartTimerFunc: zero-argument callable that schedules the next tick.
    """
    updateDao = dao.dao()
    for userID in daemonMap.keys():
        # First tick for this user: nobody was previously known to be online.
        if userID not in onlineFriends:
            onlineFriends[userID] = set()

        nowOnline = set()
        nowOffline = set()
        # getFriends yields (something, friendID) pairs; only the ID matters here.
        for (_, friendID) in updateDao.getFriends(userID):
            if friendID in onlineClients:
                nowOnline.add(friendID)
            else:
                nowOffline.add(friendID)

        # Offline: was online on the previous tick AND is offline now.
        offlinelist = onlineFriends[userID] & nowOffline
        # Online: is online now AND was not online on the previous tick.
        onlinelist = nowOnline - onlineFriends[userID]

        # One fixed-size record per changed friend; izip_longest pairs each ID
        # with the status flag (fillvalue): 0 = offline, 1 = online.
        payload = ''.join(starmap(upFormat.pack, izip_longest(offlinelist, '', fillvalue=0)))
        payload += ''.join(starmap(upFormat.pack, izip_longest(onlinelist, '', fillvalue=1)))

        # BUG FIX: remember the FULL set of currently-online friends, not just
        # the ones that newly came online this tick. The old code stored only
        # `onlinelist`, so a friend who stayed online was dropped from the
        # remembered set and re-announced as "came online" on a later tick.
        onlineFriends[userID] = nowOnline

        sock = daemonMap[userID]
        sock.send(padToSize(pktFormat.pack(0, r_status_update, len(payload)) + payload, BUFSIZE))
    del updateDao
    restartTimerFunc()
def GET(self): try: db = dao('10.157.15.161', '2AU', 'password', '2AU') cursor = db.getDao() cursor.execute("SELECT * FROM evenement") query_result = [ dict(line) for line in [ zip([column[0] for column in cursor.description], row) for row in cursor.fetchall() ] ] print query_result except Exception, e: print "Error [%r]" % (e) sys.exit(1)
def getResult(stock_name):
    """Build a portfolio summary for a comma-separated list of stock names.

    For each stock, pulls the latest data row, volume, and price from the DAO,
    runs the portfolio calculation, and renders the result page.

    :param stock_name: comma-separated stock identifiers, e.g. "AAA,BBB".
    :return: rendered 'result.html' template.
    """
    # Split the comma-separated input into the individual stock symbols.
    symbols = stock_name.split(",")
    store = dao.dao()
    holdings = {}
    print(symbols)
    for symbol in symbols:
        latest_row = store.listgetfunc(symbol)
        # Volume and latest price live under "<symbol>-v" / "<symbol>-p" keys.
        volume = store.getfunc(symbol + '-v')
        price = store.getfunc(symbol + "-p")
        holdings[symbol] = [float(latest_row[5]), int(volume), float(price)]
    result = calculation.calculation().cal_portofilo(holdings)
    print(result)
    tech_index_value = store.getfunc("index")
    return render_template('result.html', tech_index=float(tech_index_value),
                           portfolio=result, stock_name=stock_name)
Created on Feb 4, 2012 @author: Tim Biggs ''' from exceptions import Exception from structlib import * from common import * from struct import error from itertools import izip_longest, starmap import socket import hashlib import dao daemonMap = dict() dbdao = dao.dao() ''' Grabbed from python documentation for backwards compatibility with 2.5 (because Vogon is old) ''' ''' class ZipExhausted(Exception): pass def izip_longest(*args, **kwds): fillvalue = kwds.get('fillvalue') counter = [len(args) - 1] def setinel(): if not counter[0]: raise ZipExhausted
"type": "apiKey", "in": "header", "name": "access_token" } } #Initialisation du moteur d'execution de l'API app = Flask(__name__) CORS(app) api = Api(app, authorizations=authorizations) #Instanciation de la couche de données #Les paramètres seront passés à l'installation de l'image Docker #L'ensemble des paramètres d'accès à la base de données sont passés dans la commande #docker qui lance le serveur dao = dao.dao(server=sys.argv[2], username=sys.argv[3], password=sys.argv[4]) #http://localhost:8090/index.html?server=http://localhost&port=5800 #Mise en place de l'API d'obtention du token sur base d'un couple user/mot de passe_____________________________________ auth_parser = reqparse.RequestParser() auth_parser.add_argument('username', required=True, type=str, help='username to use the API') auth_parser.add_argument('password', required=True, type=str, help='password to use the API') @api.route("/auth")
class pngtree():
    """Scraper for pngtree.com category listings.

    Walks listing pages for one category/sort order, visits each image detail
    page, accumulates tag frequencies in redis sorted sets, and persists both
    the per-image tags and the aggregated counts to MySQL.
    """
    # Set the global request timeout (seconds) for all sockets.
    socket.setdefaulttimeout(50)
    # Module-global MySQL DAO shared by all methods of this class.
    global mydao
    mydao = dao('localhost', 'root', '123456', 'test')
    # global dao

    def __init__(self, host, cat, sort_rule):
        """Prepare crawl state.

        :param host: redis server host.
        :param cat: category path segment, e.g. 'free-vectors'.
        :param sort_rule: sort query value, e.g. 'popular'.
        """
        self.url = 'https://pngtree.com'
        self.cat = cat
        # Crawl date, recorded alongside each image row in MySQL.
        self.date = time.strftime("%d/%m/%Y")
        self.sort_rule = sort_rule
        # host is the redis host; both server and client must be running.
        # redis default port is 6379.
        self.pool = redis.ConnectionPool(host=host, port=6379, decode_responses=True)
        self.r = redis.Redis(connection_pool=self.pool)

    def get_list(self):
        """Crawl listing pages 1..200, process every detail page found, then
        flush the accumulated redis tag counts to MySQL and clear them."""
        for i in range(1, 201):
            # Build a listing URL like https://pngtree.com/free-vectors/193?sort=popular
            if self.cat == 'Recently-Download':
                list_url = 'https://pngtree.com/freepng/{}/{}'.format(self.cat, i)
            else:
                list_url = 'https://pngtree.com/{}/{}?sort={}'.format(self.cat, i, self.sort_rule)
            print list_url
            # Fetch one listing page (get_html returns 0 on timeout).
            list_html = self.get_html(list_url)
            if list_html != 0:
                if list_html is not None:
                    urls = list_html.xpath('//*[@id="v2-content"]/div/div[2]/div/ul/li//div//a[@class="tran"]/@href')
                    for url in urls:
                        detail_url = self.url + url
                        print detail_url
                        self.get_detail(detail_url)
        # Process the image tags: read every member and score of tag_key ...
        tag_key = '{}_{}_{}'.format(self.cat, self.sort_rule, 'tag')
        tag_list = self.r.zrange(tag_key, 0, -1, withscores=True, desc=True)
        for i in tag_list:
            tag, count = i
            print tag, count
            # ... and persist each (tag, count) into MySQL.
            png_test.redis_to_mysql(str(tag), int(count), tag_key)
        # Same for the "related" tags of each image.
        related_tag_key = '{}_{}_{}'.format(self.cat, self.sort_rule, 'related_tag')
        related_tag_list = self.r.zrange(related_tag_key, 0, -1, withscores=True, desc=True)
        for i in related_tag_list:
            tag, count = i
            print tag, count
            png_test.redis_to_mysql(str(tag), int(count), related_tag_key)
        # Drop both redis keys so the next crawl starts from zero.
        self.r.delete(tag_key, related_tag_key)

    def get_detail(self, detail_url):
        """Process one image detail page.

        If the URL is already in the pngtree_hot_tags table, replay its stored
        tags into redis and bump the row's date to today; otherwise fetch the
        page, extract tags and related tags, count them in redis, and insert a
        new MySQL row.
        """
        sql = 'select tags,related_tags from pngtree_hot_tags where url = "%s"' % detail_url
        # Look the URL up in MySQL first: hit -> refresh the date; miss -> crawl it.
        tags_saved = mydao.get_one(sql)
        # print tags_saved
        # NOTE(review): `== None` / `== False` should idiomatically be
        # `is None` / `is False`; left untouched here.
        if tags_saved == None or tags_saved == False:
            detail_html = self.get_html(detail_url)
            # get_html returns 0 on a timed-out connection.
            if detail_html != 0 and detail_html is not None:
                # Extract the tag texts.
                tags = detail_html.xpath('//*[@id="v2-details"]/div/div[2]/div[2]//text()')
                # Guard against 404-style pages with no tag block.
                if tags:
                    tag_key = '{}_{}_{}'.format(self.cat, self.sort_rule, 'tag')
                    tags_list = []
                    for i in tags:
                        if '\n' not in i:
                            print i
                            if i == '':
                                pass
                            else:
                                i = i.strip()
                                print i
                                # Count the tag in the redis sorted set.
                                tags_list.append(i)
                                self.r.zincrby(tag_key, i)
                    # Join the tag list into one comma-separated string for MySQL.
                    tags_text = ','.join(tags_list)
                    # Extract the related tags.
                    related_tags = detail_html.xpath('//*[@id="v2-details"]/div/div/div[4]//text()')
                    if related_tags:
                        related_tag_key = '{}_{}_{}'.format(self.cat, self.sort_rule, 'related_tag')
                        related_tags_list = []
                        for i in related_tags:
                            if '\n' not in i:
                                if i == 'Related recommendation:':
                                    pass
                                elif i == '':
                                    pass
                                else:
                                    print i
                                    # NOTE(review): strip('') is a no-op (strips
                                    # no characters); probably meant strip().
                                    i = i.strip('')
                                    print i
                                    related_tags_list.append(i)
                                    self.r.zincrby(related_tag_key, i)
                        related_tags_text = ','.join(related_tags_list)
                        # Store url + tags into the pngtree_hot_tags MySQL table.
                        # NOTE(review): this insert only happens when related_tags
                        # is non-empty, so pages without related tags are never saved.
                        table_name = 'pngtree_hot_tags'
                        data = {'url': detail_url, 'tags': tags_text, 'related_tags': related_tags_text, 'dates': self.date}
                        mydao.insert(table_name, data)
        else:
            # MySQL already has this URL: replay the stored tags into redis.
            tags = tags_saved[0].split(',')
            tag_key = '{}_{}_{}'.format(self.cat, self.sort_rule, 'tag')
            for i in tags:
                if '\n' not in i:
                    print i
                    if i == '':
                        pass
                    else:
                        self.r.zincrby(tag_key, i)
            # NOTE(review): this reuses tags_saved[0] (the tags column) — the
            # related_tags column is tags_saved[1]; looks like a copy-paste slip.
            related_tags = tags_saved[0].split(',')
            related_tag_key = '{}_{}_{}'.format(self.cat, self.sort_rule, 'related_tag')
            for i in related_tags:
                if '\n' not in i:
                    if i == 'Related recommendation:':
                        pass
                    elif i == '':
                        pass
                    else:
                        print i
                        # Insert into the redis zset.
                        self.r.zincrby(related_tag_key, i)
            # Refresh the row's date to today.
            restriction_str = 'url="%s"' % detail_url
            mydao.update('pngtree_hot_tags', {'dates': self.date}, restriction_str)

    def get_proxy(self):
        """Fetch the least-recently-used proxy from MySQL and return it as a
        requests-style proxies dict; returns '' on any failure."""
        sql = "select id,proxy from proxy_google_tag order by status asc limit 1 "
        try:
            # Fetch a proxy (lowest status = least recently used).
            proxy = mydao.get_one(sql)
            id, proxie = proxy
            # Update the proxy's timestamp.
            # NOTE(review): the table name is '' here — almost certainly meant
            # 'proxy_google_tag'; as written this update targets no table.
            mydao.update('', {'status': int(time.time())}, 'id=%s' % id)
            proxies = {
                'http': 'http://%s' % proxie,
                'https': 'https://%s' % proxie
            }
        except Exception, e:
            print str(e)
            proxies = ''
        return proxies
"//a[@class='btn btn-xs btn-default btn-next']").click() main(urls) except: driver.close() def scrapingtext(): WebDriverWait(driver, 25).until( EC.presence_of_all_elements_located((By.ID, "btn-readmore"))) try: # driver.find_element_by_xpath("//a[@id='btn-readmore']").click() driver.find_element_by_xpath( "//div[@class='hide-article-box text-center']").click() WebDriverWait(driver, 5) except: pass # driver.find_element_by_xpath("//a[@class='btn btn-xs btn-default btn-next']").click() # title=driver.find_element_by_xpath("//h1[@class='title-article']").text title = driver.find_element_by_xpath("//h1[1]").text context = driver.find_element_by_id("content_views").text print(title) print(context) if __name__ == '__main__': main(urls) print(urls) dao(urls)
MAC_ADDR = "{0:x}:{1:x}:{2:x}:{3:x}:{4:x}:{5:x}".format(data[28],data[29],data[30],data[31],data[32],data[33])# 28-33 pass except TypeError: print("****************TYPE ERROR**********") errFile = open("errorLog67.txt","ab") errFile.write(data) errFile.close() errFile = open("errorLog67.txt","a") errFile.write("\n") errFile.close() pass else: print(MAC_ADDR) print ("options: ", data[236:]) watched = dao.dao(dao.MacInfo('select',None,None,None,"select * from address_book where mac = '{}'".format(MAC_ADDR))) #notify listening devices if watched is None: broadcastSocket.sendto(MAC_ADDR.encode(),(LESS_IP,LESS_PORT)) else: if watched[1] == 1: print("Watching: {}".format(watched)) msg = "{} **watch** has connected to your WI-FI".format(watched[2]) broadcastSocket.sendto(msg.encode(),(LESS_IP,LESS_PORT)) else: print("Ignoring: " + watched[2]) #leave a trace outFile = open("dhcpOut.txt","a")
elif opt in ("-u", "--rule"): rule = arg #Verify if the conf params are at least valide if not os.path.exists(playbooks_path): logger.error("%s does not exist" % playbooks_path) sys.exit() #Concatenate playbooks with path if playbooks: for i in range(len(playbooks)): playbooks[i] = playbooks_path + playbooks[i] #check weather the croner call was well issued if is_croner and instances and release: #the croner part needs access to the DB db = dao.dao(host, user, passwd, db_name) AU = upgradeAutomation(release, instances, rule, defined_rules, logger) AU.run_croner(release, instance, logger, cron_file, section_name, cron_hour, cron_minute, deadline, app_location, db) #check weather the launcher call was well issued elif instances and release: #Instanciate needed classes (or lets say tools) AU = upgradeAutomation(release, instances, rule, defined_rules, logger) sm = sendMail.sendMail(instances, release, logger, deadline, opretionLaunchTime, opretionFinishTime, testPlatformLink) pb = playbook.playbook(instances, playbooks, logger) #extract given rules into an array of rules rules = AU.getRules() #Call methodes based on the given rules