def get_plot_line(start, title):
    """Build one plot line: a bold HH:MM timestamp, the title, and a [CR] break.

    Args:
        start: object with a strftime method (e.g. datetime) giving the start time.
        title: programme title string.

    Returns:
        str formatted for Kodi-style markup: '[B]HH:MM[/B] <title>[CR]'.
    """
    stamp = start.strftime('%H:%M')
    # On Python 2 strftime may yield bytes; normalize to text.
    if isinstance(stamp, bytes):
        stamp = stamp.decode('UTF-8')
    return '[B]{}[/B] {}[CR]'.format(stamp, title)
def setIntegrate(self, time, mono='0'):
    """Program the device integration time over the serial link.

    Writes the command b'O<mono>,<time>\\r' to self.s and expects the single
    character 'o' back as acknowledgement; any other response terminates the
    process via sys.exit.

    Args:
        time (str): integration time in milliseconds.
        mono (str): monochromator selector (default '0').
    """
    time_bytes = str.encode(time)
    mono_bytes = str.encode(mono)
    self.s.write(b'O' + mono_bytes + b',' + time_bytes + b'\r')
    ack = self.s.readline().decode('utf-8')
    if ack != 'o':
        sys.exit("Bad confirmation for setting integration time. Response: " + ack)
    print("Integration time set to " + time_bytes.decode('utf-8') + "ms")
def handle_multi_message(run, thread, time, n, *msgs):
    """Print a multi-part message as a tree under a run/time header line.

    msgs is a flat sequence of (type, message) pairs; each message may span
    several lines, drawn with branch/continuation prefixes.
    """
    header = "{{run: {}, time: {}}}".format(run, time.decode('utf8'))
    Server.printc(header, 'multi')
    for idx in range(n):
        typ, payload = msgs[2 * idx:2 * idx + 2]
        is_last = idx == n - 1
        for lineno, text in enumerate(payload.decode('utf8').splitlines()):
            if is_last:
                branch = " └─ " if lineno == 0 else "    "
            else:
                branch = " ├─ " if lineno == 0 else " │"
            Server.printc(branch, 'multi', text, typ)
    click.echo()
def instantiate_job(jid, status, command, time):
    """Build a job dict from fields that are either all str or all UTF-8 bytes.

    The type of *jid* decides the mode: if it is (a subclass of) str, all
    fields are taken as-is; otherwise every field is decoded from UTF-8.

    Fix: use isinstance instead of ``type(jid) == str`` so str subclasses are
    handled correctly instead of crashing on ``str.decode``.

    Returns:
        dict with keys 'id', 'status', 'command', 'time'.
    """
    if isinstance(jid, str):
        return {
            'status': status,
            'command': command,
            'time': time,
            'id': jid,
        }
    return {
        'id': jid.decode('utf-8'),
        'status': status.decode('utf-8'),
        'command': command.decode('utf-8'),
        'time': time.decode('utf-8'),
    }
def parse_and_sort_unicode_piecemeal():
    """Parse the timelog file into (datetime, entry) pairs sorted by time.

    Each line has the form b'<ascii timestamp>: <utf-8 entry>'. Lines without
    the b': ' separator, or whose timestamp does not parse, are skipped.

    Fix: open the file with a context manager so the handle is closed
    deterministically (the original relied on garbage collection).

    Returns:
        list of (datetime, unicode entry) tuples sorted by timestamp.
    """
    items = []
    filename = Settings().get_timelog_file()
    with open(filename, 'rb') as stream:
        for line in stream:
            time, sep, entry = line.partition(b': ')
            if not sep:
                continue
            try:
                time = parse_datetime(time.decode('ASCII'))
            except (ValueError, UnicodeError):
                continue
            items.append((time, entry.strip().decode('UTF-8')))
    items.sort(key=itemgetter(0))
    return items
def load_bj_taxi_meteorology(timeslots=None):
    """Load TaxiBJ weather vectors for the given timeslots.

    For each requested timeslot, the weather row *preceding* it in the file
    is returned (``Weather[idx - 1]``), preserving the original behaviour.

    Fix: open the HDF5 file read-only ('r') instead of 'r+' — nothing is
    written, and 'r+' fails on a read-only data directory.

    :param timeslots: iterable of timeslot strings matching the file's
        'date' dataset (after UTF-8 decoding).
    :returns: :class:`numpy.ndarray` of weather rows, one per timeslot.
    """
    path = os.path.join(utils.get_data_path(), 'TaxiBJ', 'BJ_Meteorology.h5')
    with h5py.File(path, 'r') as f:
        weather_all = f['Weather'][()]
        date = f['date'][()]
    # bytes -> str
    date = np.array([stamp.decode('utf-8') for stamp in date])
    # Index of each requested timeslot along the date axis.
    index = [np.where(date == stamp)[0][0] for stamp in timeslots]
    # NOTE(review): the -1 offset (weather observed before the slot) looks
    # deliberate — confirm against the dataset specification.
    weather = np.asarray([weather_all[idx - 1] for idx in index])
    return weather
def read_data(rf_file=None):
    """Convert rf binary data into :class:`~numpy.ndarray` of power and time.

    .. code-block:: python

        from embers.rf_tools.rf_data import read_data
        power, times = read_data(rf_file='~/embers-data/rf.txt')

    :param rf_file: path to rf binary data file :class:`str`

    :returns:
        - power - power in dBm :class:`~numpy.ndarray`
        - times - times in UNIX :class:`~numpy.ndarray`
    """
    sep = "$Sp".encode()
    times = []
    payloads = []
    with open(rf_file, "rb") as f:
        next(f)  # discard the header line
        for line in f:
            stamp, payload = line.split(sep)
            times.append(stamp.decode())
            # Drop the trailing two newline bytes; list() turns the bytes
            # into a list of ints, one per byte.
            payloads.append(list(payload[:-2]))
    # The (-1/2) factor converts an unsigned byte to a real power value.
    power = np.single(np.asarray(payloads) * (-1 / 2))
    return (power, np.double(np.asarray(times)))
import socket
import os
import shlex
import time  # kept from the original; no longer shadowed below

# Client script: asks a server for its time+timezone, then sets the local
# system clock to the received time.
host = input("\nBaglanilacak Hedef Sunucunun IP'sini giriniz : ")
port = 142

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.connect((host, port))
    s.send(bytes("gecikme_suresi", encoding='utf-8'))
    register = s.recv(128)
    s.send(bytes("client", encoding='utf-8'))
    # Renamed from `time` so the stdlib module is no longer shadowed.
    raw_reply = s.recv(128)
    time_and_timezone = raw_reply.decode("utf-8")
    # The server sends a repr-like "[...]" payload; strip the wrapping chars.
    for character in "[]'":
        time_and_timezone = time_and_timezone.replace(character, "")
    timezone = time_and_timezone.split(',')[0]
    remote_time = time_and_timezone.split(',')[1]
    # SECURITY fix: remote_time arrives over the network — quote it so it
    # cannot inject shell commands into the `date` invocation.
    os.system('date --set %s +\"%%A %%d %%B %%Y %%H:%%M:%%S.%%6N\"'
              % shlex.quote(remote_time))
    # NOTE: the explicit s.close() was removed — the `with` block already
    # closes the socket.
def posturllib(user='', passwd='', wtype='', keyword='', types=''):
    # Weibo search scraper (Python 2): logs in with a cookie-aware opener,
    # queries the search endpoint, parses results with lxml and inserts each
    # post into MySQL. Credentials appear redacted upstream ('******').
    cout = 1  # running counter of processed posts
    # Login landing page (fill in your own URL).
    hosturl = 'http://sm.viewslive.cn/login.php'
    # Cookie processor: stores cookies sent by the server and attaches them
    # to every subsequent request.
    cj = cookielib.LWPCookieJar()
    cookie_support = urllib2.HTTPCookieProcessor(cj)
    opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
    urllib2.install_opener(opener)
    # Fetch the login page first so its cookies are captured; without them
    # the POST below does not succeed.
    h = urllib2.urlopen(hosturl)
    # Headers taken from a captured request; at least these two are needed.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:14.0) Gecko/20100101 Firefox/14.0.1',
        'Referer': 'http://sm.viewslive.cn/login.php'
    }
    # Login POST payload, also derived from a captured request.
    # NOTE(review): '******' contains no %s placeholder, so '%' formatting
    # here raises TypeError — values were redacted; restore real templates.
    postData = {
        'password': '******' % passwd,
        'username': '******' % user,
        # 'password':'******',
        # 'username':'******'
    }
    data = {
        'keyword': '%s' % keyword,
        'page': '1',
        'channel_types': "%s" % types,
        'wtype': "%s" % wtype,
    }
    # Connect to the database.
    conn = MySQLdb.connect(host="10.6.2.121", port=3306, user="******",
                           passwd="root", charset="utf8")
    cur = conn.cursor()
    # URL-encode both POST payloads.
    postData = urllib.urlencode(postData)
    data = urllib.urlencode(data)
    # Send the login request through urllib2, completing the login.
    request = urllib2.Request(hosturl, postData, headers)
    urllib2.urlopen(request)
    html = urllib2.urlopen("http://sm.viewslive.cn/search_ajax.php", data,
                           timeout=10).read()
    html = html.decode("utf8")
    # Parse the HTML into a DOM tree.
    dom = etree.HTML(html)
    for i in dom.xpath("//div[contains(@class,'WB_feed_type')]"):
        users = "".join(i.xpath(".//a[@class='bold pop-over']/text()"))
        contens = "".join(
            i.xpath(
                ".//div[@class='weibomsg fl']//text() |.//div[@class='zhuanfa_div2 fl']//text()"
            ))
        times = "".join(i.xpath(".//ul[@class='detailFooter fl']/li/a/text()"))
        urls = "".join(i.xpath(".//ul[@class='detailFooter fl']/li/a/@href"))
        zfs = "".join(i.xpath(".//ul[@class='detailFooter fr']/li[1]/text()"))
        pls = "".join(i.xpath(".//ul[@class='detailFooter fr']/li[2]/text()"))
        imgurl = "".join(i.xpath(".//img[@class='ch-image img-avatar']/@src"))
        # Normalize the scraped fields.
        content = contens.replace("\r\n", "").replace("\t", "")
        zfs = re.sub("\D+", "", zfs)
        pls = re.sub("\D+", "", pls)
        uid = re.sub("\D+", "", urls)
        mid = re.sub("http.*\/", "", urls)
        time = times.encode("utf8")
        # Posts dated with a month later than the current one are assumed to
        # be from the previous year (2015), otherwise 2016.
        month = int(re.findall("[0-9]+", time)[0])
        if month > datetime.datetime.now().month:
            time = "2015-" + time.replace("月", "-").replace("年", "-").replace(
                "日", "")
        else:
            time = "2016-" + time.replace("月", "-").replace("年", "-").replace(
                "日", "")
        # Insert into the database.
        # NOTE(review): string-built SQL from scraped content is an injection
        # risk — should use parameterized execute().
        try:
            time = time.decode("utf8")
            cur.execute(
                u"INSERT INTO weibo.sina(IR_UID,IR_MID,IR_CREATED_AT,IR_STATUS_CONTENT,IR_RTTCOUNT,IR_GROUPNAME,IR_SITENAME,KEYWORD,IR_SCREEN_NAME,IR_PROFILE_IMAGE_URL) VALUES ('%s','%s','%s','%s','%s','python新浪微博','新浪微博','%s','%s','%s')"
                % (uid, mid, time, content, zfs, keyword.decode("utf8"), users,
                   imgurl))
        except MySQLdb.Error, e:
            print "Mysql Error %d: %s" % (e.args[0], e.args[1])
        # Print the scraped fields.
        print "#############%s#############" % cout
        print u"url:" + urls
        print u"imgurl:" + imgurl
        print u"uid:" + uid
        print u"mid:" + mid
        print u"keyword:" + keyword.decode("utf8")
        print u"时间:" + time
        print u"用户:" + users
        print u"内容:" + content
        print u"转发数:" + zfs
        print u"评论数:" + pls
        # Counter.
        cout += 1
# NOTE(review): fragment of a larger `if data == ...` request-dispatch chain;
# the opening `if` and the enclosing handler scope are outside this view, so
# the fragment is not independently runnable.
new_user = USERS(nu, email, na)
conn = sqlite3.connect("data.db")
a = conn.cursor()
a.execute('INSERT INTO users VALUES(?,?,?)',
          (new_user.number, new_user.e_mail, new_user.name))
conn.commit()
conn.close()
z.send('done'.encode())
z.close()
elif data == 'test':
    # Receive four fields from the socket in a fixed order: subject,
    # description, time, date.
    sub = z.recv(1024)
    sub = sub.decode()
    des = z.recv(1024)
    des = des.decode()
    time = z.recv(1024)
    time = time.decode()
    date = z.recv(1024)
    date = date.decode()
    conn = sqlite3.connect('data2.db')
    cursor = conn.cursor()
    # NOTE(review): f-string interpolation of socket data into SQL is an
    # injection risk and breaks on unquoted text values — these should be
    # parameterized queries, e.g. execute("... SET time=? WHERE sub=?",
    # (time, sub)), as the INSERT above already does.
    cursor.execute(f"UPDATE tests SET time = {time} where sub = {sub}")
    conn.commit()
    cursor.execute(f"UPDATE tests SET day = {date} where sub = {sub}")
    conn.commit()
    cursor.execute(f"UPDATE tests SET des = {des} where sub = {sub}")
    conn.commit()
    conn.close()
elif data == 'refresh':
    conn = sqlite3.connect("data.db")
def index(request):
    # Django search view (Python 2): segments the query with pseg, maps each
    # word to an index number via the module-level `rows`, pulls matching page
    # numbers from per-word tables N<idx>, then renders snippets with the
    # query words highlighted. Optional GET flags 'year'/'month'/'week'
    # filter results by the page's stored timestamp.
    # NOTE(review): relies on module-level `rows`, `cur`, `cur_year`,
    # `cur_month`, `cur_week` defined elsewhere in this file.
    input_get = ''
    words = []       # segmented query words
    ideal_nums = []  # index numbers whose word matches the query
    info_list = []   # result dicts passed to the template
    info_list_num = 0
    if request.method == 'GET':
        if request.GET.has_key('input'):
            input_get = request.GET['input']
            wordstmp = pseg.cut(input_get)
            for word in wordstmp:
                #print word.word
                words.append(word.word)
            #print words[0]
        if request.GET.has_key('search'):
            if input_get == '':
                return render(request, 'notfound.html')
            else:
                # Match each query word against the word table; r[1] carries a
                # trailing character that is stripped for comparison.
                for word in words:
                    word = word.replace("\n", "")
                    for r in rows:
                        if word == r[1][0:-1]:
                            ideal_nums.append(r[0])
                #print ideal_num
                if len(ideal_nums) == 0:
                    return render(request, 'notfound.html')
                for ideal_num in ideal_nums:
                    cur.execute('select html_num from N' + str(ideal_num))
                    html_nums = cur.fetchall()
                    #print html_nums
                    for h in html_nums:
                        #print h[0]
                        # Page snippet file: first line title, second line
                        # skipped, rest is the body.
                        fcontain = open('./Search/contain/' + str(h[0]) + '.txt')
                        title = fcontain.readline()
                        #print title
                        bl = fcontain.readline()
                        ff = fcontain.read()
                        ff = ff.decode("utf-8")
                        # Truncate long bodies to ~400 chars for the snippet.
                        if len(ff) < 400:
                            contain = ff
                        else:
                            contain = ff[0:401]
                        # Highlight each query word in the snippet.
                        for word in words:
                            word = word.replace(" ", "")
                            if word == "":
                                continue
                            tmp_word = "<font color='red'>" + word + "</font>"
                            #word = word.decode("utf-8")
                            #tmp_word = tmp_word.decode("utf-8")
                            contain = contain.replace(word, tmp_word)
                            #print contain
                        # Timestamp file; format assumed 'YYYY-MM-DD...'
                        # (year at [0:4], month at [5:7], day at [8:10]).
                        ftime = open('./Search/time/' + str(h[0]) + '.txt')
                        time = ftime.read()
                        time = time.decode("utf-8")
                        if request.GET.has_key('year'):
                            #print 'ahha'
                            if time[0:4] != str(cur_year):
                                #print time[0:4], cur_year
                                continue
                        elif request.GET.has_key('month'):
                            tmp_month = ""
                            if cur_month <= 9:
                                tmp_month = "0" + str(cur_month)
                            else:
                                tmp_month = str(cur_month)
                            if time[5:7] != tmp_month:
                                #print time[5:7], tmp_month
                                continue
                        elif request.GET.has_key('week'):
                            tmp_week = ""
                            if time[8] == "0":
                                tmp_week = time[9]
                            else:
                                tmp_week = time[8:10]
                            if cur_week - int(tmp_week) > 7:
                                print cur_week, tmp_week
                                continue
                        info_list.append({
                            'title': title,
                            'contain': contain,
                            'time': time,
                            'url_num': str(h[0])
                        })
                        info_list_num += 1
                return render(request, 'index.html', {
                    'info_list': info_list,
                    'info_list_num': info_list_num
                })
    return render(request, 'home.html')
def _build_line(line, time, place): return "{} - in {} minutes - to {}".format(line.decode('latin-1'), time.decode('latin-1'), place.decode('latin-1'))
def __format_gui__(self, stat, time):
    '''
    Private method to format an output as the WRPC GUI does.
    ONLY compatible for WR Core build e261d79-dirty or newer

    Args:
        stat (str) : Raw data from stat command
        time (str) : Raw data from time command
    '''
    # Counts how many ports have usable sync info; decremented per down link.
    sync_info_valid = 2
    raw = stat.decode('utf8')
    time = time.decode('utf8')
    board_mode = self.mode_regex.search(raw).group(0)
    # NOTE(review): if mode_regex does not match, .search() returns None and
    # .group(0) raises AttributeError before this None check can fire.
    if board_mode is None:
        raise Error(p7sException.err[Ewberrno.ENODEV],
                    "Could not retrieve mode from WR-LEN")
    wr0 = self.wr0_regex.search(raw)
    wr1 = self.wr1_regex.search(raw)
    if wr0 is None or wr1 is None:
        print("\n")
        return
    # Group 1 of each port regex is the enable flag ('1' = port enabled).
    wr0_enable = wr0.group(1) == '1'
    wr1_enable = wr1.group(1) == '1'
    sys.stdout.write(
        "\033[94;1mWR PTP Core Sync Monitor: PPSI - WRLEN\033[0m\n")
    sys.stdout.write("\033[2mEsc = ctrl-c\033[0m\n\n")
    sys.stdout.write("\033[94mTAI time:\033[0m \033[1m%s\033[0m\n\n" %\
        self.time_regex.search(time).group(0))
    sys.stdout.write("\033[1;92mWR-LEN mode : \033[1m%s\033[0m\n\n" %
                     board_mode)
    sys.stdout.write("\033[94;1mLink status:\033[0m\n\n")
    # Removed to save time
    # ip_raw = self.vuart.sendCommand("ip")
    # ip = self.ip_regex.search(ip_raw.decode('utf8'))
    #
    # sys.stdout.write("\033[1mIPv4:\033[0m%s\033[92m\033[0m\n\n" % (ip))
    # mode = self.vuart.sendCommand("mode")
    try:
        mode = self.__secure_sendCommand__("mode")
    except Error as e:
        raise e
    # Port wr0 info
    if wr0_enable:
        # A port is master when the board is "master" or slaved to the OTHER
        # port; the lock flag selects the "Locked"/"Link down" suffix.
        m = "WR Master" if mode == "master" or mode == "slave_wr1" else "WR Slave"
        sys.stdout.write("\033[1mwr0 :\033[92m Link up \033[0m\033[2m(RX: %s, TX: %s), mode: \033[0m\033[1m%s \033[0m\033[1;92m%s\033[0m\n\n" %\
            (wr0.group(self.INDEX_RX), wr0.group(self.INDEX_TX), m,
             "Locked" if wr0.group(self.INDEX_LOCK)=="1" else "Link down"))
    else:
        sys.stdout.write("\033[1mwr0 : \033[1;31mLink down\033[0m\n\n")
        sync_info_valid -= 1
    # Port wr1 info
    if wr1_enable:
        m = "WR Master" if mode == "master" or mode == "slave_wr0" else "WR Slave"
        sys.stdout.write("\033[1mwr1 :\033[92m Link up \033[0m\033[2m(RX: %s, TX: %s), mode: \033[0m\033[1m%s \033[0m\033[1;92m%s\033[0m\n\n" %\
            (wr1.group(self.INDEX_RX), wr1.group(self.INDEX_TX), m,
             "Locked" if wr1.group(self.INDEX_LOCK)=="1" else "Link down"))
    else:
        sys.stdout.write("\033[1mwr1 : \033[1;31mLink down\033[0m\n\n")
        sync_info_valid -= 1
    show_fail = False
    # Print timing parameters only when at least one port is up.
    if sync_info_valid >= 1:
        rtt = self.rtt_regex.search(raw)
        if rtt == None:
            show_fail = True
        else:
            ss = self.ss_regex.search(raw).group(1)
            syncs = self.syncs_regex.search(raw).group(1)
            sys.stdout.write(
                "\033[1mServo state: %s\033[0m\n" % ss)
            sys.stdout.write(
                "\033[1mSynchronization source: %s\033[0m\n\n" % syncs)
            sys.stdout.write("\033[34mTiming parameters:\033[0m\n\n")
            rtt = rtt.group(1)
            sys.stdout.write(
                "\033[2mRound-trip time (mu): \033[0m\033[1;97m%s ps\033[0m\n" % rtt)
            msdel = self.msdel_regex.search(raw).group(1)
            sys.stdout.write(
                "\033[2mMaster-slave delay: \033[0m\033[1;97m%s ps\033[0m\n" % msdel)
            mphydel_tx = self.mphydel_regex.search(raw).group(1)
            mphydel_rx = self.mphydel_regex.search(raw).group(2)
            sys.stdout.write("\033[2mMaster PHY delays: \033[0m\033[1;97mTX: %s ps, RX: %s ps\033[0m\n" %\
                (mphydel_tx, mphydel_rx))
            sphydel_tx = self.sphydel_regex.search(raw).group(1)
            sphydel_rx = self.sphydel_regex.search(raw).group(2)
            sys.stdout.write("\033[2mSlave PHY delays: \033[0m\033[1;97mTX: %s ps, RX: %s ps\033[0m\n" %\
                (sphydel_tx, sphydel_rx))
            asym = self.asym_regex.search(raw).group(1)
            sys.stdout.write(
                "\033[2mTotal Link asymmetry: \033[0m\033[1;97m%s ps\033[0m\n" % asym)
            crtt = self.crtt_regex.search(raw).group(1)
            sys.stdout.write(
                "\033[2mCable rtt delay: \033[0m\033[1;97m%s ps\033[0m\n" % crtt)
            cko = self.cko_regex.search(raw).group(1)
            sys.stdout.write(
                "\033[2mClock offset: \033[0m\033[1;97m%s ps\033[0m\n" % cko)
            psetp = self.psetp_regex.search(raw).group(1)
            sys.stdout.write(
                "\033[2mPhase setpoint: \033[0m\033[1;97m%s ps\033[0m\n" % psetp)
            sys.stdout.write(
                "\033[2mUpdate interval: \033[0m\033[1;97m%.1f sec\033[0m\n" % self.refresh)
    # NOTE(review): because this is an elif, the show_fail branch is
    # unreachable when sync_info_valid >= 1 — likely an ordering bug, left
    # as-is to preserve behaviour.
    elif sync_info_valid < 2 or show_fail:
        sys.stdout.write(
            "\033[1;31mMaster mode or sync info not valid\033[0m\n\n")
def parseData(driver,html):
    # Scrape a user's activity feed (Python 2, Selenium + BeautifulSoup):
    # expands the "more activity" link until exhausted, then extracts per-post
    # date, time, username, thread number, subforum id, unique post id and the
    # action type ("replied"/"started") into a flat list passed to
    # writeintocsv.
    # instantiate a soup
    soup = BeautifulSoup(html)
    moreactivity = driver.find_element_by_id("moreactivitylink")
    asuser=driver.find_element_by_id("asuser")
    asuser.click()
    # Keep clicking until the "more activity" link disappears.
    while moreactivity.text != "":
        moreactivity.click()
    # find all <div> with class "content hasavatar"
    profiles = soup.find_all(attrs = {"class": "content hasavatar"})
    # lists
    lProfiles = []
    # in each <a>
    for profile in profiles:
        try:
            # print profile
            datetime = profile.span.text
            # "date, time" pair split on the comma.
            datecombo = (datetime.strip()).split(",")
            replied = profile.find("div",{"class": "title"})
            name=replied.a["href"]
            firsta=replied.a
            # Third <a> in the title block links to the subforum.
            subforum = firsta.findNext("a")
            subforum=subforum.findNext("a")
            subforum=subforum["href"]
            subforum=subforum.split("?")
            id = profile.find("div",{"class": "fulllink"})
            linkcontent = id.a
            linktext = linkcontent["href"]
            startedvalue=linktext
            idcombo=linktext.split("&")
            ######-----------------------
            # Normalize the action text to "replied" / "started".
            resp = (replied.text.strip()).encode("utf-8")
            if "replied" in resp: resp="replied";
            if "started" in resp: resp= "started"
            name= name.split("?")
            username=name[1]
            date=(datecombo[0].strip()).encode("utf-8")
            time=(datecombo[1].strip()).encode("utf-8")
            # NOTE(review): decode/encode round-trip through latin-1 looks like
            # a mojibake repair for mis-encoded pages — confirm necessity.
            justtime=time.decode("utf-8").encode("latin-1").decode("utf-8")
            time= justtime.strip()
            time = time.encode("utf-8")
            threadid = (idcombo[0].strip()).encode("utf-8")
            threadnum = threadid.split("?")
            threadnum = (threadnum[1]).encode("utf-8")
            # Started threads carry the id in the base link; replies carry it
            # in the second query parameter.
            if resp == "started":
                uniqueid=startedvalue.split("?")
                uniqueid=uniqueid[1]
            else:
                uniqueid = (idcombo[1].strip()).encode("utf-8")
                uniqueid=uniqueid.split("#")
                uniqueid=(uniqueid[0]).split("=")
                uniqueid=uniqueid[1]
            subforumid=subforum[1]
            # lProfiles.append("\n")
            lProfiles.append(date)
            lProfiles.append(time)
            lProfiles.append(username)
            lProfiles.append(threadnum)
            lProfiles.append(subforumid)
            lProfiles.append(uniqueid)
            lProfiles.append(resp)
            lProfiles.append("\n\r")
        # NOTE(review): bare except silently drops any malformed profile —
        # consider at least logging the failure.
        except:
            pass
        # Reset the per-profile locals before the next iteration.
        datetime = ""
        datecombo = ""
        replied = ""
        id = ""
        linkcontent = ""
        linktext = ""
        idcombo = ""
        time = ""
        date = ""
        threadid = ""
        uniqueid = ""
        resp = ""
    writeintocsv(lProfiles)
    return lProfiles
def decode_time(timeslots):
    """Decode each UTF-8 byte timestamp in *timeslots* into a str.

    Args:
        timeslots: iterable of bytes objects.

    Returns:
        list of decoded strings, in the original order.
    """
    decoded = []
    for stamp in timeslots:
        decoded.append(stamp.decode('utf-8'))
    return decoded