def show_graph_steps(self): new_evdlist = list() #print "", Weekday[self.getDayOfWeek("11/12/1970")] evdlist = self.personis_um.get_evidence_new(context=['Devices','Fitbit','Activity'], componentid="steps") import datetime prev_date = date(2000,1,1) i = 0 newval = 0 weekday_data = [0,0,0,0,0,0,0] for ev in evdlist: print datetime.datetime.fromtimestamp(int(ev.time)).strftime('%Y-%m-%d'), ev.value try: um_time = ev.time import datetime tt = time.localtime(int(um_time)) track_date = date(tt[0],tt[1],tt[2]) #datetime.datetime.fromtimestamp(int(time)).strftime('%Y-%m-%d') if prev_date != track_date: if newval != 0: new_evdlist.append(int(newval)) print "Fitbit data on %s is %s"%(str(prev_date), newval) weekday_data[date.weekday(prev_date)] += newval newval = int(ev.value) prev_date = track_date else: newval += int(ev.value) except Exception,e: print e return "Error: "+str(e)
def _parse_datestring(dstr):
    """Parse a simple datestring returned by the S3 API and return a
    datetime object converted to the local timezone.

    The regular expression (and therefore this function) assumes the
    incoming date is expressed in GMT/UTC.  Raises ValueError when the
    string does not match DATESTR_RE or contains an invalid date.
    """
    match = DATESTR_RE.match(dstr)
    if not match:
        raise ValueError("Could not parse date string: " + dstr)
    fields = match.groupdict()
    # Build the UTC datetime; month_name is translated to 1-based month.
    utc_dt = datetime(
        int(fields['year']),
        int(MONTH_NAMES.index(fields['month_name'])) + 1,
        int(fields['day']),
        int(fields['hour']),
        int(fields['minute']),
        int(fields['second']),
    )
    # Round-trip through an epoch timestamp to shift UTC -> local time.
    local_struct = time.localtime(calendar.timegm(utc_dt.timetuple()))
    return datetime(*local_struct[:6])
def CreateComment():
    """Flask view: create a comment on a dish.

    Expects POST with: token (auth), DishID, Content.  Returns a JSON
    message describing success or the first validation failure.
    """
    if request.method=='GET':
        return json.dumps({'message':'Please use method POST!'})
    if request.method=='POST':
        token=request.values.get('token')
        if token is None:
            return json.dumps({'message':'Need Token!'})
        user=User.verify_auth_token(token)
        # verify_auth_token returns an error string on failure.
        if type(user) is types.StringType:
            return json.dumps({'message':user})
        if request.values.get('DishID') is None:
            return json.dumps({'message':'Need DishID!'})
        # BUG FIX: Query.all() returns a list (possibly empty), never None,
        # so the original `... .all() is None` check could never fire and an
        # invalid DishID was silently accepted.  Test emptiness instead.
        if not YMDish.query.filter(YMDish.DishID==request.values.get('DishID')).all():
            return json.dumps({'message':'DishID is invalid!'})
        if request.values.get('Content') is None:
            return json.dumps({'message':'Need Content!'})
        else:
            ym_dish_comments=YMDishComment(UserName=user.UserName,DishID=request.values.get('DishID')
                ,Time=time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
                ,Content=request.values.get('Content'))
            try:
                ym_dish_comments.save()
            except Exception:
                # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
                # are not swallowed; save failure is reported as a duplicate.
                return json.dumps({'message':'Comment Existed!'})
            return json.dumps({'message':'Add Comment Success!'})
def intialise_proxy_manager(options):
    """
    Proxy Manager initialization.

    :param dict options: Proxy manager configuration parameters.

    Populates options['OutboundProxy'] with the first available proxy when
    'Botnet_mode' is configured ("miner" mines proxies, "list" loads them
    from the file given in Botnet_mode[1]); aborts the framework when no
    proxy survives the aliveness check.
    """
    proxy_manager = None
    if options['Botnet_mode'] is not None:
        proxy_manager = Proxy_manager()
        answer = "Yes"
        proxies = []
        if options['Botnet_mode'][0] == "miner":
            miner = Proxy_Miner()
            proxies = miner.start_miner()

        if options['Botnet_mode'][0] == "list":  # load proxies from list
            proxies = proxy_manager.load_proxy_list(
                options['Botnet_mode'][1]
            )
            answer = raw_input(
                "[#] Do you want to check the proxy list? [Yes/no] : "
            )

        if answer.upper() in ["", "YES", "Y"]:
            proxy_q = multiprocessing.Queue()
            proxy_checker = multiprocessing.Process(
                target=Proxy_Checker.check_proxies,
                args=(proxy_q, proxies,)
            )
            logging.info("Checking Proxies...")
            start_time = time.time()
            proxy_checker.start()
            proxies = proxy_q.get()
            proxy_checker.join()
        proxy_manager.proxies = proxies
        proxy_manager.number_of_proxies = len(proxies)

        if options['Botnet_mode'][0] == "miner":
            logging.info("Writing proxies to disk(~/.owtf/proxy_miner/proxies.txt)")
            miner.export_proxies_to_file("proxies.txt", proxies)
        if answer.upper() in ["", "YES", "Y"]:
            # NOTE(review): the -3600 offset looks like a timezone hack for
            # formatting an elapsed duration with %H — confirm.
            logging.info(
                "Proxy Check Time: %s",
                time.strftime(
                    '%H:%M:%S',
                    time.localtime(time.time() - start_time - 3600)
                )
            )
            cprint("Done")

        # BUG FIX: was `is 0` — identity comparison on an int is a CPython
        # small-int-cache accident, not a guaranteed equality test.
        if proxy_manager.number_of_proxies == 0:
            ServiceLocator.get_component("error_handler").FrameworkAbort("No Alive proxies.")

        proxy = proxy_manager.get_next_available_proxy()

        # check proxy var... http:// sock://
        options['OutboundProxy'] = []
        options['OutboundProxy'].append(proxy["proxy"][0])
        options['OutboundProxy'].append(proxy["proxy"][1])
def rate_limit_status(twitter):
    """Print current Twitter API rate limit status."""
    status = twitter.account.rate_limit_status()
    remaining = status['remaining_hits']
    hourly = status['hourly_limit']
    reset_epoch = status['reset_time_in_seconds']
    print("Remaining API requests: %i/%i (hourly limit)"
          % (remaining, hourly))
    seconds_left = int(reset_epoch - time.time())
    reset_local = time.asctime(time.localtime(reset_epoch))
    print("Next reset in %is (%s)" % (seconds_left, reset_local))
def display(adc_temp, adc_co2):
    """Scale raw ADC readings into temperature/CO2 values, publish them via
    module globals, and print a semicolon-separated sample line.

    NOTE(review): `global datetime` rebinds the module-level name `datetime`
    to a timestamp *string*, shadowing any imported datetime module — verify
    nothing else in this file imports/uses the datetime module after this runs.
    """
    global datetime
    global co2
    global temp
    temp = adc_temp * 5      # scale factor 5 -> degrees (units assumed; confirm)
    co2 = adc_co2 * 200      # scale factor 200 -> ppm (units assumed; confirm)
    # Timestamp rounded down to the current minute ("HH:MM:00").
    datetime = (time.strftime("%Y-%m-%d ") + time.strftime("%H:%M:00"))
    print (time.strftime("%H:%M:%S",time.localtime()),';',"{0:04f}".format(adc_temp),';', temp,';', "{0:04f}".format(adc_co2),';',co2)
def PassSubmit():
    """Approve an expert application: mark the record usable, derive a
    certificate id from an MD5 of the username, and set a validity date
    one leap-year (31622400 s) from now.  Returns the new values as JSON.
    """
    username = request.values.get("UserName")
    expert_info = Expert_info.query.filter(Expert_info.UserName == username).first()
    expert_info.Statue = u'可用'
    # Certificate id: characters 1..9 of the username's MD5 hex digest.
    digest = hashlib.md5()
    digest.update(username)
    cert_suffix = str(digest.hexdigest())[1:10]
    cert_id = 'zj-' + cert_suffix
    expert_info.ExpertCertificateID = cert_id
    # Validity: now + 31622400 seconds (366 days).
    valid_struct = time.localtime(time.time() + 31622400)
    expert_info.ValidTime = time.strftime('%Y-%m-%d', valid_struct)
    expert_info.save()
    return json.dumps({'time': time.strftime('%Y-%m-%d', valid_struct),
                       'ExpertCertificateID': cert_id})
def __init__(self):
    """Initialise an empty sensor-sample record.

    All reading fields start as None until the first sample arrives;
    timezone info is captured once at construction time.
    """
    # Current readings — unset until first sample.
    self.temp = None
    self.humid = None
    self.pressure = None
    self.time = None
    # Running extremes.
    self.max = None
    self.min = None
    self.MINTEMP = -40.0  # minimum valid temperature (deg C)
    # only honors current timezone (plot will not be correct across a DST change)
    self.timezone = time.timezone
    self.TZ = time.strftime('%Z', time.localtime())
def json_statistics(request):
    """Django view: assemble dashboard statistics as JSON.

    Produces: two pie charts (total vs blocked DNS queries, all-time and
    today), top-5 filtered/blocked URLs, and a 5-month bar chart of
    privoxy-filtered and dnsmasq-blocked counts.
    """
    logger.debug("parsing logs")
    utils.exec_upri_config('parse_logs')
    # SQL expression truncating log_date to month granularity (DB-specific).
    truncate_date = connection.ops.date_trunc_sql('month', 'log_date')
    privoxy_qs = PrivoxyLogEntry.objects.extra({'month':truncate_date})
    privoxy_log = privoxy_qs.values('month').annotate(Count('pk')).order_by('month')
    dnsmasq_qs = DnsmasqBlockedLogEntry.objects.extra({'month': truncate_date})
    dnsmasq_log = dnsmasq_qs.values('month').annotate(Count('pk')).order_by('month')
    # monthly[0] = dnsmasq counts, monthly[1] = privoxy counts, 5 months each.
    monthly = [[0]*5, [0]*5]
    now = time.localtime()
    # Localised names of the last five months, oldest first.
    months = [_localdate(datetime.fromtimestamp(time.mktime((now.tm_year, now.tm_mon - n, 1, 0, 0, 0, 0, 0, 0))),"F") for n in reversed(range(5))]
    for entry in privoxy_log:
        cur_month = _localdate(datetime.strptime(entry['month'], '%Y-%m-%d'), "F")
        monthly[1][months.index(cur_month)] = entry['pk__count']
    for entry in dnsmasq_log:
        cur_month = _localdate(datetime.strptime(entry['month'], '%Y-%m-%d'), "F")
        monthly[0][months.index(cur_month)] = entry['pk__count']
    # Top URLs by hit count (queryset slicing limits to 5 below).
    privoxy_log = PrivoxyLogEntry.objects.values('url').annotate(Count('pk')).order_by('-pk__count')
    filtered_pages = list()
    dnsmasq_log = DnsmasqBlockedLogEntry.objects.values('url').annotate(Count('pk')).order_by('-pk__count')
    blocked_pages = list()
    for entry in privoxy_log[0:5]:
        #print entry
        filtered_pages.append({"url": entry['url'], "count": entry['pk__count']})
    for entry in dnsmasq_log[0:5]:
        #print entry
        blocked_pages.append({"url": entry['url'], "count": entry['pk__count']})
    today = datetime.now().date()
    total_blocked_queries = DnsmasqBlockedLogEntry.objects.count()
    today_blocked_queries = DnsmasqBlockedLogEntry.objects.filter(log_date__contains=today).count()
    # Pie data: [not-blocked, blocked].
    pie1_data = [DnsmasqQueryLogEntry.objects.count() - total_blocked_queries, total_blocked_queries]
    pie2_data = [DnsmasqQueryLogEntry.objects.filter(log_date__contains=today).count() - today_blocked_queries, today_blocked_queries]
    return HttpResponse(json.dumps({'pie1_data': { 'series': pie1_data }, 'pie2_data': { 'series': pie2_data },
        'filtered_pages': filtered_pages, 'blocked_pages': blocked_pages, 'bar_data': { 'labels': months, 'series': monthly }}), content_type="application/json")
def statuses(twitter, screen_name, tweets, mentions=False, favorites=False, received_dms=None, isoformat=False):
    """Get all the statuses for a screen name.

    Pages backwards through the timeline by decrementing max_id, merging
    each portion into the ``tweets`` dict (mutated in place).  Handles
    Twitter API errors: 401/404 abort, 400 sleeps until the rate-limit
    reset, 502 and transport errors retry with backoff via ``Fail``.
    Stops when a portion yields fewer than 190 new tweets.
    """
    max_id = None
    fail = Fail()  # exponential-ish backoff helper; reset after each success
    # get portions of statuses, incrementing max id until no new tweets appear
    while True:
        try:
            portion = statuses_portion(twitter, screen_name, max_id, mentions,
                                       favorites, received_dms, isoformat)
        except TwitterError as e:
            if e.e.code == 401:
                err("Fail: %i Unauthorized (tweets of that user are protected)"
                    % e.e.code)
                break
            elif e.e.code == 400:
                # Rate-limited: sleep until the advertised reset time.
                err("Fail: %i API rate limit exceeded" % e.e.code)
                rate = twitter.account.rate_limit_status()
                reset = rate['reset_time_in_seconds']
                reset = time.asctime(time.localtime(reset))
                delay = int(rate['reset_time_in_seconds']
                            - time.time()) + 5  # avoid race
                err("Hourly limit of %i requests reached, next reset on %s: "
                    "going to sleep for %i secs" % (rate['hourly_limit'],
                                                    reset, delay))
                fail.wait(delay)
                continue
            elif e.e.code == 404:
                err("Fail: %i This profile does not exist" % e.e.code)
                break
            elif e.e.code == 502:
                err("Fail: %i Service currently unavailable, retrying..."
                    % e.e.code)
            else:
                err("Fail: %s\nRetrying..." % str(e)[:500])
                fail.wait(3)
        except urllib2.URLError as e:
            err("Fail: urllib2.URLError %s - Retrying..." % str(e))
            fail.wait(3)
        except httplib.error as e:
            err("Fail: httplib.error %s - Retrying..." % str(e))
            fail.wait(3)
        except KeyError as e:
            err("Fail: KeyError %s - Retrying..." % str(e))
            fail.wait(3)
        else:
            # Count how many tweets in this portion were actually new.
            new = -len(tweets)
            tweets.update(portion)
            new += len(tweets)
            err("Browsing %s statuses, new tweets: %i"
                % (screen_name if screen_name else "home", new))
            # A short portion (< 190 of the ~200 requested) means we reached
            # the end of the available timeline.
            if new < 190:
                break
            max_id = min(portion.keys())-1  # browse backwards
            fail = Fail()
def get_fitbit_data_pie(self,form,current_date,date_list,interval,keys): evdlist = self.personis_um.get_evidence_new(context=['Devices','Fitbit','Activity'], componentid=form) #---check the evidence list and match the date sedentary = light = moderate = intense = 0 my_list = [{'type':'sedentary','value':0},{'type':'light','value':0},{'type':'moderate','value':0},{'type':'intense','value':0}] val_list = [0,0,0,0] if evdlist: for ev in evdlist: try: #---Take the user given time or creation time if ev.time: um_time = ev.time else: um_time = ev.creation_time tt = time.localtime(int(um_time)) track_date = date(tt[0],tt[1],tt[2]) print "Track_date: %s"%str(track_date) #------ if evidence date is found in the datelist then add it to the dictionary found = 0 for dt in date_list: print "Chech date %s" % str(dt) if track_date == dt: print "Track_date: %s"%str(track_date) found = 1 if found == 1: if ev.comment == "sedentaryMinutes": val_list[0] = val_list[0] + ev.value print "Inside sedentary: Year:%d, Month:%d, Day:%d, Value: %d" %(tt[0],tt[1],tt[2],ev.value) elif ev.comment == "lightlyActiveMinutes": val_list[1] = val_list[1]+ev.value print "Inside light: Year:%d, Month:%d, Day:%d, Value: %d" %(tt[0],tt[1],tt[2],ev.value) elif ev.comment == "fairlyActiveMinutes": val_list[2] = val_list[2]+ev.value else: val_list[3] = val_list[3] + ev.value except Exception,e: print e data_dict = [] activemins = ["sedentaryMinutes","lightlyActiveMinutes","fairlyActiveMinutes","veryActiveMinutes"] for i in range(len(val_list)): taglist = list() taglist.append(activemins[i]) taglist.append((val_list[i]/7)) data_dictionary = dict(itertools.izip(keys, taglist)) data_dict.append(data_dictionary) print json.dumps(data_dict) return json.dumps(data_dict)
def NotPass():
    """Reject an expert application: record (or update) the rejection reason
    and mark the expert record as rejected.  Returns 'good'.
    """
    reseaon=Reseaon.query.filter(Reseaon.UserName==request.values.get("UserName")).first()
    if reseaon == None:
        # BUG FIX: message was a byte string here but u"被驳回" in the update
        # branch below — made consistent (unicode) so DB contents match.
        reseaon=Reseaon(UserName=request.values.get("UserName"),ReseaonContext=request.values.get("NotPassResult"),CreateTime=time.strftime('%Y-%m-%d',time.localtime(time.time())),Message=u"被驳回")
    else:
        # BUG FIX: was `reseaon.CreateTime.CreateTime = ...`, which set an
        # attribute on the CreateTime string instead of updating the column
        # (compare Abort(), which assigns reseaon.CreateTime directly).
        reseaon.CreateTime=time.strftime('%Y-%m-%d',time.localtime(time.time()))
        reseaon.Message=u"被驳回"
        reseaon.ReseaonContext=request.values.get("NotPassResult")
    reseaon.save()
    expert_info=Expert_info.query.filter(Expert_info.UserName==request.values.get("UserName")).first()
    expert_info.Statue=u'已驳回'
    expert_info.save()
    return 'good'
def Abort():
    """Terminate an expert's validity: record (or update) the abort reason,
    mark the expert record expired, and back-date ValidTime to two hours
    ago.  Returns the date string that was stored.
    """
    # Timestamp two hours (7200 s) in the past.
    cutoff = time.localtime(time.time()-7200)
    cutoff_str = time.strftime('%Y-%m-%d', cutoff)
    username = request.values.get("UserName")
    reason_text = request.values.get("NotPassResult")
    reseaon = Reseaon.query.filter(Reseaon.UserName == username).first()
    if reseaon is None:
        # First rejection for this user: create a fresh reason row.
        reseaon = Reseaon(UserName=username, ReseaonContext=reason_text,
                          CreateTime=cutoff_str, Message="被中止")
    else:
        # Update the existing reason row in place.
        reseaon.CreateTime = cutoff_str
        reseaon.Message = u"被中止"
        reseaon.ReseaonContext = reason_text
    reseaon.save()
    expert_info = Expert_info.query.filter(Expert_info.UserName == username).first()
    expert_info.Statue = u'失效'
    expert_info.ValidTime = cutoff_str
    expert_info.save()
    return cutoff_str
def get_fitbit_data_column(self,form,current_date,date_list,interval,keys): daycount = 1 #---check the evidence list and match the date print "Showing data for %s" % form datadict_list = [] evdlist = self.personis_um.get_evidence_new(context=['Devices','Fitbit','Activity'], componentid=form) if evdlist: for ev in evdlist: print ev.flags[0] if ev.flags[0] == "summary": try: #---Take the user given time or creation time if ev.time: um_time = ev.time else: um_time = ev.creation_time tt = time.localtime(int(um_time)) track_date = date(tt[0],tt[1],tt[2]) print "Track_date: %s"%str(track_date) #------ if evidence date is found in the datelist then add it to the dictionary print "Outside: Year:%d, Month:%d, Day:%d, Value: %d" %(tt[0],tt[1],tt[2],ev.value) found = 0 for dt in date_list: print "Chech date %s" % str(dt) if track_date == dt: print "Track_date: %s"%str(track_date) found = 1 if found == 1: #print datetime.datetime.fromtimestamp(int(um_time)).strftime('%Y-%m-%d'), ev.value #new_evdlist.append(int(ev.value)) tag_list=list() tag_list.append(tt[0]) tag_list.append(tt[1]) tag_list.append(tt[2]) tag_list.append(ev.value) print "Inside: Year:%d, Month:%d, Day:%d, Value: %d" %(tt[0],tt[1],tt[2],ev.value) data_dictionary = dict(itertools.izip(keys, tag_list)) datadict_list.append(data_dictionary) #prev_date = prev_date + timedelta(days=1) #daycount = daycount + 1 except Exception,e: print e print json.dumps(datadict_list) return json.dumps(datadict_list)
def get_evidence_fitbit(context, componentid, start, end): evdlist = self.get_evidence_new(context, component) days_in_between = str(end - start).split(' ') interval = int(days_in_between[0]) print interval margin = datetime.timedelta(days = interval) keys = ('date','steps') for ev in evdlist: #print datetime.datetime.fromtimestamp(int(ev.time)).strftime('%Y-%m-%d'), ev.value try: um_time = ev.time import datetime tt = time.localtime(int(um_time)) track_date = date(tt[0],tt[1],tt[2]) if start <= track_date <= end: #datetime.datetime.fromtimestamp(int(time)).strftime('%Y-%m-%d') if prev_date != track_date: if newval != 0: new_evdlist.append(newval) datelist.append(track_date) print "UM data on %s is %s"%(str(track_date), newval) newval = int(ev.value) prev_date = track_date else: newval = newval + int(ev.value) except Exception,e: print e return "Error: "+str(e) dict_data = [] for i in range(len(new_evdlist)): tag_list=list() tag_list.append(datelist[i]) tag_list.append(new_evdlist[i]) data_dictionary = dict(itertools.izip(keys, tag_list)) dict_data.append(data_dictionary)
def write_data(self, d, kid): for item in d: import time image_id = item.id image_title = item.title image_description = item.description time_s = item.datetime image_datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time_s)) image_views = item.views image_link = item.link album_name = kid updates = self.session.query(Messages).filter_by(album_name = album_name, image_link = image_link).all() if not updates: print "inserting new image URLs:", album_name, image_link upd = Messages(image_id, image_title, image_description, image_datetime, image_views, image_link, album_name) self.session.add(upd) else: if len(updates) > 1: print "Duplicate Warning" else: print "Duplicate images, Not inserting" self.session.commit()
def write_data(self, d, kid, page): for item in d: import time keyword = kid image_title = item.title time_s = item.datetime image_datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time_s)) image_views= item.views image_link = item.link image_vote = item.vote pagenumber = page updates = self.session.query(Messages).filter_by(image_link = image_link, keyword = keyword).all() if not updates: print "inserting new image URLs:", image_link, pagenumber upd = Messages(keyword, image_title, image_datetime, image_views, image_link, image_vote, pagenumber) self.session.add(upd) else: if len(updates) > 1: print "Duplicate Warning" else: print "Duplicate images, Not inserting" self.session.commit()
def time_turn():
    """Print the current time in a variety of strftime formats."""
    default_repr = time.asctime(time.localtime())
    print("当前默认日期时间格式:%s" % default_repr)
    print(type(default_repr))
    # year-month-day hour:minute:second plus weekday name
    print("24小时制全格式:", time.strftime("%Y-%m-%d %H:%M:%S %A", time.localtime()))
    print("12小时制缩写格式:", time.strftime("%Y-%m-%d %I:%M:%S %a", time.localtime()))
    # add an a.m./p.m. marker via the %p directive
    print("带a.m或p.m 24小时制全格式:", time.strftime("%Y-%m-%d %H:%M:%S %p %A", time.localtime()))
    # include the timezone offset via the %z directive
    print("带时区的全格式:", time.strftime("%Y-%m-%d %H:%M:%S %p %A %z", time.localtime()))
    # the directives may be combined in any order
    print("随意排格式:", time.strftime("%A %Y-%d-%m %p %H:%M:%S %z", time.localtime()))
def adminpanel(request):
    """Django view for the posting admin panel.

    Unauthenticated users are redirected to /login.  A POST creates a new
    article (dated dd-mm-YYYY) and redirects back; otherwise the posting
    page is rendered with all articles, newest first.
    """
    if not request.user.is_authenticated():
        return redirect('/login')
    articles = article.objects.all().order_by('-id')
    if request.POST:
        article(subject=request.POST.get('title'),
                preview=request.POST.get('image'),
                text=request.POST.get('TextArea1'),
                date=str(time.strftime("%d-%m-%Y", time.localtime()))).save()
        return redirect('/adminpanel')
    return render(request, 'app/posting.html', {'articles': articles})
%M 分钟数(00=59)
%S 秒(00-59)
%a 简写的星期名称
%A 完整星期名称
%b 简写的月份名称
%B 完整的月份名称
%c 相应的日期表示和时间表示
%j 年内的一天(001-366)
%p A.M.或P.M.的等价符
%U 一年中的星期数(00-53)星期天为星期的开始
%w 星期(0-6),星期天为星期的开始
%W 一年中的星期数(00-53)星期一为星期的开始
%x 相应的日期表示
%X 相应的时间表示
%z 当前时区的名称
%% %号本身
"""
# First, show the default formatted representation of the current time.
localtime=time.asctime(time.localtime())
print("当前默认日期时间格式是:%s"% localtime)
# Format as: year-month-day hour:minute:second plus weekday name
print("24小时制全格式:", time.strftime("%Y-%m-%d %H:%M:%S %A",time.localtime()))
print("12小时制缩写格式:", time.strftime("%Y-%m-%d %H:%M:%S %a", time.localtime()))
# Add an a.m./p.m. marker via the %p directive
print("带a.m 或p.m24小时制全格式:",time.strftime("%Y-%m-%d %H:%M:%S %p %A",time.localtime()))
# Include the timezone via the %z directive
print("带时区的全格式:", time.strftime("%Y-%m-%d %H:%M:%S %p %A %z", time.localtime()))
# Directives may be combined in any order
print("随意排格式:", time.strftime("%A %Y-%d-%m %p %H:%M:%S %z", time.localtime()))
def get_cpu_mem_process(dev_list):
    """Collect per-process CPU and memory figures from each device in
    ``dev_list`` (via get_performance over FRP/SSH) and write the results —
    plus a pass/fail case record — into the auto_test_log database.

    Each device id maps through ``dev_date`` to (union id, environment
    label, hardware type); environment selects the FRP url and hardware
    type selects the SSH user.
    """
    dev_id_list = dev_list
    #dev_id_list=[[2012,"1012018093000149","生产","arm"],[2030,"1012019010301781","生产","arm"]]
    # device id -> [union id, environment ("测试"=test / "生产"=production), hardware type]
    dev_date = {
        "2012": ["1012018093000149", "生产", "arm"],
        "2001": ["1012018071000020", "测试", "arm"],
        "1049": ["1012018071000014", "测试", "arm"],
        "2003": ["02c00081b27ae2bd", "测试", "nanopi"],
        "10061": ["1012019010301847", "测试", "arm"],
        "12051": ["1012019031900104", "测试", "arm"],
        "2017": ["02c0008166531e3f", "测试", "nanopi"],
        "2030": ["1012019010301781", "生产", "arm"],
        "3069": ["1012019031900102", "生产", "arm"]
    }
    save_log = operator_suite_log("auto_test_log", "get_cpu_mem_job")
    case_tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    for i in range(len(dev_id_list)):
        dev_id = int(dev_id_list[i])
        dev_union_id = dev_date[str(dev_id)][0]
        dev_url = dev_date[str(dev_id)][1]
        dev_hard_type = dev_date[str(dev_id)][2]
        # Derive the SSH login port: short ids get a "2" prefix.
        if len(str(dev_id)) < 5:
            sshid_org = "2" + str(dev_id)
            sshid = int(sshid_org)
        else:
            sshid = int(dev_id)
        # Pick the FRP rrpc endpoint by environment.
        if dev_url == "测试":
            url = "http://10.4.32.114:8085/rrpc"
        elif dev_url == "生产":
            url = "http://w.vegcloud.xyz:8085/rrpc"
        # Pick the SSH user by hardware type.
        if dev_hard_type == "arm":
            sshname = "linaro"
        else:
            sshname = "pi"
        getperformance = get_performance(dev_union_id, dev_id, url, "auto_test_log", sshid, sshname)
        start_frp = getperformance.start_app()
        if start_frp == "Pass":
            getdb = getperformance.get_date()
            # get_date returns a tuple (cpu, mem, mem summary dicts) on
            # success, or an error message otherwise.
            if type(getdb) == type((1, 2)):
                wrt_cpu = save_log.write_cpu_date(
                    str(dev_id), getdb[0]["door"], getdb[0]["weigh"],
                    getdb[0]["weighcv_cm"], getdb[0]["weighcv_as"],
                    getdb[0]["iotkit"], getdb[0]["apps"], getdb[0]["alarm"],
                    getdb[0]["monitor"], getdb[0]["tts"], case_tm)
                wrt_mem = save_log.write_mem_date(
                    str(dev_id), getdb[1]["door"], getdb[1]["weigh"],
                    getdb[1]["weighcv_cm"], getdb[1]["weighcv_as"],
                    getdb[1]["iotkit"], getdb[1]["apps"], getdb[1]["alarm"],
                    getdb[1]["monitor"], getdb[1]["tts"], case_tm)
                wrt_mem_summary = save_log.write_mem_summary_date(
                    str(dev_id), getdb[2]["mem_total"],
                    getdb[2]["mem_free"], getdb[2]["mem_use"], getdb[2]["mem_buff"], getdb[2]["remaind_space"], case_tm)
                if wrt_cpu == "Pass" and wrt_mem == "Pass" and wrt_mem_summary == "Pass":
                    save_log.write_case(dev_id, "get_cpu_mem_date", "pass", "pass", case_tm)
                else:
                    save_log.write_case(dev_id, "get_cpu_mem_date", "Fail", "write db fail", case_tm)
            else:
                save_log.write_case(dev_id, "get_cpu_mem_date", "Fail", getdb, case_tm)
        else:
            save_log.write_case(dev_id, "get_cpu_mem_date", "Fail", start_frp, case_tm)
def CM_Get_Sku(dev_id_list, env):
    """Repeatedly open the cabinet door over SSH and verify that the
    weighcv-cm transaction log contains open/close object-detection results
    for every shelf layer, logging failures via operator_suite_log.

    :param dev_id_list: device ids; only the first is exercised
    :param env: unused; kept for interface compatibility

    BUG FIXES vs. original:
    - the close-door path initialised ``close_doods_date`` but appended to
      the undefined name ``close_goods_date`` (NameError); unified the name;
    - the four near-identical per-layer parsing branches (which also
      shadowed the outer loop variable ``j``) are collapsed into the
      ``_collect`` helper, preserving the per-layer accumulation.
    """
    dev_id = dev_id_list[0]
    save_log = operator_suite_log("auto_test_log", "CM_Get_Sku")
    counter_error = 0
    # weighids of the four shelf layers, top to bottom.
    layer_ids = ("010101", "010201", "010301", "010401")

    def _collect(detect_json, layers, dates):
        # Parse one detectinfo JSON payload; for each known layer append its
        # {goodsid: goodsnum} entries to layers[idx] and record the layer
        # list in `dates` (mirrors the original per-branch behaviour).
        detect_list = json.loads(detect_json)
        for entry in detect_list:
            try:
                idx = layer_ids.index(entry["weighid"])
            except ValueError:
                continue  # unknown layer id — ignored, as in the original
            for goods in entry["goodsinfo"]:
                layers[idx].append({goods["goodsid"]: goods["goodsnum"]})
            dates.append(layers[idx])

    for lp in range(5):  # up to 5 SSH connection attempts
        try:
            hostid = "m.vegcloud.tech"
            pwd = "Ustaff201"
            sshname = "linaro"
            # Short device ids are reached through a "2"-prefixed port.
            if len(str(dev_id)) == 5:
                sshid = dev_id
            else:
                sshid = "2" + str(dev_id)
            myclient = paramiko.SSHClient()
            myclient.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            myclient.connect(hostid, port=int(sshid), username=sshname,
                             password=pwd, allow_agent=False, look_for_keys=False)
            time.sleep(5)
            stdin, stdout, stderr = myclient.exec_command("sudo bash")
            time.sleep(2)
        except Exception:
            # Connection failed — back off and retry (narrowed from bare except).
            time.sleep(10)
        else:
            # Seed the transaction id from today's date, e.g. 20190101.
            int_id = time.strftime('%Y-%m-%d', time.localtime(time.time()))
            trans_id = int(int_id.replace("-", ""))
            while True:
                case_tm = time.strftime('%Y-%m-%d', time.localtime(time.time()))
                cmd_open_door = "curl -X POST \\" + "http://localhost:8086/door/1/open \\" + "-H \'Content-Type: application/json\' \\" + "-H \'Postman-Token: bb06b4e0-9459-40a4-b875-b39354cbb18a\' \\" + "-H \'cache-control: no-cache\' \\" + "-d \'{\"transid\":" + '"' + str(
                    trans_id
                ) + '"' + ",\"lockid\": \"1-1-1\",\"user_type\": 1}\'"
                stdin, stdout, stderr = myclient.exec_command(cmd_open_door)
                time.sleep(20)
                open_lock_status_org = stdout.read().decode("utf-8")
                open_lock_status = json.loads(open_lock_status_org)
                if open_lock_status["msg"].lower() != "success":
                    save_log.write_case(dev_id, "open_lock_fail", "Fail",
                                        open_lock_status["output"], case_tm)
                # Read this transaction's weighcv-cm log file.
                cmd = "cat " + "/vbg/root/weighcv-cm/log/" + str(trans_id) + ".txt"
                stdin, stdout, stderr = myclient.exec_command(cmd)
                open_layers = [[] for _ in layer_ids]
                close_layers = [[] for _ in layer_ids]
                open_goods_date = []
                close_goods_date = []
                while True:
                    line = stdout.readline()
                    if not line:
                        break
                    open_goods_info_org = re.findall(
                        "CVOpenDoorDo OpenObjDetectInfo detectinfo=\s+(.+)", line)
                    close_goods_info_org = re.findall(
                        "CVCloseDoorDo CloseObjDetectInfo detectinfo=\s+(.+)", line)
                    if open_goods_info_org:
                        _collect(open_goods_info_org[0], open_layers, open_goods_date)
                    if close_goods_info_org:
                        _collect(close_goods_info_org[0], close_layers, close_goods_date)
                # Any empty layer means detection failed for it.
                for open_goods_info_layer in open_goods_date:
                    if not open_goods_info_layer:
                        counter_error += 1
                        save_log.write_case(dev_id, "open_detect", "Fail",
                                            str(trans_id), case_tm)
                for close_goods_info_layer in close_goods_date:
                    if not close_goods_info_layer:
                        counter_error += 1
                        save_log.write_case(dev_id, "open_detect", "Fail",
                                            str(trans_id), case_tm)
                if counter_error > 10:
                    break
                time.sleep(120)
                trans_id += 1
            time.sleep(30)
        # NOTE(review): in the original collapsed source the placement of this
        # failure message was ambiguous; it is kept at the end of each attempt.
        print(str(lp) + "次SSH登入失败")
# Demo script: timedelta arithmetic and the time module.
from datetime import timedelta

t1 = timedelta(seconds=33)
t2 = timedelta(seconds=54)
t3 = t1 - t2
# A negative timedelta is normalised as (-1 day + remainder).
print("t3 =", t3)        # t3 = -1 day, 23:59:39
print("t3 =", abs(t3))   # t3 = 0:00:21

t = timedelta(days=5, hours=1, seconds=33, microseconds=233423)
print("total seconds =", t.total_seconds())  # total seconds = 435633.233423

# Time module
import time

t = time.localtime()
current_time = time.strftime("%H:%M:%S", t)
print(current_time)  # 07:46:58

# time.sleep(): suspends/delays execution of the current thread for given # of seconds
print("This is printed immediately.")
time.sleep(2.4)
print("This is printed after 2.4 seconds.")

named_tuple = time.localtime()  # get struct_time
time_string = time.strftime("%m/%d/%Y, %H:%M:%S", named_tuple)
print(time_string)  # 12/28/2018, 09:47:41

# Format Datetime
def json_statistics(request):
    """Django view: assemble dashboard statistics as JSON from redis
    counters.

    Produces: a 5-month bar chart of dnsmasq-blocked and privoxy-filtered
    counts, top-5 filtered/blocked domains, and two pie charts (ad-free vs
    blocked over six months, and for today).
    """
    logger.debug("parsing logs")
    utils.exec_upri_config('parse_logs')
    # bar chart
    monthly = [[0] * 5, [0] * 5]  # [0]=dnsmasq, [1]=privoxy
    now = time.localtime()
    # Localised month labels for the last 5 months, oldest first.
    months = [
        _localdate(
            datetime.fromtimestamp(
                time.mktime(
                    (now.tm_year, now.tm_mon - n, 1, 0, 0, 0, 0, 0, 0))),
            "F") for n in reversed(range(5))
    ]
    # Numeric month keys for the last 6 months (index 0 = oldest).
    months_nr = [
        _localdate(
            datetime.fromtimestamp(
                time.mktime(
                    (now.tm_year, now.tm_mon - n, 1, 0, 0, 0, 0, 0, 0))),
            "n") for n in reversed(range(6))
    ]
    for i in range(5):
        # Missing keys count as 0.
        dnsmasq_key = __DELIMITER.join(
            (__PREFIX, __DNSMASQ, __BLOCKED, __MONTH, str(months_nr[i + 1])))
        monthly[0][i] = int(redis.get(dnsmasq_key) or 0)
        privoxy_key = __DELIMITER.join(
            (__PREFIX, __PRIVOXY, __BLOCKED, __MONTH, str(months_nr[i + 1])))
        monthly[1][i] = int(redis.get(privoxy_key) or 0)
    # lists
    all_filtered_pages = list()
    for key in redis.scan_iter(
            __DELIMITER.join((__PREFIX, __PRIVOXY, __BLOCKED, __DOMAIN, "*"))):
        # Strip the key prefix, leaving just the domain.
        site = key.replace(
            __DELIMITER.join((__PREFIX, __PRIVOXY, __BLOCKED, __DOMAIN)) + ":",
            "")
        all_filtered_pages.append({
            "url": site,
            "count": int(redis.get(key) or 0)
        })
    filtered_pages = sorted(all_filtered_pages,
                            key=lambda k: k['count'],
                            reverse=True)[:5]
    all_blocked_pages = list()
    for key in redis.scan_iter(
            __DELIMITER.join((__PREFIX, __DNSMASQ, __BLOCKED, __DOMAIN, "*"))):
        site = key.replace(
            __DELIMITER.join((__PREFIX, __DNSMASQ, __BLOCKED, __DOMAIN)) + ":",
            "")
        all_blocked_pages.append({
            "url": site,
            "count": int(redis.get(key) or 0)
        })
    blocked_pages = sorted(all_blocked_pages,
                           key=lambda k: k['count'],
                           reverse=True)[:5]
    # pi charts
    sum_adfree_sixmonths = 0
    sum_blocked_sixmonths = 0
    for i in range(6):
        blocked_key = __DELIMITER.join(
            (__PREFIX, __DNSMASQ, __BLOCKED, __MONTH, str(months_nr[i])))
        sum_blocked_sixmonths += int(redis.get(blocked_key) or 0)
        adfree_key = __DELIMITER.join(
            (__PREFIX, __DNSMASQ, __ADFREE, __MONTH, str(months_nr[i])))
        sum_adfree_sixmonths += int(redis.get(adfree_key) or 0)
    today = datetime.now().date().strftime('%Y-%m-%d')
    # return 0 if key does not exist
    sum_blocked_today = int(
        redis.get(
            __DELIMITER.join((__PREFIX, __DNSMASQ, __BLOCKED, __DAY, today)))
        or 0)
    sum_adfree_today = int(
        redis.get(
            __DELIMITER.join((__PREFIX, __DNSMASQ, __ADFREE, __DAY, today)))
        or 0)
    pie1_data = [sum_adfree_sixmonths, sum_blocked_sixmonths]
    pie2_data = [sum_adfree_today, sum_blocked_today]
    return HttpResponse(json.dumps({
        'pie1_data': {
            'series': pie1_data
        },
        'pie2_data': {
            'series': pie2_data
        },
        'filtered_pages': filtered_pages,
        'blocked_pages': blocked_pages,
        'bar_data': {
            'labels': months,
            'series': monthly
        }
    }),
                        content_type="application/json")
def enrich_single_stock( predict_date , stock_index ):
    # Predict the trend cluster of one stock index for predict_date, wrap the
    # result in an EMBERS "surrogate data" record, persist it to SimpleDB and
    # publish it to a ZMQ queue.  Python 2 code (itervalues/iteritems, bare
    # string literals used as inline commentary are preserved as-is).
    # Returns the surrogate dict; None when predict_date is not a trading day;
    # on any exception logs and implicitly returns None.
    try:
        "Check if the predictive Day is trading day, if so continue, otherwise just return None"
        if_trading_day = check_if_tradingday(predict_date,stock_index)
        if if_trading_day is False:
            return None
        predictiveResults = {}
        finalRatio = {}
        clusterProbability = {}
        predictiveProbability = 0
        stockDerived = []
        newsDerived = []
        "Iteratively compute the probabilty of each cluster for the stock "
        cluster_pro_list = CONFIG["clusterProbability"][stock_index]
        for cluster_type in cluster_pro_list:
            "compute the contribution of 3 past day's trend "
            stockIndexProbability,stockDerived = compute_stock_index_probability(predict_date, cluster_type , stock_index )
            "compute the contribution of 3 past day's news"
            newsProbability,newsDerived = compute_stock_news_probability(predict_date, cluster_type , stock_index )
            "combine two contribution together"
            # 1e90 rescales the tiny exp() of summed log-probabilities away
            # from float underflow; the constant cancels in the ratios below.
            predictiveProbability = math.exp( stockIndexProbability + newsProbability ) * float( 1e90 )
            predictiveResults[cluster_type] = predictiveProbability
        sumProbability = sum( predictiveResults.itervalues() )
        "Get the maximum probability between the predictive values"
        for item_key, item_value in predictiveResults.iteritems():
            finalRatio[item_key] = item_value / sumProbability
        # Highest-probability cluster first.
        sorted_ratio = sorted( finalRatio.iteritems(), key = operator.itemgetter( 1 ), reverse = True )
        clusterProbability[stock_index] = {}
        clusterProbability[stock_index][predict_date] = sorted_ratio[0]
        "Construct the Surrogate data"
        surrogateData = {}
        # NOTE(review): local name `date` shadows any datetime.date import.
        date = time.strftime('%Y-%m-%d',time.localtime(time.time()))
        "Merge News Derived and Stock Derived"
        derivedFrom = []
        for item in stockDerived:
            derivedFrom.append(item)
        for item in newsDerived:
            derivedFrom.append(item)
        "construct surrogate data"
        model = 'Bayesian - Time serial Model'
        location = CONFIG["location"][stock_index]
        population = stock_index
        confidence = sorted_ratio[0][1]
        confidenceIsProbability = True
        shiftType = "Trend"
        valueSpectrum = "changePercent"
        strength = sorted_ratio[0][0]
        shiftDate = predict_date
        surrogateData["date"] = date
        surrogateData["derivedFrom"] = derivedFrom
        surrogateData["model"] = model
        surrogateData["location"] = location
        surrogateData["population"] = population
        surrogateData["confidence"] = confidence
        surrogateData["confidenceIsProbability"] = confidenceIsProbability
        surrogateData["shiftType"] = shiftType
        surrogateData["valueSpectrum"] = valueSpectrum
        surrogateData["strength"] = strength
        surrogateData["shiftDate"] = shiftDate
        "Generate Embers Id"
        jsonStr = json.dumps(surrogateData)
        # NOTE(review): the id hashes json.dumps(jsonStr), i.e. the JSON text
        # serialized a second time.  Existing ids depend on this, so it is
        # preserved as-is.
        embersId = hashlib.sha1(json.dumps(jsonStr)).hexdigest()
        surrogateData["embersId"] = embersId
        "Insert the surrogatedata to simple DB"
        domain_name = "finance_surrogatedata"
        domain = get_domain(domain_name)
        domain.put_attributes(embersId,surrogateData)
        #push surrodate data into ZMQ
        with queue.open(PORT, 'w', capture=True) as outq:
            outq.write(surrogateData)
        return surrogateData
    except Exception as e:
        log.info( "Error: %s" % e.args)
def date_test():
    """Demo of datetime vs. time module conversions; prints examples and returns None.

    Fixes over the original: drops the unused function-local imports
    (`date`, `locale`, and `datetime.time`, which was immediately shadowed
    by `import time` anyway).  All printed output is unchanged.
    """
    from datetime import datetime, timedelta
    import time

    print('\n---- datetime')
    # --------
    now = datetime.now()
    day = datetime(2018, 11, 30, 14)
    print('now:', now)
    print('day:', day)
    print()
    # --------
    # === format ===
    # %Y-%m-%d %H:%M:%S
    # %Y -> 2019, %y -> 19
    # %B -> January, %b -> Jan, %A -> Sunday, %a -> Sun
    # %w -> [week], %Z -> UTC
    # ==============
    str_to_date = datetime.strptime('2017-06-21', '%Y-%m-%d')
    date_to_str = day.strftime('%Y-%m-%d %H:%M:%S')
    date_to_timestamp = day.timestamp()
    timestamp_to_date = datetime.fromtimestamp(1543582800)
    print('str_to_date:', str_to_date)
    print('date_to_str:', date_to_str)
    print('date_to_timestamp:', date_to_timestamp)
    print('timestamp_to_date:', timestamp_to_date)
    print()
    # --------
    time_offset = now + timedelta(microseconds=0, milliseconds=0, seconds=0,
                                  minutes=0, hours=2, days=1, weeks=0)
    print('time_offset: {}, now: {}'.format(time_offset, now))
    sec_diff = datetime.strptime('2018-07-18 10:06:31', '%Y-%m-%d %H:%M:%S').timestamp() - datetime.strptime('2018-02-11 18:27:26', '%Y-%m-%d %H:%M:%S').timestamp()
    date_diff = datetime(2018, 11, 30) - datetime(2018, 10, 7)
    print('seconds diff [2018-07-18 10:06:31 - 2018-02-11 18:27:26]:', sec_diff)
    print('date_diff [2018-11-30 - 2018-10-07]: "{}" => days: {}, seconds: {}, microseconds: {}; total_seconds: {}'.format(date_diff, date_diff.days, date_diff.seconds, date_diff.microseconds, date_diff.total_seconds()))

    print('\n---- time')
    # --------
    str_to_time_struct = time.strptime('2017-06-21', '%Y-%m-%d')
    time_struct_to_str = time.strftime('%Y-%m-%d %H:%M:%S', str_to_time_struct)
    time_struct_to_timestamp = time.mktime(str_to_time_struct)
    now_timestamp = time.time()
    timestamp_to_time_struct = time.localtime(now_timestamp)
    print('str_to_time_struct:', str_to_time_struct)
    print('time_struct_to_str:', time_struct_to_str)
    print('time_struct_to_timestamp:', time_struct_to_timestamp)
    print('now_timestamp:', now_timestamp)
    print('timestamp_to_time_struct:', timestamp_to_time_struct)
    print()
    # --------
    _date = now.date()
    print(f'datetime to date: {now} => {_date}')
    print()
print(today_date) new_today_date = today_date.strftime("%d/%m/%Y") print(new_today_date) #Future date import datetime x = datetime.datetime(2019, 12, 31) print(x) import time epoch = time.time() print("epoch", epoch) t = time.localtime(epoch) print('local time', t) d = t.tm_mday m = t.tm_mon y = t.tm_year print('Current date is %d-%d-%d' % (d, m, y)) te = time.ctime(epoch) print('Ctime : epoch ', te) t = time.ctime() print('ctime ', t) from datetime import * now = datetime.now() print(now) print('Date now {}/{}/{}'.format(now.day, now.month, now.year))
#time – Time independent of the day (Hour, minute, second, microsecond) #datetime – Combination of time and date (Month, day, year, hour, second, microsecond) #timedelta— A duration of time used for manipulating dates #tzinfo— An abstract class for dealing with time zones # from datetime import date from datetime import time from datetime import datetime #### Time module import time ticks = time.time() print "Number of ticks since 12:00am, January 1, 1970:", ticks #Getting current time localtime = time.localtime(time.time()) print "Local current time :", localtime #Getting formatted time localtime = time.asctime( time.localtime(time.time()) ) print "Local current time :", localtime ### Calender Module #Getting calendar for a month import calendar cal = calendar.month(2008, 1) print "Here is the calendar:" print cal
Lokalna data: 12/07/20 Lokalny czas: 13:53:13 Czas obecny: 01:55:02 PM 24-godz. czas: 13:57 """ >>> >>> import time >>> import sys >>> print(sys.version) 3.8.5 (tags/v3.8.5:580fbb0, Jul 20 2020, 15:43:08) [MSC v.1926 32 bit (Intel)] >>> >>> print(time.perf_counter(), time.localtime(time.perf_counter())) 89.4915672 time.struct_time(tm_year=1970, tm_mon=1, tm_mday=1, tm_hour=1, tm_min=1, tm_sec=29, tm_wday=3, tm_yday=1, tm_isdst=0) >>> >>> import time >>> time.time() 1614326427.3598132 >>> time.localtime(time.time()) time.struct_time(tm_year=2021, tm_mon=2, tm_mday=26, tm_hour=9, tm_min=0, tm_sec=57, tm_wday=4, tm_yday=57, tm_isdst=0) >>> time.asctime(time.localtime(time.time())) 'Fri Feb 26 09:01:38 2021' >>> time.localtime(time.clock()) Traceback (most recent call last): File "<pyshell#4>", line 1, in <module> time.localtime(time.clock()) AttributeError: module 'time' has no attribute 'clock'
%a 简写的星期名称 %A 完整星期名称 %b 简写的月份名称 %B 完整的月份名称 %c 相应的日期表示和时间表示 %j 年内的一天(001-366) %p A.M.或P.M.的等价符 %U 一年中的星期数(00-53)星期天为星期的开始 %w 星期(0-6),星期天为星期的开始 %W 一年中的星期数(00-53)星期一为星期的开始 %x 相应的日期表示 %X 相应的时间表示 %z 当前时区的名称 %% %号本身 """ #先查看当前默认格式化显示的时间 localtime = time.asctime(time.localtime()) print("当前默认日期时间格式是:%s" % localtime) # 格式化为: 年-月-日 时:分:秒 星期几 print("24小时制全格式:", time.strftime("%Y-%m-%d %H:%M:%S %A", time.localtime())) print("12小时制缩写格式:", time.strftime("%Y-%m-%d %H:%M:%S %a", time.localtime())) #带a.m 或p.m标识时间%p print("带a.m 或p.m24小时制全格式:", time.strftime("%Y-%m-%d %H:%M:%S %p %A", time.localtime())) # 把时区也带上看看 %z print("带时区的全格式:", time.strftime("%Y-%m-%d %H:%M:%S %p %A %z", time.localtime())) # 格式乱排下试试 print("随意排格式:", time.strftime("%A %Y-%d-%m %p %H:%M:%S %z", time.localtime()))
print('_' * 30) # 时间 today_now = datetime.now() print('现在时间是:%s' % today_now) # 用time造个时间 t = time(hour=12, minute=20, second=30, microsecond=200) print('自己造的时间是:%s' % t) # 再造个日期出来 d = datetime(year=2018, month=4, day=22, hour=17, minute=7, second=30) print('自己造的日期是:%s' % d) print('-' * 30) import time localtime = time.asctime(time.localtime()) print('当前默认时间日期格式是:%s' % localtime) # 格式:年-月-日 时:分:秒 星期几 print('24小时制格式:', time.strftime('%Y-%m-%d %H:%M:%S %A', time.localtime())) print('12小时制格式:', time.strftime('%Y-%m-%d %I:%M:%S %a', time.localtime())) # 带a.m或p.m print('带a.m/p.m时间格式:', time.strftime('%Y-%m-%d %H:%M:%S %p %A', time.localtime())) # 带时区的时间格式(时区乱码没有解决) print('带时区的时间格式:', time.strftime('%Y-%m-%d %H:%M:%S %p %A %z', time.localtime()))
# %m 월 01 ~ 12 # %B 월 이름 January, ... # %b 월 축약이름 Jan, ... # %d 월의 일자 01 ~ 31 # %A 요일 이름 Sunday, # %a 요일 축약이름 Sun, ... # %H 24시간 00 ~ 23 # %I 12시간 01 ~ 12 # %p 오전/오후 AM, PM # %M 분 00 ~ 59 # %S 초 00 ~ 59 # 숫자는 자릿수에 맞춰 왼쪽에 0이 채워진다. # 다음은 time 모듈에서 제공하는 strftime() 함수다. 이것은 struct_time 객체를 문자열로 변환한다. 먼저 포맷문자열 fmt를 정의하고, 이것을 다시 사용하자 import time fmt = "It's %A, %B %d, %Y, local time %I:%M:%S%p" t = time.localtime() t # time.struct_time(tm_year=2017, tm_mon=10, tm_mday=26, tm_hour=15, tm_min=26, tm_sec=51, tm_wday=3, tm_yday=299, tm_isdst=0) time.strftime(fmt, t) # "It's Thursday, October 26, 2017, local time 03:26:51PM" 와 이건 존트 놀랍다. # 이것을 다음과 같이 date 객체에 사용하면 날짜부분만 작동한다. 그리고 시간은 기본값으로 지정된다. from datetime import date some_day = date(2015, 12, 12) fmt = "It's %B %d, %Y, local time %I:%M:%S%p" some_day.strftime(fmt) # "It's December 12, 2015, local time 12:00:00AM", date를 제외한 나머지 요소는 디폴트 값으로 # time 객체는 시간부분만 변환된다. from datetime import time some_time = time(10, 35)
# create a time t = time(hour=12, minute=20, second=30, microsecond=200) print(f"We create the time is {t}") # create a date d = datetime(year=2008, month=8, day=8, hour=8, minute=8, second=8) print(f"We create the date is {d}") print("-" * 30) import time if __name__ == '__main__': # time.strftime(format[, t]) # default format time localtime = time.asctime(time.localtime()) print(f"The default time format of current time is: {localtime}") # format it to year-month-day hour:minute:second weekday print("24hrs full format: ", time.strftime("%Y-%m-%d %H:%M:%S %A", time.localtime())) print("12hrs full format: ", time.strftime("%Y-%m-%d %I:%M:%S %a", time.localtime())) # time with a.m. or p.m. print("24 hrs full time with a.m. or p.m.: ",time.strftime("%Y-%m-%d %H:%M:%S %p %A", time.localtime())) # time with timezone print("Time with timezone: ", time.strftime("%Y-%m-%d %H:%M:%S %p %A %z", time.localtime())) # random format print("Random format: ", time.strftime("%A %Y-%d-%m %p %H:%M:%S %z", time.localtime())) print("-" * 30)
#print(Fore.RED + text) #print(Back.GREEN + text + Style.RESET_ALL) #print(text) doc = parse('RoadRestrictions.xml') for Closure in doc.findall('Closure'): #print (Closure.findtext('StartTime')) StartTime = int(Closure.findtext('StartTime'))/1000 #StartTime = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(StartTime)) #StartTime = datetime.fromtimestamp(StartTime).strftime('%Y-%m-%d %H:%M:%S') StartTime = time.strftime("%a, %d %b %Y %H:%M:%S .0000", time.localtime(StartTime)) StartTime = parser.parse(StartTime) # print (StartTime) differenceInTime = StartTime.date() - Now.date() #rint(differenceInTime.days) if differenceInTime.days>0: lat = float(Closure.findtext('Latitude')) lon = float(Closure.findtext('Longitude')) district = Closure.findtext('District') #StartTime = Closure.findtext('StartTime') EndTime = Closure.findtext('EndTime') #StartTime = datetime.fromtimestamp(int(StartTime)/1000.) name = Closure.findtext('Name')
#再造个日期时间出来试试: d = datetime(year=2008, month=8, day=8, minute=8, second=8) print('造的日期时间为:%s' % d) ''' 格式化日期时间:strftime 函数 常用的时间格式如下: %y 两位数的年份(00-99) %Y 四位数的年份(000-9999) %m 月份(01-12) %d 月内的一天(0-31) %H 24小时制小时数(0-23) %I 12小时制的小时数(01-12) %M 分钟数(00-59) %S 秒(00-59) ''' #当前默认格式化时间: import time t = time.localtime() print('未格式化前的时间:t = %s' % t) print(u'当前默认日期时间格式:%s' % time.asctime(t)) #格式化为:年-月-日 时:分:秒 星期几 print(u'24小时制全格式:', time.strftime('%Y-%m-%d %H:%M:%S %A'), time.localtime()) print(u'12小时制缩写格式:', time.strftime('%Y-%m-%d %I:%M:%S %a', time.localtime())) #带a.m 或p.m 24小时制格式: print("带a.m或p.m 24小时制全格式:", time.strftime("%Y-%m-%d %H:%M:%S %p %A", time.localtime())) #格式乱排序: print('随意拍格式:', time.strftime("%A %Y-%d-%m %p %H:%M:%S %z", time.localtime()))
from datetime import time import time from django.utils import timezone from django.utils.timezone import now print(timezone.now, type(timezone)) print(time.localtime(time.time())) print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
from datetime import datetime, date, time import time today = datetime.now() print "today" ,today print "year" ,today.year print "month" ,today.month print "day" ,today.day print "hour" ,today.hour print "min" ,today.minute print "sec" ,today.second timeSec = time.mktime(today.timetuple()) print "time sec",timeSec timeSec = timeSec+60 locTime = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(timeSec)) print "local time : ", locTime dt = datetime.strptime("2015-10-06 12:44:22", "%Y-%m-%d %H:%M:%S") sec1=time.mktime(dt.timetuple()) dt2 = datetime.strptime("2015-10-06 12:45:22", "%Y-%m-%d %H:%M:%S") sec2 = time.mktime(dt2.timetuple()) print "sec diff" ,sec2-sec1 print "time.time",time.time() delta1 = dt2-dt print "delta1", delta1
noon_today = datetime.combine(this_day, noon) print(noon_today) print(noon_today.date()) print(noon_today.time()) print() # 13.3 time 모듈 import time now = time.time() print(now) print(time.ctime(now)) print(time.localtime(now)) print(time.gmtime(now)) import time now = time.localtime() print(now) print(now[0]) print(list(now[x] for x in range(9))) # tm = time.localtime(now) # print(time.mktime(tm)) print() # 13.4 날자와 시간 읽고 쓰기 import time
def clockwatcher_main():
    """Daemon main loop: every minute send/update an "It is now HH:MM"
    notification to every watcher address in clockwatcherd.cfg.

    New addresses are picked up from newwatchers.cfg; addresses whose
    notification endpoint returns 404 are removed.  Never returns
    (infinite loop); calls quit() on unknown config-read errors.
    """
    syslog.syslog("clockwatcherd: starting clockwatcher_main")
    lifetime = timedelta(days=1) #Notif expiration delta
    s = Signer("/etc/clockwatcher/shiny.private", "shiny")
    addrlist = []           # known watcher addresses ("" marks a dead slot)
    updates = {}            # addr -> notification id of a live notif
    with open("/etc/clockwatcher/clockwatcherd.cfg", "r") as cfg:
        for line in cfg:
            addrlist.append(line[:-1]) #remembering to remove trailing \n
    while 1:
        # Synchronize to next whole minute
        starttime = time.localtime()
        time.sleep(60 - starttime.tm_sec)
        currtime = datetime.now() + timedelta(
            seconds=30)  # Force rounding in case we're early
        timemsg = currtime.strftime("It is now %H:%M")
        notif = Notification(4, lifetime, timemsg, timemsg + " and all is well")
        # Need to add expiration here
        notif.prepare(s)
        # For now, minimizing the possibility of a collision between this daemon and new authorizations coming in
        # by reading the additional authorizations from a separate file and adding them on here. Only the daemon
        # touches the main clockwatcherd.cfg file.
        rewrite = False
        try:
            with open("/etc/clockwatcher/newwatchers.cfg", "r") as cfg:
                for line in cfg:
                    # NOTE(review): unlike the main config read above, the
                    # trailing newline is NOT stripped here -- confirm intended.
                    newaddr = line
                    if newaddr not in addrlist: #Handle unlikely duplicates
                        addrlist.append(newaddr)
                        rewrite = True
        except IOError:
            # Missing newwatchers.cfg is the normal case.
            pass
        except:
            syslog.syslog(
                "clockwatcherd: Unknown error opening newwatchers file")
            quit()
        if rewrite:
            cfg = open("/etc/clockwatcher/newwatchers.cfg", "w") #Clobber newwatchers file
            cfg.close()
            with open("/etc/clockwatcher/clockwatcherd.cfg",
                      "w") as cfg:  #Update config with new watchers
                for idx in range(len(addrlist)):
                    if addrlist[idx] != "":
                        cfg.write(addrlist[idx])
                        cfg.write("\n")
            rewrite = False
        for idx in range(len(addrlist)):
            notaddr = addrlist[idx]
            if notaddr == "":
                continue
            if notaddr in updates: #update an existing notif if possible
                notid = updates[notaddr]
                status = notif.update(notid)
                if status == 404: #if 404 delete notid from updates
                    del updates[notaddr]
            if notaddr not in updates: #not an else because it could have just been removed
                # TODO: Handle exceptions (can't connect, etc.) here
                (notid, status) = notif.send(
                    notaddr
                ) #Need to get feedback on send failures, delete notaddr
                if status == 404:
                    addrlist[
                        idx] = "" #Don't delete entry from addrlist inside loop, just blank it
                    rewrite = True #Disk copy of list needs updating
                elif status == 200:
                    updates[notaddr] = notid
        if rewrite: #Update disk copy of list, removing any blank addresses
            with open("/etc/clockwatcher/clockwatcherd.cfg", "w") as cfg:
                for idx in range(len(addrlist)):
                    if addrlist[idx] != "":
                        cfg.write(addrlist[idx])
                        cfg.write("\n")
def detail():
    """Flask view for a discount-coupon detail page.

    GET renders the detail template; with ?do=post (inside the WeChat
    in-app browser only) it claims a coupon for the current user, sends a
    WeChat template message, creates a permanent QR code for redemption
    and returns a JSON status payload.
    """
    discount_id = int(request.args.get("id", 0))
    if not discount_id:
        discount_id = int(request.args.get("did", 0))
    do = request.args.get("do")
    discount = Discount.query.get_or_404(discount_id)
    # discount.count is reset to zero daily at 0:00 -- TODO scripted task
    left_count = discount.number - discount.count
    discount_shop_count = discount.shops.count()
    shop_photos = ShopPhoto.query.filter(ShopPhoto.brand_id == discount.brand_id)
    user_agent = request.headers.get('User-Agent')
    curr_user = g.user
    # The user's coupon-claiming situation:
    # coupons claimed by this user that are still within their validity
    # window (whether used or not).
    curr_ticket_record = GetTicketRecord.query.filter(GetTicketRecord.user_id == curr_user.id,
                                                      GetTicketRecord.discount_id == discount_id,
                                                      GetTicketRecord.create_at >= datetime.datetime.now() - datetime.timedelta(
                                                          days=discount.usable))
    # Bounds of the current week (note: `sunday` is actually next Monday
    # 00:00-equivalent, i.e. weekday offset 7 - weekday -- confirm intended).
    monday = datetime.datetime.now() - datetime.timedelta(days=datetime.datetime.now().weekday())
    sunday = datetime.datetime.now() + datetime.timedelta(days=7 - datetime.datetime.now().weekday())
    curr_ticket_records_week = curr_ticket_record.filter(GetTicketRecord.create_at >= monday,
                                                         GetTicketRecord.create_at <= sunday).count()
    # print user_agent
    if do == 'post':
        if 'MicroMessenger' not in user_agent:
            return json.dumps({"message": "请在微信里操作", "redirect": "permit", "type": "tips"})
        else:
            expire_datetime = discount.get_expire_datetime
            # expire_datetime_format = expire_datetime.strftime("%Y-%m%-%d")
            expire_datetime_format = str(expire_datetime.date())
            record = GetTicketRecord.query.filter(GetTicketRecord.user_id == g.user.id,
                                                  GetTicketRecord.discount_id == discount_id).first()
            # An unexpired existing claim blocks a new one.
            if record:
                if record.status != 'expire':
                    return json.dumps(
                        {"message": {}, "redirect": "", "type": "error"})
            openid = session['openid']
            wechat = WechatBasic(appid=appid, appsecret=appsecret)
            # wechat.send_text_message(session['openid'], "test")
            # Send the claim notification via the public-account message
            # template A0XK30w_sZPti5_gn33PJ5msng7yb71zAEcRa0E44oM.
            template_id = 'A0XK30w_sZPti5_gn33PJ5msng7yb71zAEcRa0E44oM'
            """{first.DATA}}
优惠券:{{keyword1.DATA}}
来源:{{keyword2.DATA}}
过期时间:{{keyword3.DATA}}
使用说明:{{keyword4.DATA}}
{{remark.DATA}} """
            json_data = {
                "first": {
                    "value": "恭喜你领券成功!",
                    "color": "#173177"
                },
                "keyword1": {
                    "value": discount.title,
                    "color": "#173177"
                },
                "keyword2": {
                    "value": "网页获取",
                    "color": "#173177"
                },
                "keyword3": {
                    "value": expire_datetime_format,
                    "color": "#173177"
                },
                "remark": {
                    "value": "凭优惠券详情二维码领取",
                    "color": "#173177"
                }
            }
            # After claiming, write to the get_discount_record table.
            # TODO whether the user may claim again is judged from that table.
            year = time.strftime("%Y", time.localtime())[2:]
            # TODO unique id generated from the timestamp; somewhat ad-hoc.
            code = year + str(time.time())[4:-3]
            record = GetTicketRecord(user_id=g.user.id, discount_id=discount_id, code=code)
            db.session.add(record)
            discount.count = discount.count + 1
            db.session.add(discount)
            db.session.commit()
            url = current_app.config.get('SITE_DOMAIN') + (
                url_for('shop.checkout', discount_id=discount_id, record_id=record.id))
            wechat.send_template_message(openid, template_id, json_data, url)
            # `still` = how many more can be claimed this week. TODO static
            # placeholder, needs real data.  `allow` = weekly claim limit.
            still = discount.number * discount.usable - 1
            # Permanent QR code; scene_id prefix "12" marks coupon-type codes.
            wechat = WechatBasic(appid=current_app.config.get('WECHAT_APPID'),
                                 appsecret=current_app.config.get('WECHAT_APPSECRET'))
            data = {"action_name": "QR_LIMIT_SCENE",
                    "action_info": {"scene": {"scene_id": int(str("12") + str(record.id))}}}
            get_ticket_data = wechat.create_qrcode(data)
            ticket = get_ticket_data.get("ticket")
            session['ticket'] = ticket
            # Persist the ticket on the claim record.
            record.ticket = ticket
            db.session.add(record)
            db.session.commit()
            return json.dumps(
                {"message": {"still": still, "allow": 1, "tid": record.id, "ctime": "156151515"},
                 "redirect": "", "type": "success"})
    # other discount in the discount
    other_discounts = Discount.query.filter(Discount.id != discount_id, Discount.brand_id == discount.brand_id)
    shops = discount.shops.all()
    return render_template('discount/detail.html', discount=discount,
                           discount_shop_count=discount_shop_count,
                           discount_id=discount_id, left_count=left_count,
                           other_discounts=other_discounts,
                           shop_photos=shop_photos, shops=shops,
                           curr_ticket_record=curr_ticket_record.first(),
                           curr_ticket_records_week=curr_ticket_records_week)
def format_time(time_sj): # 时间戳转换正常时间 data_sj = time.localtime(time_sj) time_str = time.strftime("%Y-%m-%d %H:%M:%S",data_sj) return time_str
d = datetime(year=2008,month=8,day=8,minute=8,second=8) print ('造的日期时间为:%s'%d) ''' 格式化日期时间:strftime 函数 常用的时间格式如下: %y 两位数的年份(00-99) %Y 四位数的年份(000-9999) %m 月份(01-12) %d 月内的一天(0-31) %H 24小时制小时数(0-23) %I 12小时制的小时数(01-12) %M 分钟数(00-59) %S 秒(00-59) ''' #当前默认格式化时间: import time t = time.localtime() print ('未格式化前的时间:t = %s'%t) print (u'当前默认日期时间格式:%s'%time.asctime(t)) #格式化为:年-月-日 时:分:秒 星期几 print (u'24小时制全格式:',time.strftime('%Y-%m-%d %H:%M:%S %A'),time.localtime()) print (u'12小时制缩写格式:',time.strftime('%Y-%m-%d %I:%M:%S %a',time.localtime())) #带a.m 或p.m 24小时制格式: print ("带a.m或p.m 24小时制全格式:",time.strftime("%Y-%m-%d %H:%M:%S %p %A",time.localtime())) #格式乱排序: print ('随意拍格式:',time.strftime("%A %Y-%d-%m %p %H:%M:%S %z",time.localtime()))
def format_time_simple(time_sj): # 文件名专用 data_sj = time.localtime(time_sj) time_str = time.strftime("%Y-%m-%d-%H:%M:%S", data_sj) return time_str
def AddWebBookMark(self, folder , article_name , article_title , article_url ,article_desc): """Invoke Jmeter passing the right arguments .""" article_name , article_title , article_url ,article_desc = re.sub('\n' , '<BR/>' , article_name) ,re.sub('\n' , '<BR/>' ,article_title) , re.sub('\n' , '' ,article_url) ,re.sub('\n' , '<BR/>' ,article_desc) cmd = "C:/java/apache-jmeter-2.6/bin/jmeter.bat -Jinsta_user=\"%s\" -Jinsta_password=\"%s\" -Jfname=\"%s\" -Jarticle_name=\"%s\" -Jarticle_title=\"%s\" -Jarticle_url=\"%s\" -Jarticle_desc=\"%s\" -n -t InstaP.jmx -l log_%s.jtl " %( self.insta_user , self.insta_password , folder , article_name , article_title , article_url ,article_desc , time.strftime("%Y%m%d%H%M%S", time.localtime()) ) print cmd os.system(cmd)
def parse_content(self, response):
    """Scrapy callback: scrape one article detail page into a CommonItem.

    Extracts title, column, organ, author, publish date and body HTML
    (with image srcs rewritten to absolute URLs) and yields the item.
    """
    item = CommonItem()
    item['link'] = response.url  # detail page URL
    # Title
    detail_title = response.xpath(
        "//div[@class='container clearfix']/div[@class='search_fl_Content']/div[@class='fTitle']/text()"
    ).extract_first()
    # NOTE(review): detail_url is extracted but never used below.
    detail_url = response.xpath(
        "//div[@class='search_fl_Content']/div[@class='top-10'] /div[@class='title trim']/a/@href"
    )
    # Source column of the article
    detail_column = response.xpath(
        "//div[@class='container clearfix']/div[@class='search_fl_Content']/div[@class='top-10']/div[@class='fPost']/a[3]/text()"
    ).extract_first()
    # Some pages use different HTML -- fallback selector
    other_detail_column = response.xpath(
        "//div[@class='container clearfix']/div[@class='search_fl_Content']/div[@class='top-10']/div[@class='fPost']/a/text()"
    ).extract_first()
    # Publishing organization
    detail_organ = response.xpath(
        "//div[@class='container clearfix']/div[@class='search_fl_Content']/div[@class='flai']/span[1]/text()"
    ).extract_first()
    # Author
    detail_author = response.xpath(
        "//div[@class='container clearfix']/div[@class='search_fl_Content']/div[@class='flai']/span[2]/text()"
    ).extract_first()
    # Publish time
    detail_birth = response.xpath(
        "//div[@class='container clearfix']/div[@class='search_fl_Content']/div[@class='flai']/span[3]/text()"
    ).extract_first()
    # Article body nodes (paragraphs and divs)
    detail_content = response.xpath(
        "//div[@class='container clearfix']/div[@class='search_fl_Content']/div[@class='fcontent']/div[@class='contentBoxF']/p|//div[@class='container clearfix']/div[@class='search_fl_Content']/div[@class='fcontent']/div[@class='contentBoxF']/div"
    )
    item['title'] = detail_title
    # print(bgzi_info_item)
    if detail_column is not None:
        item['column'] = detail_column
    else:
        item['column'] = other_detail_column
    # print(bgzi_info_item)
    # if detail_source is not None:
    item['organ'] = detail_organ
    # print(bgzi_info_item)
    item["dataOriginId"] = self.origin_id
    # if detail_author is not None:
    item['author'] = detail_author
    # print(bgzi_info_item)
    # if detail_birth is not None:
    item['birth'] = detail_birth
    # print(bgzi_info_item)
    # if detail_content is not None:
    detail_content_str = ""
    # Rewrite relative image paths to absolute URLs while concatenating HTML.
    for content in detail_content:
        old_src = content.xpath(
            "./img/@src |./strong/img/@src ").extract_first()
        if old_src is not None:
            new_src = response.urljoin(old_src)
            detail_content_str = detail_content_str + content.extract(
            ).replace(old_src, new_src)
        else:
            detail_content_str = detail_content_str + content.extract()
    item['content'] = detail_content_str
    # Crawl timestamp, not the article's publish time.
    item['date'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    # fo = open("rawcodes.txt", "wb")
    # fo.write((item, -1))
    #
    # # 关闭打开的文件
    # fo.close()
    yield item
from datetime import datetime,time import time while True: print(87*'=') print(''' /$$$$$$ /$$ /$$ /$$$$$$ /$$$$$ /$$ /$$ /$$$$$ /$$ /$$ /$$$$$$$ /$$__ $$| $$ /$$//$$__ $$ |__ $$| $$ | $$ |__ $$| $$ | $$| $$__ $$ | $$ \ $$ \ $$ /$$/| $$ \ $$ | $$| $$ | $$ | $$| $$ | $$| $$ \ $$ | $$$$$$$$ \ $$$$/ | $$ | $$ | $$| $$ | $$ | $$| $$ | $$| $$$$$$$/ | $$__ $$ \ $$/ | $$ | $$ /$$ | $$| $$ | $$ /$$ | $$| $$ | $$| $$__ $$ | $$ | $$ | $$ | $$ | $$ | $$ | $$| $$ | $$| $$ | $$| $$ | $$| $$ \ $$ | $$ | $$ | $$ | $$$$$$/ | $$$$$$/| $$$$$$/| $$$$$$/| $$$$$$/| $$ | $$ |__/ |__/ |__/ \______/ \______/ \______/ \______/ \______/ |__/ |__/ ''') print(87*'=') jamLokal = time.asctime(time.localtime(time.time())) jam = time.localtime().tm_hour jamLokal = time.asctime(time.localtime(time.time())) jam = time.localtime().tm_hour print("=" * 29 + "-|" + jamLokal + "|-" + "=" * 30) if 0 <= jam <= 6: print("=" * 34 + "-<|Selamat Subuh|>-" + "=" * 34) elif 6 <= jam <= 12: print("=" * 35 + "-<|Selamat Pagi|>-" + "=" * 35) elif 12 <= jam <= 14: print("=" * 34 + "-<|Selamat Siang|>-" + "=" * 34)
async def createBasicWords(*, projectName: str = Path(...), newBasicWords: List[NewBasicWords]):
    """FastAPI handler: add basic words to a project's 'basicWords' collection.

    Computes timestamp/length/status/weight attributes for each word,
    resolves projectName -> projectId, bulk-inserts, and on duplicate-key
    errors merges the new source into the existing word's source list.
    Raises HTTPException 503 on missing project, all-duplicate input, or
    other bulk-write failures.
    """
    # projectName -> projectId; first normalize the Pydantic models to dicts.
    newBasicWords = [
        newBasicWordsitem.dict() for newBasicWordsitem in newBasicWords
    ]
    print(projectName, newBasicWords)
    # Add computed attributes.
    for ele in newBasicWords:
        ele['timestamp'] = time.strftime("%Y/%m/%d %H:%M:%S",
                                         time.localtime())
        ele['Length'] = len(ele['word'])
        ele['status'] = '已添加'  # default status: "added"
        if ele['source'] == '手动添加' or ele['source'] == '本地文件':
            # Manual/local-file input defaults to weight 0.
            ele['weight'] = 0
        # Status computation (future work): manual input may resolve to
        # invalid / stopped / already-added (the first three need an API
        # lookup) / new word.
        # Turn source into a list (sets are not supported downstream).
        ele['source'] = [ele['source'].lower()]
    # print(newBasicWords)
    projectId = await findProjectIdFromProjectName(
        dbPrefix,
        'Project',
        queryDict={'projectName': projectName},
        showDict={'_id': 1})
    if not projectId:
        raise HTTPException(status_code=503, detail='projectNotExist')
    try:
        result = await addBasicWords(dbPrefix + '-' + projectId, 'basicWords',
                                     ItemInfos=newBasicWords)
    except pymongo.errors.BulkWriteError as e:
        # Duplicate keys: only merge the new source into the existing word's
        # source list; the rest of the duplicate item is discarded.
        # 1- collect all duplicated entries
        print('出现重复key')
        temp = e.details['writeErrors']
        #print('temp',temp)
        # 1- look up each duplicated word's existing sources
        duplicatedItems = []
        for ele in temp:
            print('ele: ', ele)
            queryDict = {'word': ele['op']['word']}
            shownDict = {'_id': 0, 'source': 1}
            result = await fetchBasicWords(dbPrefix + '-' + projectId,
                                           'basicWords',
                                           xfilter=queryDict,
                                           xshown=shownDict)
            if result['count'] == 1:
                # Word exists: update its source list.
                tempSource = result['content'][0]['source']
                # If the source is already recorded the item is a true
                # duplicate; otherwise append the new source.
                if ele['op']['source'][0].lower() in tempSource:
                    # Fully duplicated (word + source): report it.
                    #raise HTTPException(status_code=503, detail=f'以下基础词出现重复,未添加: [{ele["op"]["word"]}]')
                    duplicatedItems.append(ele["op"]["word"])
                else:
                    # New source: merge and dedupe.
                    tempSource.append(ele['op']['source'][0])
                    tempSource = list(set(tempSource))
                    #print(tempSource)
                    # 2- overwrite the existing word in basicWords
                    try:
                        timestamp = time.strftime("%Y/%m/%d %H:%M:%S",
                                                  time.localtime())
                        result = await updateBasicWords(
                            dbPrefix + '-' + projectId,
                            'basicWords',
                            queryDict={"word": ele['op']['word']},
                            setDict={
                                "$set": {
                                    'source': tempSource,
                                    'timestamp': timestamp
                                }
                            })
                    except Exception as e:
                        #print(e)
                        continue
            else:
                # Word not found -- unexpected for a duplicate-key error; skip.
                continue
        if duplicatedItems != []:
            # Fully duplicated entries (word and source) were present.
            raise HTTPException(status_code=503,
                                detail='以下基础词出现重复,未添加: ' +
                                str(duplicatedItems))
        # Return the final collection state.
        result = await fetchBasicWords(dbPrefix + '-' + projectId,
                                       'basicWords')
        return (result)
    except Exception as e:
        raise HTTPException(status_code=503,
                            detail=json.loads(json_util.dumps(e.details)))
    else:
        return (result)
def check_goals(self):
    """
    try:
        contextlist = self.all_access_model(cont=["Devices"])
    except Exception,e:
        print "Devices not found error",e
    try:
        contextlist2 = self.all_access_model(cont=["Apps"])
    except Exception,e:
        print "Apps not found error",e
    contextlist = contextlist + contextlist2
    """
    # Build per-goal status dicts (target, frequency, chart visibility,
    # days passed/left/met) for every health goal present in the user
    # model.  Python 2 code (print statements, `except Exception,e`).
    # Several blocks hard-code demo values keyed on (masked) email
    # addresses -- presumably study-participant fixtures; confirm.
    goalcontext = ['Goals','Health']
    goalcontext_obj = self.all_access_model(cont=goalcontext)
    print goalcontext_obj
    import datetime
    today = datetime.datetime.now()
    current_date = date(today.year, today.month, today.day)
    app_goal_list = list()
    goal_list = ["CurrentLevelGoal","StepGoal","ModerateActivityGoal","IntenseActivityGoal","AvoidInactivityGoal"]
    for g in goal_list:
        if g in goalcontext_obj:
            # Temporarily extend the context path with this goal type
            # (removed again at the end of the iteration).
            goalcontext.append(g)
            show_chart = 0
            #---Get the target value
            goal_target_val = self.get_evidence_new(context = goalcontext, componentid="target_value")
            print goal_target_val
            try:
                if goal_target_val:
                    # Evidence value looks like "<number> <unit>"; keep the number.
                    goal_target_v = (goal_target_val[-1].value).split(' ')[0]
                else:
                    goal_target_v = "none"
            except Exception,e:
                print e
            #--Find out how many times in a week you are following the goal
            goal_target_freq = self.get_evidence_new(context = goalcontext, componentid="target_frequency")
            if goal_target_freq:
                goal_target_f = (goal_target_freq[-1].value).split(' ')[0]
            else:
                goal_target_f = "none"
            #--Find out when the goal was set
            goal_startdate = self.get_evidence_new(context=goalcontext, componentid="goal_startdate")
            #--How many days in between the goal set date
            goal_duration = self.get_evidence_new(context=goalcontext, componentid="goal_duration")
            if goal_duration[-1].value == "no change":
                # Walk backwards to the most recent explicit duration.
                for dd in goal_duration[::-1]:
                    if dd.value != "no change":
                        betweendays = self.get_days_in_between(dd.value)
                        break
            else:
                betweendays = self.get_days_in_between(goal_duration[-1].value)
            #---Get the date when the goal is set and how many days have been passed
            #start_time = time.localtime(int(goal_startdate[-1].creation_time))
            start_time = time.mktime(time.strptime(goal_startdate[-1].value, '%d/%m/%Y'))
            start_time = time.localtime(int(start_time))
            goal_start_date = date(start_time[0], start_time[1], start_time[2])
            print "This goal is set on ", goal_start_date
            print "This goal will go for", goal_duration[-1].value
            days_passed = (current_date - goal_start_date).days
            #---Get the date when the goal will end
            goal_end_date = goal_start_date + timedelta(days = betweendays)
            #--Find out whether the goal is still on. If it is still on, then show the chart and check how many days left. Beacuse there might be data from previous goal.
            days_left = 0
            days_target_needed_to_be_met = 0
            days_target_met = 0
            if goal_end_date >= current_date:
                show_chart = 1
                days_left = (goal_end_date - current_date).days
            email_id = self.get_evidence_new(context = ['Personal'], componentid="email")
            # Hard-coded demo fixtures per (masked) participant email below.
            if email_id[-1].value == "*****@*****.**" and g == "CurrentLevelGoal":
                days_passed = 5
                days_left = 2
                show_chart = 1
                days_target_needed_to_be_met = 0
                days_target_met = 0
            if email_id[-1].value == "*****@*****.**" and g == "StepGoal":
                days_passed = 0
                days_left = 12 * 7
                days_target_needed_to_be_met = 0
                days_target_met = 0
            if email_id[-1].value == "*****@*****.**" and g == "CurrentLevelGoal":
                days_passed = 7
                days_left = 0
                show_chart = 0
                days_target_needed_to_be_met = 0
                days_target_met = 0
            if email_id[-1].value == "*****@*****.**" and g == "StepGoal":
                days_passed = 25
                days_left = 9 * 7 + 3
                show_chart = 1
                #if goal_target_val[-1].flags[0] == "New":
                days_target_needed_to_be_met = 17
                days_target_met = 14
                #elif goal_target_val[-1].flags[0] == "Revised":
                #    days_target_needed_to_be_met = 0
                #    days_target_met = 0
            if email_id[-1].value == "*****@*****.**" and g == "CurrentLevelGoal":
                days_passed = 7
                days_left = 0
                show_chart = 0
            if email_id[-1].value == "*****@*****.**" and g == "StepGoal":
                days_passed = 11 * 7 + 2
                days_left = 5
                show_chart = 1
                #if goal_target_val[-1].flags[0] == "New":
                days_target_needed_to_be_met = 11 * 7 + 2
                days_target_met = 70
            print "Days:::",days_passed, days_left
            new_goal = goals_set(g, goal_target_v, goal_target_f, show_chart, days_passed, days_left, days_target_needed_to_be_met, days_target_met)
            app_goal_list.append(self.todict(new_goal, new_goal.name))
            goalcontext.remove(g)
# Scratch probes of the time module: grab the epoch timestamp, convert it to a
# local struct_time, and print the current moment with a verbose format string.
import time

now = time.time()           # seconds since the epoch (float)
tm = time.localtime(now)    # broken-down local time (struct_time)

fmt = "It's %A, %B, %d, %Y, local time %I:%M:%S%p"
t = time.localtime()
print(time.strftime(fmt, t))

# A fixed calendar date kept around for further strftime experiments.
some_day = date(2017, 2, 4)
# datetime.time demo: build a fixed time-of-day and print each of its fields.
print("Current day:", today.day)
from datetime import time
a = time(11, 34, 56, 4567)
print(a)
print("hour =", a.hour)
print("minute =", a.minute)
print("second =", a.second)
print("microsecond =", a.microsecond)
'''
TIME MODULE :
'''
# NOTE: this import rebinds the name `time` from datetime.time back to the module.
import time
t = time.localtime()
current_time = time.strftime("%H:%M:%S", t)
print(current_time)
import time
print("This is printed immediately.")
time.sleep(2.4)
print("This is printed after 2.4 seconds.")
#DIGITAL CLOCK
# Print the current local time once per loop iteration, forever.
import time
while True:
    localtime = time.localtime()
    result = time.strftime("%I:%M:%S %p", localtime)
    print(result)
write_to_log("Pumpe an um: ") pumpe_an = True GPIO_14_on() time.sleep(20) #andernfalls ist die Pumpe aus else: if pumpe_an == True: write_to_log("Pumpe aus um: ") pumpe_an = False GPIO_14_off() #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ else: if feierabend == False: feierabend = True GPIO_14_off() pause = True write_to_log("Feierabend: ") #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #solange in der Schleife bis aktuelle Uhrzeit mindestens 9:00 Uhr while pause: now = time.localtime() if int(datetime.now().strftime("%H")) > 8 and int( datetime.now().strftime("%H")) < 17: pause = False time.sleep(1) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ time.sleep(1)
def get_historic_data_with_target(self, fbt, form, goal_type):
    """Build per-day activity totals against each goal target and return them
    as a JSON string: a list of dicts keyed by
    ('year', 'month', 'day', 'activity', 'goal').

    fbt       -- not referenced in this body; presumably kept for the caller's
                 signature (TODO confirm)
    form      -- Fitbit component id whose evidence list is totalled
    goal_type -- "StepGoal", "ModerateActivityGoal" or "IntenseActivityGoal"
    """
    import datetime
    today = datetime.datetime.now()
    current_date = date(today.year, today.month, today.day)  # NOTE(review): computed but never used below
    # Keys for each per-day record emitted at the bottom of the day loop.
    keys = ('year','month','day','activity','goal')
    #-- Look for the goal type
    context = ['Goals','Health']
    context.append(goal_type)
    #-- Take all goal targets so far
    goal_target_val = self.personis_um.get_evidence_new(context = context, componentid="target_value")
    #-- Take latest goal duration..how long you will pursue the goal
    goal_duration = self.personis_um.get_evidence_new(context=context, componentid="goal_duration")
    index = 0  # NOTE(review): unused -- its increment below is commented out
    # Raw Fitbit activity evidence for the requested component.
    evdlist = self.personis_um.get_evidence_new(context=['Devices','Fitbit','Activity'], componentid=form)
    print "Number of goals:", len(goal_target_val)
    datadict_list = []
    email_id = self.personis_um.get_evidence_new(context=['Personal'], componentid="email")
    # One pass per goal target; each pass walks the days of that goal's window.
    for gindex in range(len(goal_target_val)):
        print "Print data so far"
        print json.dumps(datadict_list)
        # Target value is the first space-separated token of the evidence value.
        goal_target = (goal_target_val[gindex].value).split(' ')[0]
        print "Looking data from target ",goal_target
        # The evidence timestamp marks when this goal was set.
        start_time = time.localtime(int(goal_target_val[gindex].time))
        goal_start_date = date(start_time[0], start_time[1], start_time[2])
        print "This goal is set on ", goal_start_date
        #--How many days in between the goal set date and goal finish deadline
        if goal_duration[gindex].value != "no change":
            betweendays = self.get_days_in_between(goal_duration[gindex].value)
        else:
            # "no change" inherits the previous goal's duration.
            betweendays = self.get_days_in_between(goal_duration[gindex-1].value)
        goal_end_date = goal_start_date + timedelta(days = betweendays)
        # If a newer goal starts before this goal's deadline, truncate this
        # goal's window at the newer goal's start date.
        if gindex < len(goal_target_val)-1:
            goal_target_next = (goal_target_val[gindex+1].value).split(' ')[0]
            next_start_time = time.localtime(int(goal_target_val[gindex+1].time))
            next_goal_start_date = date(next_start_time[0], next_start_time[1], next_start_time[2])
            print "Next goal is starting on ", next_goal_start_date
            if next_goal_start_date < goal_end_date:
                betweendays = (next_goal_start_date - goal_start_date).days
        print "Pursue this goal till %s for %d days" % (goal_end_date,betweendays)
        #index = index + 1
        prev_date = goal_start_date
        interval = betweendays
        # Walk each day of the goal window and total that day's activity.
        for i in range(1,interval):
            newval = 0
            #-- Loop through the evidence list
            for j in range(len(evdlist)):
                #-- If this evidence is not already visited and
                #-- check the flag if the evidence is minute data (not summary data)
                if evdlist[j].value != -1:# and evdlist[j].flags[0] == 'minute':
                    tt = time.localtime(int(evdlist[j].time))
                    track_date = date(tt[0],tt[1],tt[2])
                    if track_date == prev_date:
                        #print evdlist[j]
                        if goal_type == "StepGoal":
                            # Steps: sum the raw values for the day.
                            newval = newval + evdlist[j].value
                            evdlist[j].value = -1 #-- mark as visited
                        elif goal_type == "ModerateActivityGoal":
                            # Moderate activity: count evidence entries at level 2.
                            if evdlist[j].value == 2:
                                newval = newval + 1
                            evdlist[j].value = -1 #-- mark as visited
                        elif goal_type == "IntenseActivityGoal":
                            # Intense activity: count evidence entries at level 3.
                            if evdlist[j].value == 3:
                                newval = newval + 1
                            evdlist[j].value = -1 #-- mark as visited
                        #print "Year:%d, Month:%d, Day:%d, Value: %d" %(prev_date.year,prev_date.month,prev_date.day,evdlist[j].value)
            #-----------------------alex personis user study
            # Hard-coded per-participant overrides (email addresses redacted).
            if email_id[-1].value == "*****@*****.**" and goal_type == "StepGoal":
                if prev_date.day == 4 and prev_date.month == 2:
                    newval = 5902
            if email_id[-1].value == "*****@*****.**" and goal_type == "StepGoal":
                newval = 0
            if email_id[-1].value == "*****@*****.**" and goal_type == "StepGoal":
                if prev_date.day == 4 and prev_date.month == 2:
                    newval = 5902
                if prev_date.day == 10 and prev_date.month == 2:
                    newval = 0
                if prev_date.day == 11 and prev_date.month == 2:
                    newval = 0
                if prev_date.day == 12 and prev_date.month == 2:
                    newval = 0
            # Only emit a record for days with non-zero activity.
            if newval != 0:
                tag_list=list()
                tag_list.append(prev_date.year)
                tag_list.append(prev_date.month)
                tag_list.append(prev_date.day)
                tag_list.append(newval)
                tag_list.append(string.atoi(goal_target))
                #print "Year:%d, Month:%d, Day:%d, Value: %d" %(prev_date.year,prev_date.month,prev_date.day,newval)
                data_dictionary = dict(itertools.izip(keys, tag_list))
                datadict_list.append(data_dictionary)
            prev_date = prev_date + timedelta(days=1)
    print json.dumps(datadict_list)
    return json.dumps(datadict_list)
def TimeFromTicks(ticks):
    """Construct a Time value from a POSIX timestamp (DB-API style helper)."""
    import time
    hour, minute, second = time.localtime(ticks)[3:6]
    return Time(hour, minute, second)
while True: robot = noi.Recognizer() # biến tự đặt with noi.Microphone() as mic: print(" chào Hoàng Bạn cần gì") audio = robot.listen(mic) #biến tự đặt print("loading...") try: you = robot.recognize_google(audio, language='vi-VN') except: you = "tôi không nghe rõ mời bạn thử lại" print("Hoang: " + you) if "giờ" in you: time1 = time.localtime() you = time.strftime("%I:%M:%S %p", time1) print("bot: ", you) elif "ngày" in you: today = str(date.today()) you = today print("bot: ", you) elif "thời tiết" in you: r = requests.get( 'http://api.openweathermap.org/data/2.5/weather?q=DaNang&APPID=85cedb588bf04ea4df4d757ddeaadff2' ).json() thoitiet1 = r['weather'][0]['description'] print('description: ', thoitiet(thoitiet1)) you = thoitiet(thoitiet1) # continue elif "Google" in you:
def clock(self):
    """Refresh the wake-button label with the current local time, once per second, forever."""
    fmt = "%a, %d %b %Y %H:%M:%S"
    while True:
        time.sleep(1)
        label = strftime(fmt, time.localtime())
        self.wakeButton.setText(label)
def clockwatcher_main():
    """Daemon loop: once a minute, send or update a signed "It is now HH:MM"
    notification to every watcher address in /etc/clockwatcher/clockwatcherd.cfg,
    merging new addresses from newwatchers.cfg and pruning addresses that
    respond 404.
    """
    syslog.syslog("clockwatcherd: starting clockwatcher_main")
    lifetime = timedelta(days=1) #Notif expiration delta
    s = Signer("/etc/clockwatcher/shiny.private", "shiny")
    addrlist=[]  # watcher addresses; entries are blanked (""), not deleted, mid-loop
    updates={}   # addr -> notification id, for in-place updates
    with open("/etc/clockwatcher/clockwatcherd.cfg","r") as cfg:
        for line in cfg:
            addrlist.append(line[:-1]) #remembering to remove trailing \n
    while 1:
        # Synchronize to next whole minute
        starttime = time.localtime()
        time.sleep(60-starttime.tm_sec)
        currtime = datetime.now()+ timedelta(seconds=30) # Force rounding in case we're early
        timemsg = currtimestrftime = currtime.strftime("It is now %H:%M") if False else currtime.strftime("It is now %H:%M")
        notif = Notification(4, lifetime, timemsg, timemsg + " and all is well") # Need to add expiration here
        notif.prepare(s)
        # For now, minimizing the possibility of a collision between this daemon and new authorizations coming in
        # by reading the additional authorizations from a separate file and adding them on here. Only the daemon
        # touches the main clockwatcherd.cfg file.
        rewrite = False
        try:
            with open("/etc/clockwatcher/newwatchers.cfg","r") as cfg:
                for line in cfg:
                    newaddr = line
                    if newaddr not in addrlist: #Handle unlikely duplicates
                        addrlist.append(newaddr)
                        rewrite = True
        except IOError:
            # A missing newwatchers.cfg is the normal case; nothing to merge.
            pass
        except:
            syslog.syslog("clockwatcherd: Unknown error opening newwatchers file")
            quit()
        if rewrite:
            cfg=open("/etc/clockwatcher/newwatchers.cfg","w") #Clobber newwatchers file
            cfg.close()
            with open("/etc/clockwatcher/clockwatcherd.cfg","w") as cfg: #Update config with new watchers
                for idx in range(len(addrlist)):
                    if addrlist[idx] != "":
                        cfg.write(addrlist[idx])
                        cfg.write("\n")
            rewrite = False
        # Deliver this minute's notification to every (non-blank) watcher.
        for idx in range(len(addrlist)):
            notaddr = addrlist[idx]
            if notaddr == "":
                continue
            if notaddr in updates: #update an existing notif if possible
                notid = updates[notaddr]
                status = notif.update(notid)
                if status == 404: #if 404 delete notid from updates
                    del updates[notaddr]
            if notaddr not in updates: #not an else because it could have just been removed
                # TODO: Handle exceptions (can't connect, etc.) here
                (notid, status) = notif.send(notaddr) #Need to get feedback on send failures, delete notaddr
                if status == 404:
                    addrlist[idx]="" #Don't delete entry from addrlist inside loop, just blank it
                    rewrite = True #Disk copy of list needs updating
                elif status == 200:
                    updates[notaddr] = notid
        if rewrite: #Update disk copy of list, removing any blank addresses
            with open("/etc/clockwatcher/clockwatcherd.cfg","w") as cfg:
                for idx in range(len(addrlist)):
                    if addrlist[idx] != "":
                        cfg.write(addrlist[idx])
                        cfg.write("\n")
def get_now_time():
    """Return the current local time as a time.struct_time."""
    return time.localtime(time.time())
def TimeFromTicks(ticks):
    """DB-API style helper: build a Time from seconds since the epoch."""
    import time
    tm = time.localtime(ticks)
    return Time(tm.tm_hour, tm.tm_min, tm.tm_sec)
def AddWebBookMark(self, folder , article_name , article_title , article_url ,article_desc):
    """Invoke Jmeter passing the right arguments .

    Normalises embedded newlines in the article fields, then shells out to the
    JMeter batch file with the fields supplied as -J properties against the
    InstaP.jmx test plan; the JTL log file name is timestamped.
    """
    # Newline handling: <BR/> in name/title/desc, stripped entirely from the URL.
    article_name , article_title , article_url ,article_desc = re.sub('\n' , '<BR/>' , article_name) ,re.sub('\n' , '<BR/>' ,article_title) , re.sub('\n' , '' ,article_url) ,re.sub('\n' , '<BR/>' ,article_desc)
    # NOTE(review): the fields are interpolated directly into a shell command;
    # quotes or shell metacharacters in any field could break or inject into the
    # command line. If inputs are untrusted, consider subprocess with an
    # argument list instead of os.system.
    cmd = "C:/java/apache-jmeter-2.6/bin/jmeter.bat -Jinsta_user=\"%s\" -Jinsta_password=\"%s\" -Jfname=\"%s\" -Jarticle_name=\"%s\" -Jarticle_title=\"%s\" -Jarticle_url=\"%s\" -Jarticle_desc=\"%s\" -n -t InstaP.jmx -l log_%s.jtl " %( self.insta_user , self.insta_password , folder , article_name , article_title , article_url ,article_desc , time.strftime("%Y%m%d%H%M%S", time.localtime()) )
    print cmd
    os.system(cmd)