def check_date(self, starttime, endtime):
    # Check that timestamps are valid and that starttime < endtime
    try:
        time.strptime(starttime, "%Y-%m-%dT%H:%M:%S")
        time.strptime(endtime, "%Y-%m-%dT%H:%M:%S")
    except ValueError as error:
        print(error)
        return False
    else:
        # convert timestamps to seconds
        start = datetime.strptime(starttime, "%Y-%m-%dT%H:%M:%S")
        end = datetime.strptime(endtime, "%Y-%m-%dT%H:%M:%S")
        start = time.mktime(start.timetuple())
        end = time.mktime(end.timetuple())
        if (end - start) > 168 * 60 * 60:  # 168 hours in seconds
            self.debug("Time interval is more than 168 hours")
            self.debug("Adjust endtime to starttime + 12 hours")
            start = datetime.strptime(starttime, "%Y-%m-%dT%H:%M:%S")
            end = start + timedelta(hours=12)
            endtime = end.strftime('%Y-%m-%dT%H:%M:%S')
            starttime = start.strftime('%Y-%m-%dT%H:%M:%S')
            self.debug("starttime: " + starttime)
            self.debug("endtime: " + endtime)
            return starttime, endtime
        elif (end - start) < 0:
            self.debug("endtime < starttime")
            starttime, endtime = self.adjust_date(starttime, endtime)
            self.debug("Starttime is greater than endtime -> adjust starttime")
            self.debug("Starttime: " + starttime)
            self.debug("Endtime: " + endtime)
            return starttime, endtime
        else:
            return starttime, endtime
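# A minimal standalone sketch (assumed names) of the same window check done
# with datetime arithmetic instead of mktime round-trips; it swaps the
# endpoints rather than calling an adjust_date() helper:
from datetime import datetime, timedelta

def clamp_window(starttime, endtime, fmt="%Y-%m-%dT%H:%M:%S"):
    start = datetime.strptime(starttime, fmt)
    end = datetime.strptime(endtime, fmt)
    if end - start > timedelta(hours=168):
        end = start + timedelta(hours=12)  # same 12-hour fallback as above
    elif end < start:
        start, end = end, start
    return start.strftime(fmt), end.strftime(fmt)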
def berekenBedrag(inrijtijd, uitrijtijd):
    # berekenBedrag = "calculate amount": parking fee from entry (inrijtijd)
    # and exit (uitrijtijd) times, capped at the day rate (dagTarief)
    fmt = '%H:%M'
    d1 = datetime.strptime(inrijtijd, fmt)
    d2 = datetime.strptime(uitrijtijd, fmt)
    # Convert to Unix timestamps
    d1_ts = time.mktime(d1.timetuple())
    d2_ts = time.mktime(d2.timetuple())
    uurTarief = 0  # hourly rate
    dagTarief = 0  # day rate
    minuten = int(d2_ts - d1_ts) / 60  # parked minutes
    for tarief in Rest_Service.getTarieven(1)['response']:
        if tarief['type'] == 'Uur':
            uurTarief = tarief['waarde']
        if tarief['type'] == 'Dag':
            dagTarief = tarief['waarde']
    tariefMinuten = float(uurTarief) / 60  # rate per minute
    if (minuten * tariefMinuten) > float(dagTarief):
        return float(dagTarief)
    else:
        return minuten * tariefMinuten
def fourth_querry():
    # mean time between messages: (t_max - t_min) / (count - 1)
    tmax, tmin = third_querry()
    dmax = datetime.strptime(tmax, "%Y-%m-%d %H:%M:%S")
    secondmax = time.mktime(dmax.timetuple())
    dmin = datetime.strptime(tmin, "%Y-%m-%d %H:%M:%S")
    secondmin = time.mktime(dmin.timetuple())
    all_plithos_msg = db.things.find().count()  # total number of messages
    deltatimemean = (secondmax - secondmin) / (all_plithos_msg - 1)
    return deltatimemean
def add_new_evidence(self, context, data=None):
    try:
        print data, context
        componentid, value, flags, comment, user_date, useby_date = data.split("_")
        context = context.split("/")
        ev = client.Evidence(source=self.username, evidence_type="explicit", value=value)
        if comment != 'None':
            ev.comment = comment
        if flags != 'None':
            ev.flags = flags
        if user_date != 'None':
            ev.time = time.mktime(time.strptime(user_date, '%d/%m/%Y'))
        if useby_date != 'None':
            ev.useby = time.mktime(time.strptime(useby_date, '%d/%m/%Y'))
        self.um.tell(context=context, componentid=componentid, evidence=ev)
        return True
    except Exception, e:
        return False
def get_new_value(r1, rr2, dtx):
    # linear interpolation: given two (timestamp, value) rows r1 and rr2,
    # estimate the value at time dtx, clamped at zero
    str_d_1 = r1[0]
    str_d_2 = rr2[0]
    print "str1 " + str_d_1
    dt = datetime.datetime.strptime(str_d_1.replace('"', ''), "%Y-%m-%d %H:%M:%S")
    x1 = time.mktime(dt.timetuple())
    dt = datetime.datetime.strptime(str_d_2.replace('"', ''), "%Y-%m-%d %H:%M:%S")
    x2 = time.mktime(dt.timetuple())
    x = time.mktime(dtx.timetuple())
    y1 = float(r1[1])
    y2 = float(rr2[1])
    # y = y2 + (y1 - y2) / (x1 - x2) * (x - x2)
    y = ((y1 - y2) / (x1 - x2)) * x + y2 - (((y1 - y2) / (x1 - x2)) * x2)
    if y < 0:
        y = 0
    return y
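# The same interpolation as a minimal sketch with numpy (assumed available);
# np.interp additionally clamps to the endpoint values outside [x1, x2]:
import numpy as np

def interp_at(x, x1, y1, x2, y2):
    # xp must be increasing for np.interp, so sort the two points by x
    (xa, ya), (xb, yb) = sorted([(x1, y1), (x2, y2)])
    return max(0.0, float(np.interp(x, [xa, xb], [ya, yb])))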
def __oggOrderFinished(st, ed, recover=False):
    oggfinished_all = pd.DataFrame(columns=['order_time', 'order_day', 'ogg_orders_finish',
                                            'agg_orders_finish_dserv', 'city_id', 'serv_type'])
    if recover:
        et = st
    else:
        et = ed - 600
        st = int(time.mktime(datetime.strptime(
            time.strftime('%Y-%m-%d', time.localtime(st)), '%Y-%m-%d').timetuple()))
    while et < ed:
        msql = '''
            select date_format(from_unixtime(floor({et}/600)*600), '%Y-%m-%d %H:%i:00') as order_time,
                date_format(from_unixtime({et}), '%Y-%m-%d 00:00:00') as order_day,
                sum(orders_finish) as ogg_orders_finish,
                sum(orders_finish_dserv) as agg_orders_finish_dserv,
                city_id,
                serv_type
            from oride_orders_status_10min
            where order_time >= from_unixtime({st}) and order_time < from_unixtime({et}+600)
            group by city_id, serv_type
        '''.format(st=st, et=et)
        logging.info(msql)
        et += 600
        oggfinished = pd.read_sql_query(msql, bidb_conn)
        oggfinished_all = oggfinished_all.append(oggfinished, ignore_index=True)
    __data_to_mysql(
        bidb,
        oggfinished_all.values.tolist(),
        ['order_time', 'daily', 'agg_orders_finish', 'agg_orders_finish_dserv',
         'city_id', 'serv_type'],
        'agg_orders_finish=values(agg_orders_finish),agg_orders_finish_dserv=values(agg_orders_finish_dserv)'
    )
def _parserByWeek(self, item, offset=1):
    # "%Y %W %w" parses year + week number + weekday (1 = Monday);
    # strftime("%s000") renders epoch milliseconds ("%s" is a
    # platform-dependent, glibc-only directive)
    return (
        offset + 2,
        datetime.fromtimestamp(
            time.mktime(time.strptime("{} {} 1".format(item[offset + 0], item[offset + 1]),
                                      "%Y %W %w"))
        ).strftime("%s000"),
    )
def dma_millis(dia, mes, ano):
    # dia/mes/ano = day/month/year; combines the given date with the
    # current wall-clock time and returns epoch milliseconds
    d = date(ano, mes, dia)
    t = datetime.now().time()
    dt = datetime.combine(d, t)
    millis = time.mktime(dt.timetuple()) * 1000 + dt.microsecond / 1000
    return int(millis)
def check_revisions(db):
    # get total amount of articles
    n_articles = db.articles().count()
    # get chunks of 100 items per query
    per_page = 100
    # total amount of iterations
    total = 1 + n_articles / per_page
    for page in xrange(1, total):
        # Get last revision of every article. Paginate on chunks of 100
        pipe = [
            {'$group': {'_id': "$title", 'date': {'$max': "$timestamp"}}},
            {'$skip': (page - 1) * per_page},
            {'$limit': per_page},
        ]
        cursor = db.revisions().aggregate(pipeline=pipe)
        last_rev = db.find_last_rev()
        for document in cursor:
            # Set revision date to YYYY-MM-DD 00:00:00
            date = document['date'].replace(hour=0, minute=0, second=0)
            # Convert to integer
            date_integer = int(time.mktime(date.timetuple()))
            # Verify if article should be revisited
            should_revisit = calculate_v1(date_integer)
            if should_revisit:
                # Revisit article. Extract revisions. Add to celery queue
                extract_article.delay(document['_id'])
def datetime_to_unixtime(date_time):
    """
    datetime -> unixtime
    :param date_time:
    :return:
    """
    return int(time.mktime(date_time.timetuple()))
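# Note: mktime() interprets the timetuple as *local* time, and timetuple()
# drops microseconds. A minimal sketch of a UTC-based variant (assuming a
# naive datetime that represents UTC):
import calendar

def utc_datetime_to_unixtime(date_time):
    # calendar.timegm is the UTC counterpart of time.mktime
    return calendar.timegm(date_time.utctimetuple())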
def save(self, *args, **kwargs):
    if self.scheduled_datetime is not None:
        import datetime
        import time
        d = datetime.datetime(self.scheduled_datetime.year,
                              self.scheduled_datetime.month,
                              self.scheduled_datetime.day,
                              self.scheduled_datetime.hour,
                              self.scheduled_datetime.minute,
                              self.scheduled_datetime.second)
        epoch = time.mktime(d.timetuple())
        self.timestamp = int(epoch)
    if self.calc is False:
        self.calc = True
        obj = queue_statistics.objects.get(username=self.username,
                                           selected_account=self.provider)
        obj.left = obj.left - 1
        obj.save()
        init_noti(self.username, 199)
    if self.hit == True:
        obj = queue_statistics.objects.get(username=self.username,
                                           selected_account=self.provider)
        obj.left = obj.left + 1
        obj.save()
        init_noti(self.username, 200)
    super().save(*args, **kwargs)
def date2timestamp(dt):
    # datetime to timestamp
    import time
    if not isinstance(dt, datetime):
        return dt
    timestamp = time.mktime(dt.timetuple()) + dt.microsecond / 1e6
    return timestamp
def extractPublication(p):
    au = "0"
    if len(p.author_ids) > 0:
        au = p.author_ids[0]
    dt = datetime.datetime(p.year, 1, 1, 1, 1)
    t = int(time.mktime(dt.timetuple()))
    from terms.core import terms
    key_terms = terms[str(p.id)].keys()
    kt = ""
    for k in key_terms:
        if kt != "":
            kt += ","
        k = k.lower()
        kt += k
    children = []
    children_ids = []
    parents = []
    parents_ids = []
    for x in p.cited_by_pubs:
        children.append(str(x))
        children_ids.append(x)
    for x in p.cite_pubs:
        parents.append(str(x))
        parents_ids.append(x)
    y = [str(p.id), str(p.id), str(au), children, 0, t, t,
         p.n_citations, p.n_citations, p.n_citations,
         p.authors, p.title, kt]
    return y, children_ids, parents_ids, key_terms
def insert_db(self, lightningArray):
    self.debug("Connect to database")
    # get connection settings from cnf-file
    try:
        with open(CNFDIR + 'controller.cnf') as f:
            content = f.readlines()
        # remove white spaces and \n
        content = [x.strip() for x in content]
        # skip lines starting with # (comments); save each parameter
        # as a ['key', 'value'] pair
        for x in content:
            if x[0] != '#':
                param = x.split('=')
                self.debug(param)
                if param[0] == 'host':
                    host = x.split('=', 1)
                if param[0] == 'user':
                    user = x.split('=', 1)
                if param[0] == 'password':
                    password = x.split('=', 1)
                if param[0] == 'database':
                    database = x.split('=', 1)
                if param[0] == 'port':
                    port = x.split('=', 1)
    except Exception as error:
        print(error)
    cnx = mysql.connector.connect(host=host[1], port=port[1], user=user[1],
                                  password=password[1], db=database[1])
    cursor = cnx.cursor(prepared="true")
    # add corresponding epoch time to data array
    lightning = list()
    for i in lightningArray:
        data = i.split()
        epoch = int(time.mktime(
            datetime.strptime(data[0], "%Y-%m-%dT%H:%M:%SZ").timetuple()))
        data.append(epoch)
        lightning.append(data)
    add_lightning = (
        "INSERT INTO salama"
        "(time, lat, lon, peakcurrent, multiplicity, cloudindicator, ellipsemajor, epoc) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
    # Insert lightning information
    self.debug("Add data to database")
    cursor.executemany(add_lightning, lightning)
    cnx.commit()
    cursor.close()
    cnx.close()
def add_evidence(self, context=[], component=None, value=None, comment=None,
                 usertime=None):
    print component, value
    ev = Personis_base.Evidence(source="Personis_Access.py",
                                evidence_type="explicit", value=value)
    ev.comment = comment
    if usertime:
        import time
        ev.time = time.mktime(time.strptime(usertime, '%Y-%m-%d %H:%M:%S'))
    self.um.tell(context=context, componentid=component, evidence=ev)
def datetime_to_unix(dt):
    """
    Convert a Python datetime object to unixtime in milliseconds
    (the timetuple is interpreted in local time; tzinfo is ignored).

    An alternative implementation is datetime_to_epoch(); this function is
    preferred, and callers of datetime_to_epoch() can migrate to it gradually.
    """
    return time.mktime(dt.timetuple()) * 1e3 + dt.microsecond / 1e3
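# For timezone-aware datetimes a stdlib one-liner is safer, since
# datetime.timestamp() honours tzinfo, unlike mktime(dt.timetuple()).
# A minimal sketch (Python 3.3+):
def aware_datetime_to_unix_ms(dt):
    return dt.timestamp() * 1e3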
def get_driver_num(**op_kwargs):
    driver_num = {}
    res = []
    conn = get_db_conn('mysql_oride_data_readonly')
    mcursor = conn.cursor()
    driver_id = -1
    results = tuple()
    driver_dic = {}
    while True:
        sql = query_driver_city_serv.format(id=driver_id)
        logging.info(sql)
        mcursor.execute(sql)
        conn.commit()
        tmp = mcursor.fetchall()
        if not tmp:
            break
        results += tmp
        driver_id = tmp[-1][0]
    mcursor.close()
    conn.close()
    for data in results:
        driver_dic[data[0]] = ",".join([str(data[1]), str(data[2])])
    redis_conn = RedisHook(redis_conn_id='pika_85').get_conn()
    ts = op_kwargs['ts']
    dt, h = ts.split('T')
    dt = dt + ' ' + h.split('+')[0]
    time_array = time.strptime(dt, "%Y-%m-%d %H:%M:%S")
    timestamp = int(time.mktime(time_array))
    a_member = set()
    no_member = set()
    dt_start = time.strftime('%Y%m%d%H%M', time.localtime(timestamp))
    for i in range(0, 10):
        dt = time.strftime('%Y%m%d%H%M', time.localtime(timestamp + i * 60))
        a_member = a_member.union(set(redis_conn.smembers(active_a_driver % dt)))
        no_member = no_member.union(set(redis_conn.smembers(active_no_driver % dt)))
    for mem in a_member:
        tmp = driver_dic.get(int(mem), '0,0')
        if tmp not in driver_num:
            driver_num[tmp] = {"a_mem": 0, "no_mem": 0}
        driver_num[tmp]["a_mem"] += 1
    for mem in no_member:
        tmp = driver_dic.get(int(mem), '0,0')
        if tmp not in driver_num:
            driver_num[tmp] = {"a_mem": 0, "no_mem": 0}
        driver_num[tmp]["no_mem"] += 1
    for k, v in driver_num.items():
        info = k.split(",")
        res.append([int(info[0]), int(info[1]), dt_start + '00', v["a_mem"], v["no_mem"]])
    conn = get_db_conn('mysql_bi')
    mcursor = conn.cursor()
    mcursor.executemany(insert_driver_num, res)
    logging.info('insert num %s, data %s', len(res), str(res))
    conn.commit()
    mcursor.close()
    conn.close()
def calculate_v1(t0):
    ti = int(time.mktime(datetime.now().timetuple()))
    result = 1 - (t0 / ti)
    return result > 0
def p_http(t):
    from email import Utils
    import time
    tt = Utils.parsedate_tz(t)
    ts = time.mktime(tt[:9])
    utc = 0  # (datetime.utcnow() - datetime.now()).seconds
    if tt[9] is not None:
        utc += tt[9]
    return datetime.fromtimestamp(ts - utc)
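# On Python 3 the same HTTP/email date parse is a stdlib one-liner;
# a minimal sketch:
from email.utils import parsedate_to_datetime

def p_http_py3(t):
    # returns an aware datetime when the header carries a zone offset
    return parsedate_to_datetime(t)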
def get_unix_time(date_time_str, date_time_format='%Y-%m-%d-%H:%M:%S'):
    """
    Convert a date-time string to unixtime.
    """
    if isinstance(date_time_str, str):
        return int(time.mktime(
            datetime.datetime.strptime(date_time_str, date_time_format).timetuple()))
    else:
        return date_time_str
def date2timestamp(dt):
    # datetime to timestamp
    if not isinstance(dt, datetime):
        return dt
    if six.PY34:
        return dt.replace(tzinfo=timezone.utc).timestamp()
    else:
        timestamp = time.mktime(dt.timetuple()) + dt.microsecond / 1e6
        return timestamp
def json_statistics(request):
    logger.debug("parsing logs")
    utils.exec_upri_config('parse_logs')
    truncate_date = connection.ops.date_trunc_sql('month', 'log_date')
    privoxy_qs = PrivoxyLogEntry.objects.extra({'month': truncate_date})
    privoxy_log = privoxy_qs.values('month').annotate(Count('pk')).order_by('month')
    dnsmasq_qs = DnsmasqBlockedLogEntry.objects.extra({'month': truncate_date})
    dnsmasq_log = dnsmasq_qs.values('month').annotate(Count('pk')).order_by('month')
    monthly = [[0] * 5, [0] * 5]
    now = time.localtime()
    months = [_localdate(datetime.fromtimestamp(
        time.mktime((now.tm_year, now.tm_mon - n, 1, 0, 0, 0, 0, 0, 0))), "F")
        for n in reversed(range(5))]
    for entry in privoxy_log:
        cur_month = _localdate(datetime.strptime(entry['month'], '%Y-%m-%d'), "F")
        monthly[1][months.index(cur_month)] = entry['pk__count']
    for entry in dnsmasq_log:
        cur_month = _localdate(datetime.strptime(entry['month'], '%Y-%m-%d'), "F")
        monthly[0][months.index(cur_month)] = entry['pk__count']
    privoxy_log = PrivoxyLogEntry.objects.values('url').annotate(Count('pk')).order_by('-pk__count')
    filtered_pages = list()
    dnsmasq_log = DnsmasqBlockedLogEntry.objects.values('url').annotate(Count('pk')).order_by('-pk__count')
    blocked_pages = list()
    for entry in privoxy_log[0:5]:
        # print entry
        filtered_pages.append({"url": entry['url'], "count": entry['pk__count']})
    for entry in dnsmasq_log[0:5]:
        # print entry
        blocked_pages.append({"url": entry['url'], "count": entry['pk__count']})
    today = datetime.now().date()
    total_blocked_queries = DnsmasqBlockedLogEntry.objects.count()
    today_blocked_queries = DnsmasqBlockedLogEntry.objects.filter(log_date__contains=today).count()
    pie1_data = [DnsmasqQueryLogEntry.objects.count() - total_blocked_queries,
                 total_blocked_queries]
    pie2_data = [DnsmasqQueryLogEntry.objects.filter(log_date__contains=today).count() - today_blocked_queries,
                 today_blocked_queries]
    return HttpResponse(json.dumps({'pie1_data': {'series': pie1_data},
                                    'pie2_data': {'series': pie2_data},
                                    'filtered_pages': filtered_pages,
                                    'blocked_pages': blocked_pages,
                                    'bar_data': {'labels': months, 'series': monthly}}),
                        content_type="application/json")
def test_fromtimestamp(self):
    import time

    # Try an arbitrary fixed value.
    # year, month, day = 1999, 9, 19
    year, month, day = 2000, 9, 19  ###
    ts = time.mktime((year, month, day, 0, 0, 0, 0, 0, -1))
    d = self.theclass.fromtimestamp(ts)
    self.assertEqual(d.year, year)
    self.assertEqual(d.month, month)
    self.assertEqual(d.day, day)
def get_date(self):
    try:
        t = datetime.now()
        print t
        EpochSeconds = time.mktime(t.timetuple())
        now = datetime.fromtimestamp(EpochSeconds, pytz.utc)
        now = now.astimezone(pytz.timezone('Australia/Sydney'))
        print "Now %s" % now
        return now
    except Exception, e:
        return e
def add_evidence(self, context=[], component=None, value=None, comment=None,
                 usertime=None, flags=[]):
    print "Component type %s" % type(component)
    print "Component %s, Value %s" % (component, value)
    ev = client.Evidence(source="Personis_Access.py", evidence_type="explicit",
                         value=value, flags=flags)
    ev.comment = comment
    if usertime:
        import time
        # if not usertime.time():
        #     ev.time = time.mktime(time.strptime(usertime, '%Y-%m-%d'))
        # else:
        ev.time = time.mktime(time.strptime(usertime, '%Y-%m-%d %H:%M:%S'))
    self.um.tell(context=context, componentid=component, evidence=ev)
def admin_dict(self, images_map):
    customer = self.customer.get()
    for item in self.items:
        item['images'] = images_map.get(item['id'], [])
    return {
        'order_id': self.order_id,
        'number': self.number,
        'address': self.address,
        'createdDate': int(time.mktime(self.created_in_iiko.timetuple())),
        'deliveryDate': int(time.mktime(self.date.timetuple())),
        'client_id': customer.customer_id,
        'phone': customer.phone,
        'client_name': customer.name,
        'client_custom_data': customer.custom_data,
        'comment': self.comment,
        'sum': self.sum,
        'items': self.items,
        'venue_id': self.delivery_terminal_id,
        'status': self.status,
        'cancel_requested': self.cancel_requested,
    }
def add_guest(request):
    eid = request.POST.get('eid', '')            # id of the related event
    realname = request.POST.get('realname', '')  # guest name
    phone = request.POST.get('phone', '')        # phone number
    email = request.POST.get('email', '')        # email address
    if eid == '' or realname == '' or phone == '':
        return JsonResponse({'status': 10021, 'message': 'parameter error'})
    result = Event.objects.filter(id=eid)
    if not result:
        return JsonResponse({'status': 10022, 'message': 'event id null'})
    result = Event.objects.get(id=eid).status
    if not result:
        return JsonResponse({
            'status': 10023,
            'message': 'event status is not available'
        })
    event_limit = Event.objects.get(id=eid).limit     # guest limit of the event
    guest_limit = Guest.objects.filter(event_id=eid)  # guests already added
    if len(guest_limit) >= event_limit:
        return JsonResponse({
            'status': 10024,
            'message': 'event number is full'
        })
    event_time = Event.objects.get(id=eid).start_time  # event start time
    timeArray = time.strptime(str(event_time), "%Y-%m-%d %H:%M:%S")
    e_time = int(time.mktime(timeArray))
    now_time = str(time.time())                        # current time
    ntime = now_time.split(".")[0]
    n_time = int(ntime)
    if n_time >= e_time:
        return JsonResponse({'status': 10025, 'message': 'event has started'})
    try:
        Guest.objects.create(realname=realname, phone=int(phone), email=email,
                             sign=0, event_id=int(eid))
    except IntegrityError:
        return JsonResponse({
            'status': 10026,
            'message': 'the event guest phone number repeat'
        })
    return JsonResponse({'status': 200, 'message': 'add guest success'})
def form_valid(self, form):
    bookinginfo_list = self.model.objects.all()
    date_format = '%Y-%m-%d %H:%M'
    new_start_time = str(self.request.POST['start_time'])
    new_start_day = new_start_time[:10]
    new_start_epoch = int(time.mktime(time.strptime(str(new_start_time), date_format)))
    new_end_time = str(self.request.POST['end_time'])
    new_end_epoch = int(time.mktime(time.strptime(str(new_end_time), date_format)))
    new_room_id = self.request.POST['room_id']
    for BookingInfo in bookinginfo_list:
        # the +9h offset appears to shift stored times into local time (UTC+9)
        if str(BookingInfo.start_time + timedelta(hours=9))[:10] == new_start_day \
                and BookingInfo.room_id == new_room_id:
            # reject if the new start falls inside an existing booking ...
            if int(time.mktime((BookingInfo.end_time + timedelta(hours=9)).timetuple())) > new_start_epoch \
                    and new_start_epoch >= int(time.mktime((BookingInfo.start_time + timedelta(hours=9)).timetuple())):
                return HttpResponseRedirect('/booking/fail')
            # ... or if an existing start falls inside the new booking
            elif new_end_epoch > int(time.mktime((BookingInfo.start_time + timedelta(hours=9)).timetuple())) \
                    and int(time.mktime((BookingInfo.start_time + timedelta(hours=9)).timetuple())) >= new_start_epoch:
                return HttpResponseRedirect('/booking/fail')
            else:
                pass
    form.instance.owner = self.request.user
    return super(BookingCreateView, self).form_valid(form)
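# The two rejection branches above reduce to the standard half-open interval
# overlap test; a minimal standalone sketch (epoch seconds assumed):
def intervals_overlap(start_a, end_a, start_b, end_b):
    # [start_a, end_a) and [start_b, end_b) intersect iff each starts
    # before the other ends
    return start_a < end_b and start_b < end_a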
def send_bid(req):
    if req.method == 'GET':
        return render(req, 'hello.html')
    if 'chain' in req.session:
        chain = req.session['chain']
        read = req.session['read']
        write = req.session['write']
        user = req.session['uid']
        txid = req.session['tx_id']
        uid = req.session['uid']
    u = req.POST['user']
    c = req.POST['chain']
    # n = req.POST['number']
    n = ''.join([random.choice(string.digits) for _ in range(4)])
    s = req.POST['start_price']
    r = req.POST['real_price']
    pro = req.POST['product']
    exp = req.POST['exp']
    days = range(1, 30)
    curve = "secp256k1"
    key1 = pyelliptic.ECC(curve=curve)
    key2 = pyelliptic.ECC(curve=curve)
    pubkey1 = key1.get_pubkey().hex()
    prikey1 = key1.get_privkey().hex()
    pubkey2 = key2.get_pubkey().hex()
    prikey2 = key2.get_privkey().hex()
    s2 = key1.encrypt(s, key1.get_pubkey()).hex()
    r2 = key2.encrypt(r, key2.get_pubkey()).hex()
    today = datetime.date.today()
    exp_time = str(datetime.date.today() + datetime.timedelta(days=int(exp)))
    message = {"Type": "bid", "User": u, "Chain": c, "Number": n,
               "Start_price": s2, "Real_price": r2, "Expiration_date": exp_time}
    me = json.dumps(message)
    result = OP_RETURN_store(me)
    re = result['txids'][0]
    # expiry as an epoch timestamp at the end of the expiration day
    aa = datetime.datetime.combine(datetime.date.today() + datetime.timedelta(days=int(exp)),
                                   datetime.time.max)
    da = time.mktime(aa.timetuple())
    bi, _ = Bid.objects.get_or_create(chain=c, number=n, txid=re,
                                      pub_key1=pubkey1, pri_key1=prikey1,
                                      pub_key2=pubkey2, pri_key2=prikey2,
                                      exp_time=da, product=pro)
    bi.save()
    template = get_template('bid.html')
    html = template.render(locals())
    return HttpResponse(html)
def get_next_date(self):
    key = datetime.datetime.now().strftime("%Y-%m-%d")
    if self.cache_storage.get(key, ''):
        print 'get from cache'
        return self.cache_storage.get(key)
    else:
        day = time.strftime('%Y-%m-%d', time.localtime(time.time()))
        day = time.strptime(day, "%Y-%m-%d")
        day = int(time.mktime(day))
        self.cache_storage[key] = (day + 86400)
        print 'get from calculator'
        return (day + 86400)
def pecera(id_pecera):
    # "pecera" = fish tank: fetch historical readings and plot them
    sql = """SELECT fecha, temperatura FROM registros_historicos;"""  # fecha = date
    cur.execute(sql)
    datos_temperatura = cur.fetchall()
    # note: this second query also selects temperatura, so datos_ph holds
    # temperature values; presumably the pH column was intended
    sql = """SELECT fecha, temperatura FROM registros_historicos;"""
    cur.execute(sql)
    datos_ph = cur.fetchall()
    print("datos_temperatura:\t", datos_temperatura)
    # convert rows to [epoch-milliseconds, value] pairs for the charts
    aux = []
    for elem in datos_temperatura:
        aux2 = [int(time.mktime(elem[0].timetuple()) * 1000), elem[1]]
        aux.append(aux2)
    datos_temperatura = aux
    aux = []
    for elem in datos_ph:
        aux2 = [int(time.mktime(elem[0].timetuple()) * 1000), elem[1]]
        aux.append(aux2)
    datos_ph = aux
    print("datos_temperatura : ", datos_temperatura)
    print("datos_ph : ", datos_ph)
    return render_template("pecera.html", datos_temperatura=datos_temperatura,
                           datos_ph=datos_ph)
def cal_summary_task(cur_day, dso_name):
    # refresh the completion times of the last 10 days of detail data sources
    console_res = subprocess.Popen(
        "hadoop fs -ls /user/hive/warehouse/dso.db/" + dso_name + " | tail -n 10",
        shell=True, stdout=subprocess.PIPE)
    console_res_str = console_res.stdout.read()
    dir_info_list = console_res_str.split("\n")
    for dir_info in dir_info_list:
        # e.g. ['drwxr-xr-x', '-', 'analysis', 'supergroup', '0', '2021-01-10', '15:00',
        #       '/user/hive/warehouse/dso.db/mobiledictclient_android/day=2021-01-09']
        str_list = re.split("\s+", dir_info)
        if len(str_list) > 7:
            # "day" partition of the dso detail data source
            dso_day = re.findall("day=([\d-]*)", str_list[-1])[0]
            # normally the data should land on the day after dso_day
            expect_day = (datetime.strptime(dso_day, "%Y-%m-%d") + oneday).strftime("%Y-%m-%d")
            expect_time = expect_day + " " + expect_hour + ":00:00"
            expect_ts = time.mktime(
                time.strptime(expect_day + " " + expect_hour, "%Y-%m-%d %H"))
            # actual completion time of the detail data source
            finish_day = str_list[5]
            finish_day_time = str_list[5] + " " + str_list[6]
            finish_ts = time.mktime(
                time.strptime(finish_day_time, "%Y-%m-%d %H:%M"))
            late_mins = 0
            status = "success"
            # Account for reruns:
            # 1. if the finish day matches the day after dso_day, compute the
            #    delay in minutes (late_mins stays 0 when on time);
            # 2. otherwise treat it as a rerun: status=overwrite and
            #    late_mins=-0.001 (a detail source might only land two days
            #    later; that should be rare, so assume a rerun for now)
            if finish_day != expect_day:
                status = "overwrite"
                late_mins = -0.001
            elif finish_ts > expect_ts:
                late_mins = (finish_ts - expect_ts) / 60
            task_info_list.append((dso_name, dso_day, expect_time,
                                   finish_day_time + ":00", late_mins, status))
    return task_info_list
def cancel_eway_bill(self):
    d1 = datetime.strptime(self.tokan_gen_time, "%Y-%m-%d %H:%M:%S")
    d1_ts = time.mktime(d1.timetuple())
    d2 = datetime.now()
    print("Before if condition****************")
    d2_ts = time.mktime(d2.timetuple())
    # only cancel when the token is older than 360 minutes (6 hours)
    if (int(d2_ts - d1_ts) / 60) > 360:
        # pdb.set_trace()
        print("Inside if condition****************")
        self.auth()
        # defining the api-endpoint
        API_ENDPOINT = "https://aspone.in/api/ewaybills/cancel_eway_bill"
        # API_ENDPOINT = self.from_name.cancel_url
        headers = {
            'content-type': 'application/json',
            'accept': 'application/json',
            'gstin': self.from_name.vat,
            'clientid': 'snf3f9e69dfa0b76b12',
            'client-secret': 'snfa6e36f63adad4db0'
        }
        data = {
            "eway_bill": {
                "ewbNo": self.eway_bill_no,
                "cancelRsnCode": 2,
                "cancelRmrk": "Cancelled the order"
            }
        }
        r = requests.post(url=API_ENDPOINT, headers=headers, data=json.dumps(data))
        self.status = 'cancel'
        pastebin_url = r.text
        print("The pastebin URL is:%s" % pastebin_url)
def _convert_obj(obj):
    """
    Convert a series to pytables values and Atom
    """
    if isinstance(obj, pd.DatetimeIndex):
        converted = obj.asi8
        return converted, 'datetime64', tb.Int64Atom()
    elif isinstance(obj, pd.PeriodIndex):
        converted = obj.values
        return converted, 'periodindex', tb.Int64Atom()

    inferred_type = lib.infer_dtype(obj)
    values = np.asarray(obj)

    if inferred_type == 'datetime64':
        converted = values.view('i8')
        return converted, inferred_type, tb.Int64Atom()
    if inferred_type == 'string':
        # TODO, am I doing this right?
        converted = np.array(list(values), dtype=np.bytes_)
        itemsize = converted.dtype.itemsize
        # for OBT, can't assume value will be right for future frame keys
        if itemsize < MIN_ITEMSIZE:
            itemsize = MIN_ITEMSIZE
            converted = converted.astype("S{0}".format(itemsize))
        return converted, inferred_type, tb.StringAtom(itemsize)
    elif inferred_type == 'unicode':
        # tables don't seem to support objects
        raise Exception("Unsupported inferred_type {0}".format(inferred_type))
    elif inferred_type == 'datetime':
        converted = np.array([(time.mktime(v.timetuple()) + v.microsecond / 1E6)
                              for v in values], dtype=np.float64)
        return converted, inferred_type, tb.Time64Atom()
    elif inferred_type == 'integer':
        converted = np.asarray(values, dtype=np.int64)
        return converted, inferred_type, tb.Int64Atom()
    elif inferred_type == 'floating':
        converted = np.asarray(values, dtype=np.float64)
        return converted, inferred_type, tb.Float64Atom()
    raise Exception("Unsupported inferred_type {0} {1}".format(inferred_type,
                                                               str(values[-5:])))
def graph(request, hash):
    filePath = '/mnt/vol/csvs/'
    yValues = []
    xValues = []
    xAxis = ''
    yAxis = ''
    thing = []
    valuesList = []
    keyWords = []
    # Read in csv file and gather information for passing to graph
    # with open('test.csv', 'r') as csvfile:  # For testing
    with open(filePath + hash + '.csv', 'r') as csvfile:
        reader = csv.DictReader(csvfile)
        xAxis = reader.fieldnames[0]
        yAxis = reader.fieldnames[1]
        keyword = reader.fieldnames[2]
        for row in reader:
            valuesList.append((row[xAxis], row[yAxis], row[keyword]))
    for item in valuesList:
        if item[2] not in keyWords:
            keyWords.append(item[2])
    yValuesList = []
    for word in keyWords:
        yTempValues = []
        for thing in valuesList:
            if thing[2] == word:
                try:
                    yTempValues.append(float(thing[1]))
                except ValueError:
                    yTempValues.append(-200)
                # Convert date to a format that can be sent to javascript
                date = datetime.strptime(thing[0], '%Y-%m-%d').date()
                timestamp = int(time.mktime(date.timetuple())) * 1000
                if timestamp not in xValues:
                    xValues.append(timestamp)
        yValuesList.append(yTempValues)
    context = {
        'xAxis': xAxis,
        'yAxis': yAxis,
        'xValues': xValues,
        'keywords': keyWords,
        'yValues': yValuesList,
    }
    return render(request, 'words/graph2.html', context)
def pre_save(self, model_instance, add):
    timestamp = self.get_timestamp(model_instance, add)
    score = self.get_score(model_instance, add)
    try:
        seconds = timestamp.timestamp()
    except AttributeError:
        # Must not be python>=3.3
        try:
            seconds = (timestamp - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds()
        except AttributeError:
            # Must not be python>=2.7
            seconds = time.mktime(timestamp.timetuple())
    value = self.calculate_heat(score, seconds)
    setattr(model_instance, self.attname, value)
    return super(HeatIndexField, self).pre_save(model_instance, add)
def test_more_ctime(self):
    # Test fields that TestDate doesn't touch.
    import time

    t = self.theclass(2002, 3, 2, 18, 3, 5, 123)
    self.assertEqual(t.ctime(), "Sat Mar  2 18:03:05 2002")
    # Oops!  The next line fails on Win2K under MSVC 6, so it's commented
    # out.  The difference is that t.ctime() produces " 2" for the day,
    # but platform ctime() produces "02" for the day.  According to
    # C99, t.ctime() is correct here.
    # self.assertEqual(t.ctime(), time.ctime(time.mktime(t.timetuple())))

    # So test a case where that difference doesn't matter.
    t = self.theclass(2002, 3, 22, 18, 3, 5, 123)
    self.assertEqual(t.ctime(), time.ctime(time.mktime(t.timetuple())))
def user_sign(request):
    eid = request.POST.get('eid', '')      # event id
    phone = request.POST.get('phone', '')  # guest phone number
    if eid == '' or phone == '':
        return JsonResponse({'status': 10021, 'message': 'parameter error'})
    result = Event.objects.filter(id=eid)
    if not result:
        return JsonResponse({'status': 10022, 'message': 'event id null'})
    result = Event.objects.get(id=eid).status
    if not result:
        return JsonResponse({
            'status': 10023,
            'message': 'event status is not available'
        })
    event_time = Event.objects.get(id=eid).start_time  # event start time
    timeArray = time.strptime(str(event_time), "%Y-%m-%d %H:%M:%S")
    e_time = int(time.mktime(timeArray))
    now_time = str(time.time())                        # current time
    ntime = now_time.split(".")[0]
    n_time = int(ntime)
    if n_time >= e_time:
        return JsonResponse({'status': 10024, 'message': 'event has started'})
    result = Guest.objects.filter(phone=phone)
    if not result:
        return JsonResponse({'status': 10025, 'message': 'user phone null'})
    result = Guest.objects.filter(phone=phone, event_id=eid)
    if not result:
        return JsonResponse({
            'status': 10026,
            'message': 'user did not participate in the conference'
        })
    result = Guest.objects.get(event_id=eid, phone=phone).sign
    if result:
        return JsonResponse({'status': 10027, 'message': 'user has sign in'})
    else:
        Guest.objects.filter(phone=phone).update(sign='1')
        return JsonResponse({'status': 200, 'message': 'sign success'})
def map_payment_intents(stripe: stripeapi, fn: Callable[[Dict[str, str]], None],
                        since: Optional[date] = None) -> int:
    """Calls a function on every PaymentIntent."""
    created = None
    if since:
        created = {'gte': int(time.mktime(since.timetuple()))}
    # iterate over all the payment intents
    pis = stripe.PaymentIntent.list(limit=100, created=created)
    count = 0
    for pi_instance in pis.auto_paging_iter():
        fn(_pi_to_dict(pi_instance))
        count += 1
    return count
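# Hypothetical usage sketch (the date is a placeholder and a configured
# Stripe API key is assumed): print and count every PaymentIntent created
# since 1 Jan 2023.
# processed = map_payment_intents(stripe, print, since=date(2023, 1, 1))
# print("processed", processed, "payment intents")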
def init_time():
    import ctypes
    import ctypes.util
    import time

    proc2 = subprocess.Popen('gphoto2 --set-config=iso=10000', shell=True,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    proc2.wait()
    proc = subprocess.Popen(
        'gphoto2 --capture-image-and-download --filename /home/pi/TL/datetime.jpg --force-overwrite',
        shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    proc.wait()
    res = proc.communicate()
    list_f = res[0]
    if list_f.find("debug") == -1:
        proc = subprocess.Popen('exiftool /home/pi/TL/datetime.jpg', shell=True,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        proc.wait()
        res = proc.communicate()
        find_Date = res[0].find("Date/Time Original")
        cam_datetime = res[0][find_Date + 34:find_Date + 53]
        # print cam_datetime
        f_datatime = datetime.strptime(cam_datetime, '%Y:%m:%d %H:%M:%S')
        # print f_datatime
        time_tuple = (f_datatime.year, f_datatime.month, f_datatime.day,
                      f_datatime.hour, f_datatime.minute, f_datatime.second, 0)
        # print time_tuple

        CLOCK_REALTIME = 0

        class timespec(ctypes.Structure):
            _fields_ = [("tv_sec", ctypes.c_long), ("tv_nsec", ctypes.c_long)]

        librt = ctypes.CDLL(ctypes.util.find_library("rt"))
        ts = timespec()
        ts.tv_sec = int(time.mktime(datetime(*time_tuple[:6]).timetuple()))
        ts.tv_nsec = time_tuple[6] * 1000000  # Millisecond to nanosecond
        # set the system clock from the camera's EXIF timestamp
        librt.clock_settime(CLOCK_REALTIME, ctypes.byref(ts))
    else:
        printlog("CAMERA DATA ERROR")
    TIMES = datetime.now()
    return TIMES
def to_dict(self):
    model = self
    output = {}
    for key, prop in model.properties().iteritems():
        value = getattr(model, key)
        if value is None or isinstance(value, SIMPLE_TYPES):
            output[key] = value
        elif isinstance(value, date):
            # Convert date/datetime to ms-since-epoch ("new Date()").
            # 'microsecond' (singular) is the datetime attribute; plain
            # dates fall back to 0
            ms = time.mktime(value.utctimetuple()) * 1000
            ms += getattr(value, 'microsecond', 0) / 1000
            output[key] = int(ms)
        elif isinstance(value, db.Model):
            output[key] = to_dict(value)
        else:
            raise ValueError('cannot encode ' + repr(prop))
    output["id"] = model.key().id()
    return output
def to_dict(self):
    # to_dict taken from posten-mock[1] by Johan Mjönes and the stack
    # overflow[2] answer by David Wilson. Thanks to both of you.
    # [1] https://github.com/nollbit/posten-mock
    # [2] http://stackoverflow.com/questions/1531501/json-serialization-of-google-app-engine-models
    model = self
    output = {}
    for key, prop in model.properties().iteritems():
        value = getattr(model, key)
        if value is None or isinstance(value, SIMPLE_TYPES):
            output[key] = value
        elif isinstance(value, date):
            # Convert date/datetime to ms-since-epoch ("new Date()").
            ms = time.mktime(value.utctimetuple()) * 1000
            ms += getattr(value, 'microsecond', 0) / 1000
            output[key] = int(ms)
        elif isinstance(value, db.Model):
            output[key] = to_dict(value)
        # else:
        #     raise ValueError('cannot encode ' + repr(prop))
    output["id"] = model.key().id()
    return output
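# The date branch above distilled into a standalone helper, as a minimal
# sketch; note that utctimetuple() + mktime() mixes UTC fields with the
# local zone, so calendar.timegm is used here instead:
import calendar

def datetime_to_js_ms(value):
    ms = calendar.timegm(value.utctimetuple()) * 1000
    ms += getattr(value, 'microsecond', 0) / 1000  # plain dates have no microsecond
    return int(ms)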
def get_current_mjd():
    """
    *Get the current datetime as MJD*

    **Key Arguments:**
        - ``None``

    **Return:**
        - ``MJD`` -- Current datetime as MJD
    """
    ################ > IMPORTS ################
    ## STANDARD LIB ##
    from datetime import datetime
    import time
    ## THIRD PARTY ##
    ## LOCAL APPLICATION ##

    ################ > VARIABLE SETTINGS ######

    ################ > ACTION(S) ##############
    mjd = None
    now = datetime.now()
    now = now.strftime("%Y-%m-%d %H:%M:%S")
    try:
        year, month, day = now[0:10].split('-')
        hours, minutes, seconds = now[11:19].split(':')
        t = (int(year), int(month), int(day), int(hours), int(minutes),
             int(seconds), 0, 0, 0)
        unixtime = int(time.mktime(t))
        # MJD = unixtime / 86400 + 40587 (the two JD offsets sum to 40587)
        mjd = unixtime / 86400.0 - 2400000.5 + 2440587.5
    except ValueError as e:
        mjd = None
        print "String is not in SQL Date format."

    return mjd
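# Quick sanity check of the constant: the Unix epoch (unixtime 0,
# 1970-01-01 00:00 UTC) is MJD 40587, and -2400000.5 + 2440587.5 = 40587.0.
assert 0 / 86400.0 - 2400000.5 + 2440587.5 == 40587.0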
def get_seconds(d):
    import time
    return time.mktime(force_to_datetime(d).utctimetuple())
def onchange_employee_id(self, cr, uid, ids, date_from, date_to,
                         employee_id=False, contract_id=False, context=None):
    empolyee_obj = self.pool.get('hr.employee')
    contract_obj = self.pool.get('hr.contract')
    worked_days_obj = self.pool.get('hr.payslip.worked_days')
    input_obj = self.pool.get('hr.payslip.input')
    if context is None:
        context = {}
    # delete old worked days lines
    old_worked_days_ids = ids and worked_days_obj.search(
        cr, uid, [('payslip_id', '=', ids[0])], context=context) or False
    if old_worked_days_ids:
        worked_days_obj.unlink(cr, uid, old_worked_days_ids, context=context)
    # delete old input lines
    old_input_ids = ids and input_obj.search(
        cr, uid, [('payslip_id', '=', ids[0])], context=context) or False
    if old_input_ids:
        input_obj.unlink(cr, uid, old_input_ids, context=context)
    # defaults
    res = {'value': {
        'line_ids': [],
        'input_line_ids': [],
        'worked_days_line_ids': [],
        # 'details_by_salary_head': [],  TODO put me back
        'name': '',
        'contract_id': False,
        'struct_id': False,
        'contract_structure': None,
    }}
    if (not employee_id) or (not date_from) or (not date_to):
        return res
    ttyme = datetime.fromtimestamp(time.mktime(time.strptime(date_from, "%Y-%m-%d")))
    employee_id = empolyee_obj.browse(cr, uid, employee_id, context=context)
    res['value'].update({
        'name': _('Salary Slip of %s for %s') % (employee_id.name,
                                                 tools.ustr(ttyme.strftime('%B-%Y'))),
        'company_id': employee_id.company_id.id,
        'employee_name': employee_id.name_related,  # Customize
        'contract_id': employee_id.cont_id.id,
        'struct_id': employee_id.structure_id.id,
        'contract_structure': str(employee_id.cont_id.type_id.name) + '/' +
                              str(employee_id.cont_id.contract_type.name) + '/' +
                              str(employee_id.cont_id.contract_detail.name),
    })
    '''
    if not context.get('contract', False):
        # fill with the first contract of the employee
        contract_ids = self.get_contract(cr, uid, employee_id, date_from, date_to, context=context)
    else:
        if contract_id:
            # set the list of contract for which the input have to be filled
            contract_ids = [contract_id]
        else:
            # if we don't give the contract, then the input to fill should be
            # for all current contracts of the employee
            contract_ids = self.get_contract(cr, uid, employee_id, date_from, date_to, context=context)

    if not contract_ids:
        return res
    contract_record = contract_obj.browse(cr, uid, contract_ids[0], context=context)
    res['value'].update({
        'contract_id': employee_id.cont_id.id  # contract_record and contract_record.id or False
    })
    struct_record = contract_record and contract_record.struct_id or False
    if not struct_record:
        return res
    res['value'].update({
        'struct_id': struct_record.id,
    })
    # computation of the salary input
    worked_days_line_ids = self.get_worked_day_lines(cr, uid, contract_ids, date_from, date_to, context=context)
    input_line_ids = self.get_inputs(cr, uid, contract_ids, date_from, date_to, context=context)
    res['value'].update({
        'worked_days_line_ids': worked_days_line_ids,
        'input_line_ids': input_line_ids,
    })
    '''
    return res
# sample python code for datetime manipulations
from datetime import datetime, date, time
import time  # note: this shadows the `time` class imported above

today = datetime.now()
print "today", today
print "year", today.year
print "month", today.month
print "day", today.day
print "hour", today.hour
print "min", today.minute
print "sec", today.second

timeSec = time.mktime(today.timetuple())
print "time sec", timeSec
timeSec = timeSec + 60
locTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(timeSec))
print "local time : ", locTime

dt = datetime.strptime("2015-10-06 12:44:22", "%Y-%m-%d %H:%M:%S")
sec1 = time.mktime(dt.timetuple())
dt2 = datetime.strptime("2015-10-06 12:45:22", "%Y-%m-%d %H:%M:%S")
sec2 = time.mktime(dt2.timetuple())
print "sec diff", sec2 - sec1
print "time.time", time.time()
delta1 = dt2 - dt
def as_timestamp(self):
    import time
    return int(time.mktime(self.full_start_datetime.timetuple()))
def _convertDateTimeToUNIXTimestamp(self, date):
    return time.mktime(datetime.datetime.strptime(date, "%d/%m/%Y").timetuple())
def check_goals(self):
    """
    try:
        contextlist = self.all_access_model(cont=["Devices"])
    except Exception,e:
        print "Devices not found error",e
    try:
        contextlist2 = self.all_access_model(cont=["Apps"])
    except Exception,e:
        print "Apps not found error",e
    contextlist = contextlist + contextlist2
    """
    goalcontext = ['Goals', 'Health']
    goalcontext_obj = self.all_access_model(cont=goalcontext)
    print goalcontext_obj
    import datetime
    today = datetime.datetime.now()
    current_date = date(today.year, today.month, today.day)
    app_goal_list = list()
    goal_list = ["CurrentLevelGoal", "StepGoal", "ModerateActivityGoal",
                 "IntenseActivityGoal", "AvoidInactivityGoal"]
    for g in goal_list:
        if g in goalcontext_obj:
            goalcontext.append(g)
            show_chart = 0
            # --- Get the target value
            goal_target_val = self.get_evidence_new(context=goalcontext,
                                                    componentid="target_value")
            print goal_target_val
            try:
                if goal_target_val:
                    goal_target_v = (goal_target_val[-1].value).split(' ')[0]
                else:
                    goal_target_v = "none"
            except Exception, e:
                print e
            # -- Find out how many times a week the goal is being followed
            goal_target_freq = self.get_evidence_new(context=goalcontext,
                                                     componentid="target_frequency")
            if goal_target_freq:
                goal_target_f = (goal_target_freq[-1].value).split(' ')[0]
            else:
                goal_target_f = "none"
            # -- Find out when the goal was set
            goal_startdate = self.get_evidence_new(context=goalcontext,
                                                   componentid="goal_startdate")
            # -- How many days between the goal set date and its end
            goal_duration = self.get_evidence_new(context=goalcontext,
                                                  componentid="goal_duration")
            if goal_duration[-1].value == "no change":
                for dd in goal_duration[::-1]:
                    if dd.value != "no change":
                        betweendays = self.get_days_in_between(dd.value)
                        break
            else:
                betweendays = self.get_days_in_between(goal_duration[-1].value)
            # --- Get the date the goal was set and how many days have passed
            # start_time = time.localtime(int(goal_startdate[-1].creation_time))
            start_time = time.mktime(time.strptime(goal_startdate[-1].value, '%d/%m/%Y'))
            start_time = time.localtime(int(start_time))
            goal_start_date = date(start_time[0], start_time[1], start_time[2])
            print "This goal is set on ", goal_start_date
            print "This goal will go for", goal_duration[-1].value
            days_passed = (current_date - goal_start_date).days
            # --- Get the date the goal will end
            goal_end_date = goal_start_date + timedelta(days=betweendays)
            # -- Find out whether the goal is still on. If it is, show the
            # chart and check how many days are left, because there might be
            # data from a previous goal.
            days_left = 0
            days_target_needed_to_be_met = 0
            days_target_met = 0
            if goal_end_date >= current_date:
                show_chart = 1
                days_left = (goal_end_date - current_date).days
            email_id = self.get_evidence_new(context=['Personal'], componentid="email")
            # hard-coded demo values for specific (masked) test accounts
            if email_id[-1].value == "*****@*****.**" and g == "CurrentLevelGoal":
                days_passed = 5
                days_left = 2
                show_chart = 1
                days_target_needed_to_be_met = 0
                days_target_met = 0
            if email_id[-1].value == "*****@*****.**" and g == "StepGoal":
                days_passed = 0
                days_left = 12 * 7
                days_target_needed_to_be_met = 0
                days_target_met = 0
            if email_id[-1].value == "*****@*****.**" and g == "CurrentLevelGoal":
                days_passed = 7
                days_left = 0
                show_chart = 0
                days_target_needed_to_be_met = 0
                days_target_met = 0
            if email_id[-1].value == "*****@*****.**" and g == "StepGoal":
                days_passed = 25
                days_left = 9 * 7 + 3
                show_chart = 1
                # if goal_target_val[-1].flags[0] == "New":
                days_target_needed_to_be_met = 17
                days_target_met = 14
                # elif goal_target_val[-1].flags[0] == "Revised":
                #     days_target_needed_to_be_met = 0
                #     days_target_met = 0
            if email_id[-1].value == "*****@*****.**" and g == "CurrentLevelGoal":
                days_passed = 7
                days_left = 0
                show_chart = 0
            if email_id[-1].value == "*****@*****.**" and g == "StepGoal":
                days_passed = 11 * 7 + 2
                days_left = 5
                show_chart = 1
                # if goal_target_val[-1].flags[0] == "New":
                days_target_needed_to_be_met = 11 * 7 + 2
                days_target_met = 70
            print "Days:::", days_passed, days_left
            new_goal = goals_set(g, goal_target_v, goal_target_f, show_chart,
                                 days_passed, days_left,
                                 days_target_needed_to_be_met, days_target_met)
            app_goal_list.append(self.todict(new_goal, new_goal.name))
            goalcontext.remove(g)
# fragment: inside a loop, pick the largest available photo size from a
# VK API attachment
        sizes = list(filter(lambda p: p.startswith('photo'), a['photo'].keys()))
        maxsize = max(list(map(lambda p: int(p.replace("photo_", '')), sizes)))
        data['photo_url'] = a['photo']['photo_' + str(maxsize)]
        data['photo_url_75'] = a['photo']['photo_75']
        break

if 'photo_75' in data.keys():
    sizes = list(filter(lambda p: p.startswith('photo'), data.keys()))
    maxsize = max(list(map(lambda p: int(p.replace("photo_", '')), sizes)))
    data['photo_url'] = data['photo_' + str(maxsize)]
    data['photo_url_75'] = data['photo_75']

clients = set()

to_unixtime = lambda datetime_: int(time.mktime(datetime_.timetuple()))

def stream_new_posts():
    # tail the MongoDB collection for posts from today onward
    q = {'date': {'$gte': to_unixtime(datetime.today())}}
    time.sleep(5)
    db = pymongo.MongoClient("192.168.13.133").VkFest
    coll = db.data
    cursor = coll.find(q, cursor_type=pymongo.CursorType.TAILABLE_AWAIT)
    while True:
        for doc in cursor:
            p = Post(doc)
            print(p.data)
def string_toTimestamp(strTime):
    return time.mktime(string_toDatetime(strTime).timetuple())
def datetime_toTimestamp(dateTim):
    return time.mktime(dateTim.timetuple())
def chart_nvd3(request):
    flights = M.Flight.objects.filter(owner=request.user).order_by('date')
    # import pdb;pdb.set_trace()

    # Collect stats per day
    landingsPerDay = []
    timesPerDay = []
    days = []
    for flight_date, flightsPerDate in groupby(flights, key=extract_days):
        days.append(flight_date)
        # dayStat.append([flight_date, list(group)])
        print('flight ', flight_date)
        for element in list(flightsPerDate):
            landingsPerDay.append(element.landings)  # Store group iterator as a list
            timesPerDay.append(element.flightTime())
            print(' landings:', element.landings, ' flightTime:', element.flightTime())

    # Collect stats per month
    landingsPerMonth = []
    timesPerMonth = []
    months = []
    for flight_date, flightsPerDate in groupby(flights, key=extract_months):
        months.append(flight_date)
        print('flight ', flight_date)
        for element in list(flightsPerDate):
            landingsPerMonth.append(element.landings)
            timesPerMonth.append(element.flightTime())
            print(' landings:', element.landings, ' flightTime:', element.flightTime())

    # Collect stats per year
    landingsPerYear = []
    timesPerYear = []
    years = []
    for flight_date, flightsPerDate in groupby(flights, key=extract_years):
        years.append(flight_date)
        print('flight ', flight_date)
        for element in list(flightsPerDate):
            landingsPerYear.append(element.landings)
            timesPerYear.append(element.flightTime())
            print(' landings:', element.landings, ' flightTime:', element.flightTime())

    list_of_lists = [list(g) for t, g in groupby(flights, key=extract_months)]

    """ linewithfocuschart page """
    nb_element = 100
    start_time = int(time.mktime(datetime.datetime(2012, 6, 1).timetuple()) * 1000)
    totTime = [0]
    dualTime = [0]
    picTime = [0]
    landings = [30]
    landingsSum = 0
    templateDict = dict()
    templateDict['picTime'] = []
    templateDict['dualTime'] = []
    templateDict['totTime'] = []
    templateDict['flightTime'] = []
    picTimeSum = 0
    dualTimeSum = 0
    totTimeSum = 0
    datax = []

    graphLand = dict()
    graphLand['key'] = 'Landings'
    graphLand['bar'] = ''
    graphLand['values'] = []
    flightTimeData = dict()
    flightTimeData['key'] = 'Total Flight Time'
    flightTimeData['bar'] = ''
    flightTimeData['values'] = []
    picTimeData = dict()
    picTimeData['key'] = 'PIC Flight Time'
    picTimeData['bar'] = ''
    picTimeData['values'] = []
    dualTimeData = dict()
    dualTimeData['key'] = 'Dual Flight Time'
    dualTimeData['bar'] = ''
    dualTimeData['values'] = []

    startTime = datetime.datetime(2012, 6, 1)
    stopTime = datetime.datetime(2013, 6, 1)
    # n = flights[4].date  # datetime.datetime(2012, 12, 31)
    s = flights[0].date
    n = flights.latest('date').date  # datetime.datetime(2012, 1, 1)
    dateList = []
    dateDict = dict()
    for i in range((n - s).days + 1):
        d = (s + datetime.timedelta(days=i))
        dateList.append(int(time.mktime(d.timetuple())) * 1000)
        dateDict[int(time.mktime(d.timetuple())) * 1000] = 0
        # print (s + datetime.timedelta(days=i)).date()
    base = datetime.datetime.today()
    # dateList = [int(time.mktime((base - datetime.timedelta(days=x)).timetuple())) * 1000 for x in range(0, 10)]
    # for d in dateList:
    #     templateDict['flightTime'].append([d, 0])

    for flight in flights:
        date = int(time.mktime(flight.date.timetuple())) * 1000
        graphLand['values'].append([date, flight.landings])
        flightTimeData['values'].append([date, flight.flightTime()])
        landingsSum += flight.landings
        landings.append(landings[len(landings) - 1] + flight.landings)
        dualTime.append(dualTime[len(dualTime) - 1] + flight.flightTime() / 60 / 60)
        totTimeSum = totTimeSum + flight.flightTime()
        templateDict['totTime'].append([date, totTimeSum])
        dateDict[date] = flight.flightTime()
        # templateDict['flightTime'].append([date, flight.flightTime()])
    for k, v in dateDict.items():
        templateDict['flightTime'].append([k, v])

    pic = flights.filter(function="pic")
    for flight in pic:
        date = int(time.mktime(flight.date.timetuple())) * 1000
        picTimeData['values'].append([date, flight.flightTime()])
        picTimeSum = picTimeSum + flight.flightTime()
        templateDict['picTime'].append([date, picTimeSum])
    pic = flights.filter(function="dual")
    for flight in pic:
        date = int(time.mktime(flight.date.timetuple())) * 1000
        dualTimeSum = dualTimeSum + flight.flightTime()
        templateDict['dualTime'].append([date, dualTimeSum])

    datax.append(graphLand)
    datax.append(flightTimeData)
    datax.append(picTimeData)

    xdata = range(len(flights) + 1)
    # xdata = map(lambda x: start_time + x * 1000000000, xdata)
    ydata = [i + random.randint(1, 10) for i in range(nb_element)]
    ydata2 = map(lambda x: x * 2, ydata)
    ydata3 = map(lambda x: x * 3, ydata)
    ydata4 = map(lambda x: x * 4, ydata)
    # xdata = landings; ydata = landings; ydata2 = dualTime
    # ydata3 = [2, 1, 2, 1, 2]; ydata4 = [2, 1, 2, 1, 2]

    # tooltip_date = "%d %b %Y %H:%M:%S %p"
    tooltip_date = "%d %b %Y"
    extra_serie = {"tooltip": {"y_start": "", "y_end": " landings"},
                   "date_format": tooltip_date}
    chartdata = {
        'x': xdata,
        'name1': 'Landings', 'y1': ydata, 'extra1': extra_serie,
        'name2': 'Flight time', 'y2': ydata2, 'extra2': extra_serie,
        # 'name3': 'series 3', 'y3': ydata3, 'extra3': extra_serie,
        # 'name4': 'series 4', 'y4': ydata4, 'extra4': extra_serie,
    }
    charttype = "lineWithFocusChart"
    data = {
        'charttype': charttype,
        'chartdata': chartdata,
    }
    return django.shortcuts.render(request, 'flightlog/piechart.html',
                                   {'graphLand': datax, 'templateDict': templateDict})
def get_seconds(d):
    import time
    return time.mktime(d.utctimetuple())
def json_encoder(obj):
    # serialize datetime/date objects as epoch seconds
    if isinstance(obj, (datetime, date)):
        return time.mktime(obj.timetuple())
    raise TypeError('Object of type %s with value of %s is not JSON serializable'
                    % (type(obj), repr(obj)))
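# Usage sketch: pass it as json.dumps' fallback encoder.
import json
from datetime import datetime

print(json.dumps({'created': datetime(2020, 2, 1, 12, 0)}, default=json_encoder))
# e.g. {"created": 1580558400.0} -- the exact value depends on the local timezone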
def main():
    val = datetime.utcnow() - timedelta(hours=4)
    val = str(int(time.mktime(val.timetuple())))
    l = FirebaseBackup().firebase_query('resplendent-fire-1812', 'ideas')
    FirebaseBackup().firebase_put(l, 'resplendent-fire-1812', 'backup/ideas/' + val)
to0 = []
ti0 = []
ts0 = []
to1 = []
ti1 = []
ts1 = []
y = []
x = [[] for x in range(5)]

# read data
reader = csv.reader(open('Item13.txt', 'r'))
for row in reader:
    dt = datetime.strptime(row[0], "%Y-%m-%d %H:%M:%S")
    dt = int(time.mktime(dt.timetuple()))
    if dt >= tmin:
        ti0.append(dt)
        ti1.append(float(row[1]))

reader = csv.reader(open('Item5.txt', 'r'))
for row in reader:
    dt = datetime.strptime(row[0], "%Y-%m-%d %H:%M:%S")
    dt = int(time.mktime(dt.timetuple()))
    if dt >= tmin:
        to0.append(dt)
        to1.append(float(row[1]))

reader = csv.reader(open('Item10.txt', 'r'))
for row in reader:
    dt = datetime.strptime(row[0], "%Y-%m-%d %H:%M:%S")
# read from file line by line,
# convert input strings to datetime format, and
# compare two consecutive dates for periodicity
from datetime import datetime, date, time
import time  # note: this shadows the `time` class imported above

filepath = "./input.txt"
timeformat = "%Y-%m-%d %H:%M:%S"
periodicity = 60

with open(filepath) as f:
    content = f.readlines()

for i in range(len(content) - 1):
    dateString1 = content[i].strip()
    dateString2 = content[i + 1].strip()
    if len(dateString1) != 0 and len(dateString2) != 0:
        dt1 = datetime.strptime(dateString1, timeformat)
        sec1 = time.mktime(dt1.timetuple())
        dt2 = datetime.strptime(dateString2, timeformat)
        sec2 = time.mktime(dt2.timetuple())
        timeDiff = dt2 - dt1
        secDiff = sec2 - sec1
        if secDiff != periodicity:
            print "error"