def orders1(request):
    """Return (as JSON) the ids of positions of the requested type that are
    free for the whole requested time span today.

    Availability is computed with sets of minute-granularity timestamps:
    a position is available if the needed minute-set is a subset of the
    minutes not covered by any of its unfinished (status=0) orders.
    """
    # Read the POST form fields.
    stime = request.POST['stime']
    etime = request.POST['etime']
    t = request.POST['type']
    # Build the full set of minute timestamps for today (00:00:00–23:59:59).
    dt1 = datetime.combine(date.today(), time.min)
    dt2 = datetime.combine(date.today(), time(23, 59, 59))
    ds1 = int(datetime.timestamp(dt1))
    ds2 = int(datetime.timestamp(dt2))
    fullset = set(range(ds1, ds2 + 1, 60))  # 00:00:00~23:59:59
    # Requested window, one element per minute (end inclusive).
    st = int(float(stime))
    et = int(float(etime))
    time_need = set(range(st, et + 1, 60))
    pos = Position.objects.filter(type=t)
    pos_amount = len(pos)
    pos_available = []
    for i in range(pos_amount):
        time_available = fullset
        # Only unfinished orders (status == 0) block a position.
        order = pos[i].order_set.filter(status=0)
        order_amount = len(order)
        for j in range(order_amount):
            s = int(datetime.timestamp(order[j].stime))
            e = int(datetime.timestamp(order[j].etime))
            # End is exclusive here so adjacent bookings can share an endpoint.
            time_used = set(range(s, e, 60))
            time_available = time_available - time_used
        # Subset test: every needed minute must still be free.
        if time_need <= time_available:
            pos_available.append(pos[i].id)
    return JsonResponse(pos_available, safe=False)
def to_timestamp(dt_str=None, tz_str=None):
    """Print the POSIX timestamp of *dt_str* ('YYYY-MM-DD HH:MM:SS').

    *tz_str* is an optional UTC offset of the form 'UTC+H:MM' / 'UTC-H:MM';
    when omitted, UTC+8 is assumed. Invalid inputs print a (Chinese) error
    message instead of raising.
    """
    # Pattern for the date-time portion: YYYY-MM-DD HH:MM:SS.
    t_re = r'[\d]{4}\-(\d|[0-1]\d)\-(\d|[0-2]\d|3[0-1])\s+?(2[0-4]|[0-1]\d|\d)[\:]([0-5]\d|\d)[\:]([0-5]\d|\d)'
    # Pattern for a 'UTC±H:MM' offset; groups: sign, 'H:MM', hours, minutes.
    utc_re = r'UTC([\-|\+])((2[0-4]|[0-1]\d|\d):([0-5]\d|\d))'
    if dt_str is not None:
        if re.match(t_re, dt_str):
            if tz_str is not None:
                if re.match(utc_re, tz_str):
                    utc_list = re.match(utc_re, tz_str).groups()
                    if len(utc_list) == 4:
                        # Sign + hour digits, e.g. '+8' -> 8; minutes ignored.
                        hours_num = int(utc_list[0] + utc_list[2])
                        t = datetime.strptime(dt_str, '%Y-%m-%d %H:%M:%S')
                        utc_time = timezone(timedelta(hours=hours_num))
                        ts = t.replace(tzinfo=utc_time)
                        ts = datetime.timestamp(ts)
                        print(ts)
                else:
                    print('UTC格式不正确')
            else:
                # No offset given: interpret the time as UTC+8.
                t = datetime.strptime(dt_str, '%Y-%m-%d %H:%M:%S')
                utc_time = timezone(timedelta(hours=8))
                ts = t.replace(tzinfo=utc_time)
                ts = datetime.timestamp(ts)
                print(ts)
        else:
            print('时间格式不正确!')
    else:
        print('缺少必要的参数,请确保传入了日期与时间值')
def create_links(_args):
    """Build the Barchart dynamic-zoom chart URL for the given CLI args.

    *_args* must provide ``symbol``, ``start`` and ``end`` (dates as
    YYYYMMDD); the timestamps are taken at 05:00:00 local time and encoded
    as epoch milliseconds in the query string.
    """
    base = 'http://jscharts-e-barchart.aws.barchart.com//charts/update_dynamic_zoom?callback=Request.JSONP.request_map.request_1&data_time=daily&symbol={}&end={}&start={}&cookie_index=0'

    def _epoch_ms(yyyymmdd):
        # Parse "<date>050000" and convert to epoch milliseconds.
        parsed = datetime.strptime(str(yyyymmdd) + '050000', '%Y%m%d%H%M%S')
        return int(datetime.timestamp(parsed) * 1000)

    return base.format(_args.symbol, _epoch_ms(_args.end), _epoch_ms(_args.start))
def download_token(self, download_url, expires=3600):
    """Return an access token for *download_url* with an ``e=`` expiry
    parameter set *expires* seconds into the future."""
    # Append with '&' if the URL already has a query string, else '?'.
    separator = '&' if download_url.find("?") >= 0 else '?'
    deadline = int(datetime.timestamp(datetime.now())) + expires
    signed_url = download_url + separator + 'e=' + str(deadline)
    return self.access_token(signed_url)
def channel_videos(request, pk):
    """Render per-video stats for a channel; on POST, import any videos from
    the channel's playlist that are not in the database yet, then redirect
    back to this view."""
    channel = Channel.objects.get(pk=pk)
    # Id of the most recent VideoStats row for each of the channel's videos.
    latest_stats_ids = Video.objects.filter(channel=pk).annotate(latest_stats_id=Max('statistics__id')) \
        .values_list('latest_stats_id', flat=True)
    video_stats = VideoStats.objects.filter(id__in=latest_stats_ids).order_by('video__published_at')
    # highcharts: one series of [published_at_ms, view_count] points.
    series = []
    rec = {}
    rec["name"] = "Channel name"
    rec["data"] = []
    for stat in video_stats:
        published_at = dt.timestamp(stat.video.published_at) * 1000  # epoch ms for JS charts
        view_count = stat.view_count
        rec["data"].append([published_at, view_count])
    series.append(rec)
    # d3 charts: [published_at_ms, view_count, like_count] triples.
    d3data = []
    for stat in video_stats:
        published_at = dt.timestamp(stat.video.published_at) * 1000
        view_count = stat.view_count
        like_count = stat.like_count
        d3data.append([published_at, view_count, like_count])
    if request.method == 'POST':
        # Sync: fetch playlist metadata and bulk-insert unknown videos.
        playlist_id = channel.playlist_id
        channel_videos_meta = get_videos_meta_info(playlist_id)
        # ids_from_db = [stat.video.video_id for stat in video_stats]
        videos = Video.objects.filter(channel_id=pk)
        ids_from_db = [vid.video_id for vid in videos]
        new_videos = []
        for vid in channel_videos_meta:
            if vid['video_id'] not in ids_from_db:
                v = Video(
                    video_id=vid['video_id'],
                    title=vid['title'],
                    published_at=vid['published_at'],
                    channel_id=pk
                )
                new_videos.append(v)
        Video.objects.bulk_create(new_videos)
        return redirect('channel_videos', pk=pk)
    return render(request, 'channel/channel_videos.html', {
        'channelname': channel.username,
        'video_stats': video_stats,
        'series': series,
        'd3data': d3data,
    })
def fetch_awards_by_month(self, year, month):
    """Return all awards whose awarding comment falls within the given
    calendar month, as a list of dicts."""
    # Half-open interval [first of this month, first of next month).
    if month < 12:
        upper_bound = datetime(year, month + 1, 1, 0, 0, 0)
    else:
        upper_bound = datetime(year + 1, 1, 1, 0, 0, 0)
    lower_bound = datetime(year, month, 1, 0, 0, 0)
    with self.db:
        cursor = self.db.cursor()
        cursor.execute('''SELECT * FROM awards WHERE (awarding_comment_time >= ? AND awarding_comment_time < ?)''',
                       (datetime.timestamp(lower_bound), datetime.timestamp(upper_bound)))
        fetched = cursor.fetchall()
    return [dict(award) for award in fetched]
def update(self, data):
    """Refresh the internal task map from *data* and emit sigTaskCompleted
    for tasks that finished recently (within the last 10 seconds)."""
    if data is None:
        return
    # make a list of id of recent finished tasks
    completed = []
    self._tasks_mod.clear()
    for task in data["tasks"]:
        tid = task["id"]
        self._tasks_mod[tid] = task
        # A task id we have not seen before is treated as newly completed.
        if tid not in self._tasks:
            completed.append(tid)
    self._tasks = self._tasks_mod.copy()
    if self._initialized:
        # prevent already-completed tasks firing sigTaskCompleted
        # when ETM starting later than frontend
        # by comparing `completeTime` with `timestamp`
        # threshold: 10 secs
        timestamp = datetime.timestamp(datetime.now())
        for completedId in completed:
            if 0 <= timestamp - self._tasks[completedId]["completeTime"] <= 10:
                self.sigTaskCompleted.emit(completedId)
    else:
        # First update only seeds the task map; nothing is emitted.
        self._initialized = True
def gettimestamp():
    """Interactively prompt for a local date/time (with a PST/PDT choice),
    confirm with the user, and return the POSIX timestamp as a string."""
    confirm = 0
    while (confirm == 0):
        indate = input("\nEnter the (local!) start date of the audio file (format: MM-DD-YYYY) ")
        intime = input("Enter the (also local!) start time of the audio file to the nearest second, 24 hour clock. Midnight is 00:00:00. (format: HH:MM:SS) ")
        bool_in_is_dst = query_yes_no("\nIs DST active at the start of the audio file? (If you're past the point of \"spring forward\", then answer yes). \nIf you're not sure about DST, make sure to look up what second DST takes effect. Or just avoid having audio files starting during DST.", None)
        if bool_in_is_dst == True:
            in_is_dst = "PDT"
            offset = "-0700"  # yes I've painstainkingly confirmed that these offsets are working as intended. Programming with timezones, not even once.
        else:
            in_is_dst = "PST"
            offset = "-0800"
        print('\n')
        datestring = indate + " " + intime + offset
        # NOTE(review): the prompt asks for MM-DD-YYYY but this strptime uses
        # "%d-%m-%Y" (day first) — the human-readable echo swaps day/month for
        # days <= 12 and raises for days > 12. Confirm the intended format.
        human_readable_time = datetime.strptime(indate + " " + intime, "%d-%m-%Y %H:%M:%S")
        # dateutil's parser handles the offset-suffixed string.
        start_time = parser.parse(datestring)
        #convert the time we have to a posix timestamp
        start_timestamp = int(datetime.timestamp(start_time))
        temp_string = "You entered the following information: (YYYY-MM-DD HH:MM:SS) \n%s %s \nPOSIX TIMESTAMP - %s \nIs this information correct?" % (human_readable_time, in_is_dst, start_timestamp);
        response = query_yes_no(temp_string, None)
        if response == True:
            confirm = 1;
        else:
            print("\n Okay, I'll ask you again ... \n \n")
            sleep(1)
            confirm = 0;
    return str(start_timestamp)
def is_old():
    """Return True if CACHE_FILE is older than one hour.

    Returns False when the file is fresh, and also when it is missing or
    unreadable (the caller then treats the cache as absent/not-old).
    """
    try:
        # Renamed from `time` to avoid shadowing the stdlib module name.
        mtime = os.path.getmtime(CACHE_FILE)
    except OSError:
        # Was a bare `except:` — narrowed to the only error getmtime raises,
        # so programming errors are no longer silently swallowed.
        return False
    expire = datetime.timestamp(datetime.now() - timedelta(hours=1))
    return mtime < expire
def fetch_recent_dispo_logs(self, days):
    """Return dispo_log rows whose comment_time falls within the last
    *days* days, as a list of dicts."""
    with self.db:
        cur = self.db.cursor()
        # NOTE(review): datetime.utcnow() returns a *naive* datetime and
        # .timestamp() interprets naive values as local time, so this cutoff
        # is skewed by the machine's UTC offset unless rows were written with
        # the same convention — confirm against the writer.
        cur.execute('SELECT * FROM dispo_log WHERE comment_time > ?',
                    (datetime.timestamp(datetime.utcnow() - timedelta(days=days)),))
        rows = cur.fetchall()
    return [dict(row) for row in rows]
def index():
    """Board index view: on POST, validate and create a new board (directory,
    boardlist entry, post counter, seed post); on GET, render the board list."""
    if request.method == "POST":
        global BOARDS
        global POSTS
        board = request.form["board"].lower() if request.form.get("board", None) else ""
        desc = request.form["desc"] if request.form.get("desc", None) else ""
        if board in BOARDS:
            flash("That board already exists!", "error")
            return render_template("newboard.html", board=board)
        if invalid_board_name(board) or invalid_board_name(desc, desc=True):
            # Flash a specific message for each failed validation.
            if invalid_board_name(board):
                flash("Invalid board name! (alphabet, 1-6 characters, unique)", "error")
            if invalid_board_name(desc, desc=True):
                flash("Invalid description! (alphanumeripunctual, 1-30 chars)", "error")
            logging.debug("Someone tried to create an invalid board: {} ({})".format(board, desc))
            return render_template("newboard.html", board=board)
        try:
            mkdir(ROOT + "boards/" + board)
        except IOError:
            flash("Unable to create board :O", "error")
            logging.exception("User was unable to create board")
            return render_template("newboard.html", board=board)
        # Persist the new board in the board list.
        with open(ROOT + "boardlist", "w") as f:
            BOARDS[board] = desc
            dump(BOARDS, f)
        # Seed the board index with post #1 ("first"), stamped with the current time.
        with open(ROOT + "boards/" + board + "/index", "w") as f:
            dump([[1, "GET", [int(datetime.timestamp(datetime.utcnow())), 1, "first"]]], f)
        # Initialize the board's post counter.
        with open(ROOT + "postnums", "w") as f:
            POSTS[board] = 1
            dump(POSTS, f)
        flash("Success? :O", "success")
        logging.info("New board created: " + board)
        return render_template("newboard.html", board=board)
    return render_template("index.html", boards=BOARDS)
def report(**kwargs):
    """Print a table of tracked tasks, optionally filtered by keyword args:
    ``days`` (only tasks started within the last N days), ``category`` and
    ``task`` (exact matches). Ends with a total of time spent."""
    if validate_init() is True:
        engine = create_engine("sqlite:///" + tracker_db, echo=False)
        Session = sessionmaker(bind=engine)
        session = Session()
        n = session.query(Task)
        for key in kwargs:
            current_time = datetime.utcnow()
            if key == 'days' and kwargs[key] != '':
                # Task.start is stored as an integer POSIX timestamp.
                days_ago = current_time - timedelta(days=int(kwargs[key]))
                n = n.filter(and_(Task.start >= int(datetime.timestamp(days_ago))))
            if key == 'category' and kwargs[key] != '':
                n = n.filter(and_(Task.category == kwargs[key]))
            if key == 'task' and kwargs[key] != '':
                n = n.filter(and_(Task.name == kwargs[key]))
        # NOTE(review): the .all() result is discarded, and a Query object is
        # always truthy, so the "Return all" fallback below likely never
        # triggers — confirm intended behavior.
        n.all()
        if not n:
            print('Can not find anything with this parameters. Return all.')
            n = session.query(Task).all()
        total = timedelta(seconds=0)
        template = "{0:2}|{1:15}|{2:65}|{3:15}|{4:20}|"
        print(template.format('Id', 'Category', 'Name', 'Spent hours', 'Date'))
        for i in n:
            # stop/start are POSIX timestamps; diff is seconds spent.
            diff = (i.stop - i.start)
            td = timedelta(seconds=diff)
            total = total + td
            d = datetime.fromtimestamp(i.start)
            print(template.format(i.id, i.category, i.name, str(td), str(d)))
        print('*****************')
        print('Total: %s (hh:mm:ss)' % total)
def execute(bot, data):
    """Ban every member of the group named in ``data['custom']['id']``.

    Optional keys of ``data['custom']`` (Russian):
    - "причина"     — ban reason code (int)
    - "группа"      — group to ban in (defaults to the bot's own group)
    - "комментарий" — visible ban comment
    - "время"       — ban duration in hours (converted to an end_date timestamp)

    The group with id ``exclude`` is never processed.
    """
    exclude = 75615891
    ToBan = bot.GroupApi.groups.getMembers(group_id=data['custom']["id"])['items']
    if data['custom']["id"] == exclude:
        return
    args = {'v': "5.60", 'group_id': exclude}
    uid = int(data['custom']["id"])
    if "причина" in data['custom']:
        # BUG FIX: the reason was read from `args` (which never contains the
        # key, so this always raised KeyError) instead of data['custom'].
        args['reason'] = int(data['custom']["причина"])
    if "группа" in data['custom']:
        args['group_id'] = data['custom']["группа"]
    else:
        args['group_id'] = bot.Group.replace("-", "")
    args['user_id'] = uid
    # BUG FIX: guard the key lookup — the original indexed the key directly
    # and raised KeyError whenever no comment was supplied.
    if "комментарий" in data['custom'] and data['custom']["комментарий"]:
        args['comment'] = data['custom']["комментарий"]
        args['comment_visible'] = 1
    # BUG FIX: the duration was looked up in `args` instead of data['custom'],
    # so timed bans could never be applied.
    if "время" in data['custom']:
        end_date = datetime.timestamp(datetime.now() + timedelta(hours=int(data['custom']["время"])))
        args["end_date"] = end_date
    for user in ToBan:
        args['user_id'] = user
        bot.GroupApi.groups.banUser(**args)
def create_assno():
    """Generate a unique asset-id string derived from the current time.

    The current POSIX timestamp is scaled by 10000 (0.1 ms resolution) and
    truncated to an integer, giving a monotonically increasing numeric id.
    """
    now = datetime.now()
    asset_id = int(now.timestamp() * 10000)
    return str(asset_id)
def fuse(self, other, conflict="rename_other"): """ Fuse another entity to current entity """ # TODO: Fuse zope interfaces (after they will be implemented) # TODO: Change the class of the current entity if # the other entity is the subtype of the current if not (self.meta['hierarchy'].startswith(other.meta['hierarchy']) or other.meta['hierarchy'].startswith(self.meta['hierarchy'])): raise Exception("Cannot fuse nodes: they are not in subclass-superclass relations") current_attrs = set(self.attributes.keys()) other_attrs = set(other.attributes.keys()) conflict_attrs = current_attrs & other_attrs for attr in conflict_attrs: if self.attributes[attr] != other.attributes[attr]: now = str(int(datetime.timestamp(datetime.now()))) if conflict == 'keep_self': pass elif conflict == 'keep_other': self.attributes[attr] = other.attributes[attr] elif conflict == 'rename_self': self.attributes[attr+'_'+now] = self.attributes[attr] self.attributes[attr] = other.attributes[attr] elif conflict == 'rename_other': self.attributes[attr+'_'+now] = other.attributes[attr] else: raise AttributeError("Incorrect conflict value") attrs_to_add = other_attrs - current_attrs for attr in attrs_to_add: self.attributes[attr] = other.attributes[attr]
def getorderstatus(request):
    """Return the status of the requesting user's most recent order as JSON."""
    user = request.POST['username']
    u = get_object_or_404(User, username=user)
    o = Order.objects.filter(user=u).last()
    s = o.status
    # Each user may have only one unfinished order at a time.
    # While charging, the charge percentage is returned instead of the status.
    r = 'None'
    if s == 0:
        # Order pending (charging not finished).
        r = {'stime': int(datetime.timestamp(o.stime)), 'etime': int(datetime.timestamp(o.etime)), 'pid': o.position_id, 'status': o.status, 'code': o.code}
    elif s == 2:
        # Charging in progress: 'status' carries the charge percentage.
        r = {'stime': int(datetime.timestamp(o.stime)), 'etime': int(datetime.timestamp(o.etime)), 'pid': o.position_id, 'status': o.charge_p, 'code': o.code}
    else:
        # Order completed: zeroed fields and a placeholder code.
        r = {'stime': 0, 'etime': 0, 'pid': 0, 'status': o.status, 'code': 'xxxx'}
    return JsonResponse(r)
def _get_timestamp(): """ 向 http://www.toutiao.com/search_content/ 发送的请求的参数包含一个时间戳, 该函数获取当前时间戳,并格式化成头条接收的格式。格式为 datetime.today() 返回 的值去掉小数点后取第一位到倒数第三位的数字。 """ row_timestamp = str(datetime.timestamp(datetime.today())) return row_timestamp.replace('.', '')[:-3]
def my_text(message):
    """Forward a text message to the configured channel, store it in the
    database with an RFC 2822 date, and regenerate the feed."""
    # We don't need this crap in channel
    if message.text == "/start":
        return
    tz = pytz.timezone(config.timezone)
    msg_to_channel = bot.send_message(config.channel_id, message.text, parse_mode="HTML")
    # Newlines become <br /> for HTML storage; the date is the current time
    # in the configured timezone, formatted by email.utils.formatdate.
    dbhelper.insert(message.text.replace("\n", "<br />"),
                    formatdate(datetime.timestamp(datetime.now(tz)), localtime=True),
                    msg_to_channel.message_id)
    generate_feed()
def import_data(eng, table_name="10_op_point_test"): """ Import data from database into arrays. Args: eng (engine): database engine object from which to pull the data table_name (string, default="10_op_point_test"): name of table from which to pull the data Returns: dict containing table data as numpy arrays """ # Initialize some stuff data_dict = {} index = 0 cur = eng.cursor() cur.execute("SELECT COUNT(*) FROM {}".format(table_name)) num_rows = list(cur.fetchone().values())[0] - 1 # subtract 1 for the first (initialization) row progress = np.floor(num_rows/50) cur.execute("SELECT * FROM {}".format(table_name)) key_list = list(cur.fetchone().keys()) # skip the first initialization row print("Number of rows: {}".format(num_rows-1)) # take off a row for initialization print("Keys: {}".format(key_list)) print("Loading data ", end="", sep="", flush=True) try: # Initialize the data dictionary with first row data row = cur.fetchone() data_dict["atmosphericP"] = row["atmosphericP"] # atmosphericP (only read once) data_dict["opPointDes"] = np.empty((num_rows, get_array(row["opPointDes"]).size)) # opPointDes data_dict["opPointAct"] = np.empty((num_rows, get_array(row["opPointAct"]).size)) # opPointAct data_dict["flameStatus"] = np.empty((num_rows, 1)) # flameStatus data_dict["dateTimeStamp"] = np.empty((num_rows, 1)) # dateTimeStamp data_dict["staticP"] = np.empty((num_rows, 1)) # staticP data_dict["temperature"] = np.empty((num_rows, get_array(row["temperature"]).size)) # temperature dynP_len = get_array(row["dynamicP"]).shape data_dict["dynamicP"] = np.empty((dynP_len[0], dynP_len[1] * (num_rows))) # dynamicP # Build data dictionary one row at a time for index in range(0, num_rows): # already used first row hop = index*dynP_len[1] if index % progress == 0: print(".", end="", sep="", flush=True) data_dict["opPointDes"][index][:] = get_array(row["opPointDes"]) # desired flow voltage data_dict["opPointAct"][index][:] = get_array(row["opPointAct"]) # current flow voltage 
data_dict["flameStatus"][index] = row["flameStatus"] # flame status data_dict["dateTimeStamp"][index] = datetime.timestamp(datetime.strptime(row["dateTimeStamp"], "%Y-%m-%d@%H:%M:%S.%f")) # date time stamp data_dict["staticP"][index] = row["staticP"] # static pressure, psi data_dict["temperature"][index][:] = get_array(row["temperature"]) # temperature readings, C data_dict["dynamicP"][:, hop:hop+dynP_len[1]] = get_array(row["dynamicP"]) # dynamic pressure measurements row = cur.fetchone() eng.close() data_dict["time"] = np.array([dt - data_dict["dateTimeStamp"][0] for dt in data_dict["dateTimeStamp"]]) except: print("Error at index {}".format(index)) traceback.print_exc() print(" done!") return data_dict
def klass(self, value):
    """Update the task class; when the task just transitioned into
    COMPLETED and its completion happened within the last 10 seconds,
    notify the task model."""
    previous = self._klass
    self._klass = value
    # No transition — nothing to signal.
    if previous == value:
        return
    if value == TaskClass.COMPLETED:
        now = datetime.timestamp(datetime.now())
        # Only emit for genuinely recent completions (10-second window),
        # so tasks completed long ago do not re-fire on reload.
        if 0 <= now - self.completionTime <= 10:
            self.__taskModel.taskCompleted.emit(self)
def auth_headers_request(public, secret, headers=None):
    """Return *headers* (a new dict when none/empty is given) augmented with
    a timestamp 'Salt' and an 'Authorization' header of the form
    '<public>: <signature>'."""
    headers = headers or {}
    nonce = str(datetime.timestamp(datetime.now()))
    headers['Salt'] = nonce
    headers['Authorization'] = '%s: %s' % (public, auth_signature(secret, nonce))
    return headers
def _gen_private_url(self, key, host, expires=3600):
    """Build a signed private download URL for *key* served from *host*,
    valid for *expires* seconds."""
    assert host != None and host != "", "download host can' be empty"
    # Normalize the host to an http:// URL.
    if not host.startswith("http://"):
        host = "http://" + host
    base_url = host + '/' + key
    # Token is computed over the bare URL, before query parameters are added.
    token = self._auth.download_token(base_url, expires=expires)
    deadline = int(datetime.timestamp(datetime.now())) + expires
    return base_url + '?e=' + str(deadline) + "&token=" + token
def get_intraday_data(symbol, interval=60, days=1, end_time=None):
    """
    Retrieve intraday stock data from Google Finance.

    Parameters
    ----------
    symbol : str
        Stock symbol.
    interval : int
        Interval between stock values in seconds.
    days : int
        Number of days of data to retrieve.
    end_time : float, optional
        End time to retrieve. In second with UNIX format (nb of seconds
        from 01/01/1970 UTC). Defaults to the current time at call time.

    Returns
    -------
    df : pandas.DataFrame
        DataFrame containing the "Datetime", "Close","High","Low","Open","Volume".
    """
    # BUG FIX: the default was `end_time=datetime.timestamp(datetime.now())`,
    # which Python evaluates ONCE at import time, so every default call reused
    # the module-load time. Bind the default at call time instead.
    if end_time is None:
        end_time = datetime.timestamp(datetime.now())
    url_web = "http://www.google.com/finance/getprices"
    params = {
        'q': symbol,
        'i': str(interval),
        'p': str(days) + 'd',
        'ts': str(int(end_time * 1000)),
        'f': 'd,o,h,l,c,v'
    }
    # retrieve data from url :
    r = requests.get(url_web, params=params)
    # split each line :
    r = r.text.split()
    # remove 7 first line (header) :
    r = r[7:]
    # split each line. r will be lists in a list :
    r = [l.split(",") for l in r]
    # convert to a pandas DataFrame :
    DF = pd.DataFrame(r, columns=["Datetime", "Close", "High", "Low", "Open", "Volume"])
    # remove the "a" character for the first timestamp :
    DF["Datetime"][0] = DF["Datetime"][0][1:]
    # convert the time stamp. It's presented in UNIX format.
    # Which represents the seconds from 1st January 1970 UTC.
    DF["Datetime"][0] = datetime.fromtimestamp(float(DF["Datetime"][0]))
    # subsequent rows are offsets (in `interval` units) from the first timestamp :
    DF["Datetime"][1:] = [DF["Datetime"][0] + int(x) * timedelta(seconds=interval) for x in DF["Datetime"][1:]]
    return DF
def linkdir(linkname, target):
    """Ensure *linkname* is a symlink pointing at *target*.

    Any existing entry at *linkname* is first renamed aside with a
    timestamp suffix. When the module-level `fake_operate` flag is set,
    the shell commands are printed instead of executed (dry run).
    Returns False (without doing anything) when the link already resolves
    to *target*.
    """
    parent_dir = os.path.dirname(os.path.realpath(linkname))
    if fake_operate:
        # Dry run: echo the commands that would be executed.
        if os.path.realpath(linkname) == target:
            return False
        if os.path.lexists(linkname):
            postfix = str(datetime.timestamp(datetime.now()))
            print('mv {0} {1}'.format(linkname, linkname + '.' + postfix))
        print('mkdir -p {0}'.format(parent_dir))
        print('ln -s {0} {1}'.format(target, linkname))
    else:
        if os.path.realpath(linkname) == target:
            return False
        if os.path.lexists(linkname):
            # Back up whatever currently occupies the link path.
            postfix = str(datetime.timestamp(datetime.now()))
            os.system('mv {0} {1}'.format(linkname, linkname + '.' + postfix))
        os.system('mkdir -p {0}'.format(parent_dir))
        os.system('ln -s {0} {1}'.format(target, linkname))
        # Record the created link (real operations only).
        write_linkrec(linkname, target)
def convert_to_ts(iso_string):
    '''
    Convert an ISO-8601 datetime string to a POSIX timestamp.

    datetime.fromisoformat() (before Python 3.11) only accepts fractional
    seconds with exactly 3 or 6 digits, so a shorter fraction is padded
    with trailing zeros to 6 digits.

    Bug fix: the original unconditionally did ``iso_string.rsplit('.', 1)``
    and unpacked two values, so a string without a fractional part raised
    ValueError; the padding is now applied only when a '.' is present.
    '''
    if '.' in iso_string:
        ts, fraction = iso_string.rsplit('.', maxsplit=1)
        len_fraction = len(fraction)
        if len_fraction < 6:
            # Pad the fraction to microsecond precision.
            trailing_zeroes = '0' * (6 - len_fraction)
            iso_string = ts + '.' + fraction + trailing_zeroes
    dt = datetime.fromisoformat(iso_string)
    ts = datetime.timestamp(dt)
    return ts
def make_timestamp(self, value: DateTypes):
    """Normalize *value* into a POSIX timestamp or pass it through.

    - datetime -> its POSIX timestamp (float)
    - date     -> timestamp of the very end of that day (23:59:59.999999)
    - str      -> returned unchanged (digit strings, float strings, or
                  period strings such as '1d'/'10h' are all passed through)
    - other    -> returned unchanged
    """
    if isinstance(value, datetime):
        # BUG FIX: was `datetime.timestamp()` — the unbound method called with
        # no instance argument, which raises TypeError for every datetime.
        return value.timestamp()
    elif isinstance(value, date):
        return datetime.combine(value, datetime.max.time()).timestamp()
    elif isinstance(value, str):
        if value.isdigit():
            return value
        else:
            try:
                float(value)
                return value
            except ValueError:
                # The value can also be '1d', '10h', ...
                return value
    else:
        return value
def get_requests(item_name, tier=1, enchantment=0, quality=0, after_ts=1000):
    """Query the graph database for profitable request/offer pairs.

    Matches request and offer relationships on the given item (filtered by
    group, tier, enchantment and quality) that were both viewed within the
    last *after_ts* seconds and where the request price undercuts the offer
    price; results are ordered by profit.
    """
    db = Graph(password='******')
    now_ts = datetime.timestamp(datetime.now())
    # NOTE(review): parameters are interpolated directly into the Cypher
    # string; use query parameters if item_name can come from untrusted input.
    cypher = f''' MATCH (:Character)-[r:request]->(i:Item)<-[o:offer]-(:Character) WHERE i.Group = "{item_name}" AND i.Tier = {tier} AND i.Enchantment = {enchantment} AND i.Quality = {quality} AND ({now_ts} - r.LastViewed) < {after_ts} AND ({now_ts} - o.LastViewed) < {after_ts} AND (r.UnitPriceSilver < o.UnitPriceSilver) RETURN i, (r.UnitPriceSilver - o.UnitPriceSilver) as profit ORDER BY profit '''
    return db.run(cypher)
def gettimestampfromfile(name):
    """Parse a filename whose first 19 characters encode
    'YYYY?MM?DD?HH?MM?SS' as America/Los_Angeles local time and return the
    corresponding POSIX timestamp as a string of digits."""
    # Slice the fixed-position date/time fields out of the filename.
    year, month, day = name[:4], name[5:7], name[8:10]
    hour, minute, second = name[11:13], name[14:16], name[17:19]
    stamp_text = day + "-" + month + "-" + year + " " + hour + ":" + minute + ":" + second
    naive = datetime.strptime(stamp_text, "%d-%m-%Y %H:%M:%S")
    pacific = pytz.timezone("America/Los_Angeles")
    # is_dst=None makes ambiguous/nonexistent local times raise rather than guess.
    aware = pacific.localize(naive, is_dst=None)
    in_utc = aware.astimezone(pytz.utc)
    return str(int(datetime.timestamp(in_utc)))
def setResult(self, winUser, lossUser):
    """Record a match result: bump win/loss counters, apply an Elo-style
    rating transfer (scaled by self.maxIncreaseRating), and append a
    history entry to both players."""
    stamp = datetime.timestamp(datetime.today())
    # Elo expected-score formula; kept as one expression to preserve
    # floating-point evaluation order exactly.
    delta = round(self.maxIncreaseRating * 1 / (1 + 10 ** ((winUser.rating - lossUser.rating) / 400)))
    winUser.win += 1
    lossUser.loss += 1
    winUser.rating += delta
    lossUser.rating -= delta
    # DICT INFO: result 0 = loss, 1 = win; opponentUUID — nuff said.
    winUser.history.append({'result': 1, 'opponentUUID': lossUser.uuid, 'date': stamp})
    lossUser.history.append({'result': 0, 'opponentUUID': winUser.uuid, 'date': stamp})
    if self.debugMode == 1:
        print("[Winner : " + winUser.name + " W:" + str(winUser.win) + " L:" + str(winUser.loss) + " Rating:" + str(winUser.rating) + "(+" + str(delta) + ")" + "] [Loser : " + lossUser.name + " W:" + str(lossUser.win) + " L:" + str(lossUser.loss) + " Rating:" + str(lossUser.rating) + "(-" + str(delta) + ")" + "]")
def test_item_emit_completed(self): taskModel = mock.Mock() # Initialize i = XwareTaskItem(namespace = "foo", taskModel = taskModel) i.update(_mockPayloadFactory(), xwareKlass = XwareTaskClass.RUNNING) self.assertRaises(AssertionError, taskModel.taskCompleted.emit.assert_called_once_with, i) # Make it complete i.update(_mockPayloadFactory(completeTime = datetime.timestamp(datetime.now()), progress = 10000), xwareKlass = XwareTaskClass.COMPLETED) taskModel.taskCompleted.emit.assert_called_once_with(i) # Don't emit long-ago completed i.update(_mockPayloadFactory(completeTime = 1, progress = 10000), xwareKlass = XwareTaskClass.COMPLETED) taskModel.taskCompleted.emit.assert_called_once_with(i)
async def on_message(message):
    """Discord command dispatcher for the game bot.

    Text-channel commands: !test, !help, !whisper, !join, !users, !start.
    Direct-message commands: !stats, !continue, !leave.
    Relies on module-level state: games (channel -> players), users
    (user id -> channel), timestamps and usersresponse.
    """
    # Do not want the bot to reply to itself
    if message.author == client.user:
        return
    if message.content.startswith('!test'):
        await message.channel.send(message.channel.type)
    # Message comes from a text channel (not a direct message).
    if str(message.channel.type) == "text":
        # Help
        if message.content.startswith('!help'):
            msg = '''
            !join -
            !start -
            !help - this page
            !whisper -
            In DM:
            !stats
            '''
            await message.channel.send(msg)
        # Whisper test
        if message.content.startswith('!whisper'):
            #user=await client.get_user_info(message.author.id)
            await message.author.send("I'm a very tall midget")
        # Join a game
        if message.content.startswith('!join'):
            # Channel key combines guild name and channel id.
            current_channel = str(message.channel.guild) + str(message.channel.id)
            msg = 'User {0.author.mention} is included in the {1} game'.format(message, current_channel)
            # A game already exists in this channel
            if current_channel in games:
                # Do not allow duplicate players
                if message.author in games[current_channel]:
                    msg = '{0.author.mention} is already in {1} game'.format(message, current_channel)
                else:
                    games.addUser(current_channel, message.author)
            else:
                games.addChannel(current_channel)
                games.addUser(current_channel, message.author)
            await message.channel.send(msg)
        # List the users in the current channel's game
        if message.content.startswith('!users'):
            current_channel = str(message.channel.guild) + str(message.channel.id)
            msg = '''Joined users in game: '''
            if current_channel in games:
                for user in games[current_channel]:
                    msg += '''user {} '''.format(user)
            else:
                msg = ''' No users in THE GAME'''
            await message.channel.send(msg)
        # Start the game in this channel
        if message.content.startswith('!start'):
            current_channel = str(message.channel.guild) + str(message.channel.id)
            if current_channel in games:
                if len(games[current_channel]) < 1:  #TODO 3
                    msg = '''Not enough people'''
                elif len(games[current_channel]) > 8:
                    msg = '''Too much people'''
                else:
                    msg = '''The GAME starts'''
                    # games is a dict: key = channel, value = list of users.
                    for user in games[current_channel]:
                        users[user.id] = current_channel
                        await client.get_user(user.id).send("Private DM")
                    # TODO: game started asynchronously
                    game = Game(players=[
                        Game.Player(i, user.id, {'active': True}, {
                            'gems': 0,
                            'p_gems': 0
                        }) for i, user in enumerate(games[current_channel])
                    ], )
                    threading.Thread(target=game.main_loop).start()
                await message.channel.send(msg)
                await message.channel.send("Game ")
                # Check in the future (?)
                timestamps[current_channel] = int(datetime.timestamp(datetime.now()))
                for user in games[current_channel]:
                    usersresponse[current_channel] = {user: 0}
                # await check_turn(client,timestamps, usersresponse)
                # channelgame[current_channel] = Game()
                # resp = channelgame[current_channel].Start()
    else:
        # Private DM
        if message.content.startswith('!stats'):
            if message.author.id in users:
                msg = str(users[message.author.id]) + str(usersresponse[users[message.author.id]][message.author.id])
            else:
                msg = "No game has started "
            await message.author.send(msg)
        # Player wants to continue
        if message.content.startswith('!continue'):
            if message.author.id in users:
                # usersresponse is a dict: key = channelID, value = dict of users in that game.
                usersresponse[users[message.author.id]][message.author.id] = 1
                await check_turn(client, timestamps, usersresponse)
        # Player wants to stop playing
        if message.content.startswith('!leave'):
            if message.author.id in users:
                usersresponse[users[message.author.id]][message.author.id] = -1
                await check_turn(client, timestamps, usersresponse)
def query_history(self, req: HistoryRequest) -> List[BarData]:
    """Download historical k-line bars from the Binance futures REST API,
    paging forward from req.start until req.end (or until the server
    returns fewer rows than the page limit)."""
    history = []
    limit = 1000
    start_time = int(datetime.timestamp(req.start))
    while True:
        # Create query params
        params = {
            "symbol": req.symbol,
            "interval": INTERVAL_VT2BINANCEF[req.interval],
            "limit": limit,
            "startTime": start_time * 1000,  # convert to millisecond
        }
        # Add end time if specified
        if req.end:
            end_time = int(datetime.timestamp(req.end))
            params["endTime"] = end_time * 1000  # convert to millisecond
        # Get response from server
        resp = self.request("GET", "/fapi/v1/klines", data={"security": Security.NONE}, params=params)
        # Break if request failed with other status code
        if resp.status_code // 100 != 2:
            msg = f"获取历史数据失败,状态码:{resp.status_code},信息:{resp.text}"
            self.gateway.write_log(msg)
            break
        else:
            data = resp.json()
            if not data:
                msg = f"获取历史数据为空,开始时间:{start_time}"
                self.gateway.write_log(msg)
                break
            # Each kline row: [open_time_ms, open, high, low, close, volume, ...]
            buf = []
            for l in data:
                dt = datetime.fromtimestamp(l[0] / 1000)  # convert to second
                bar = BarData(symbol=req.symbol, exchange=req.exchange, datetime=dt, interval=req.interval, volume=float(l[5]), open_price=float(l[1]), high_price=float(l[2]), low_price=float(l[3]), close_price=float(l[4]), gateway_name=self.gateway_name)
                buf.append(bar)
            history.extend(buf)
            begin = buf[0].datetime
            end = buf[-1].datetime
            msg = f"获取历史数据成功,{req.symbol} - {req.interval.value},{begin} - {end}"
            self.gateway.write_log(msg)
            # Break if total data count less than limit (latest date collected)
            if len(data) < limit:
                break
            # Update start time: continue from one interval past the last bar.
            start_dt = bar.datetime + TIMEDELTA_MAP[req.interval]
            start_time = int(datetime.timestamp(start_dt))
    return history
def _deserialize(self, value, attr, data, **kwargs):
    """Deserialize via the parent field, then convert the result to a
    POSIX timestamp (float)."""
    val = super()._deserialize(value, attr, data, **kwargs)
    # datetime.timestamp(val) is equivalent to val.timestamp(); assumes the
    # parent class returns a datetime instance — TODO confirm.
    return datetime.timestamp(val)
# Demo of the datetime module: construction, timestamps, timedeltas, parsing.
objDate = date(year=2020, month=1, day=28)
print("Date", objDate)
# NOTE: the variable is intentionally reused for a time object below.
objDate = time(hour=13, minute=1, second=31)
print("Time", objDate)
objDateTime = datetime(year=2020, month=1, day=28, hour=13, minute=14, second=31)
# Fix: the original printed objDateTime twice in this line.
print("Datetime", objDateTime)
# Round-trip: datetime -> POSIX timestamp -> datetime.
stamp = datetime.timestamp(objDateTime)
print("Date and Time ", objDateTime, "->Timestamp", stamp)
dateTimeFromTimestamp = datetime.fromtimestamp(stamp)
print("Timestamp:", stamp, "->Date and Time ", dateTimeFromTimestamp)
# Subtracting datetimes yields a timedelta.
date1 = datetime(year=2020, month=1, day=31, hour=1, minute=3, second=5)
date2 = datetime(year=2030, month=3, day=31, hour=12, minute=13, second=5)
delta = date2 - date1
# (Removed the unused `a = 10 - 19` leftover.)
print("TimeDelta difference :", date2, "-", date1, "=", delta, type(delta))
# Parse a date string into a datetime.
myString = "2020-10-16"
myDate = datetime.strptime(myString, "%Y-%m-%d")
# CSV file samples = inputDir + "/samples.csv" with tf.io.TFRecordWriter(outputFile) as writer: with open(samples, newline='') as csvfile: spamreader = csv.reader(csvfile, delimiter=',', quotechar='|') for row in spamreader: date = row[0] hour = row[1] date_hour = date + "," + hour temp = int(row[2]) humidity = int(row[3]) audioFile = str(row[4]) date_hour = datetime.strptime(date_hour, '%d/%m/%Y,%H:%M:%S') posix = datetime.timestamp(date_hour) posix_int = int(posix) audio = tf.io.read_file(inputDir + "/" + audioFile) audio = audio.numpy() posix_date_hour_feature = tf.train.Feature( int64_list=tf.train.Int64List(value=[posix_int])) temp_feature = tf.train.Feature(int64_list=tf.train.Int64List( value=[temp])) humidity_feature = tf.train.Feature(int64_list=tf.train.Int64List( value=[humidity])) audioFile_feature = tf.train.Feature(bytes_list=tf.train.BytesList( value=[audio]))
def pretty_print_pass_time(self):
    """Return a human-readable countdown until self.start.

    self.start is an epoch timestamp in milliseconds; the remaining time
    is reported in minutes with one decimal place.
    """
    now_ms = datetime.timestamp(datetime.now()) * 1000
    minutes_left = (self.start - now_ms) / 1000.0 / 60.0
    return f"The pass will start in {minutes_left:.1f} minutes"
def seed(fake_date=None):
    """
    Replaces the contents of the existing NytLiveCounty database with the
    last 14 days of data from the NYT github repo

    fake_date is for testing purposes - it forces the function to populate
    the database assuming that today is fake_date - type is datetime
    """
    global STALE_DATE
    # Clear existing database
    # db.query(models.NytLiveCounty).delete()
    # Check if repo needs to be pulled otherwise make sure it's on master
    check_and_reset_repo()
    # Initialize repo object
    repo = Repo("covid-19-data")
    # Set current commit
    # cmt = repo.heads.master.commit
    cmt = repo.head.commit
    # If testing, crawl back in time to fake date
    if fake_date is not None:
        # cmt_date = pytz.UTC.localize(cmt.authored_datetime)
        while cmt.authored_datetime > pytz.UTC.localize(fake_date):
            cmt = cmt.parents[0]  # Assumes no branchpoints :/
        # Recompute the staleness cutoff relative to the faked "today".
        stale_date = datetime.timestamp(fake_date) - DB_AGE_LIMIT
        STALE_DATE = get_day_from_ts(stale_date)

    def get_ts(commit):
        # Epoch seconds of the commit's author date.
        return commit.authored_datetime.timestamp()

    # Walk commits backwards one day at a time, ingesting each day's CSV,
    # until the commit day falls at or before STALE_DATE.
    while get_day_from_ts(get_ts(cmt)) > STALE_DATE:
        # Checkout data
        run_git_command(
            f"cd covid-19-data && git checkout -f {cmt.hexsha} && cd ../"
        )
        # Load data
        # add_data(db, "covid-19-data/live/us-counties.csv", cmt.hexsha)
        df = pd.read_csv("covid-19-data/live/us-counties.csv", dtype=str)
        df = df[df["fips"].notna()]
        add_data(df, cmt.hexsha)
        # Select next data to load: first commit of the previous day.
        yesterday = get_day_from_ts(get_ts(cmt)) - 1
        while get_day_from_ts(get_ts(cmt)) != yesterday:
            cmt = cmt.parents[0]  # Assumes no branchpoints :/
    # clear old data
    db = next(get_db())
    try:
        old_recs_count = (
            db.query(models.NytLiveCounty).filter(
                models.NytLiveCounty.timestamp < FIFTEEN_DAYS_AGO
            )
            # .all()
            .delete()
        )
        print(f"Old recs count: {old_recs_count}")
        # for rec in old_recs:
        #     db.delete(rec)
        if fake_date is not None:
            # When faking "today", also drop records newer than the fake date.
            too_new_recs_count = (
                db.query(models.NytLiveCounty).filter(
                    models.NytLiveCounty.date > fake_date
                )
                # .all()
                .delete()
            )
            print(f"too_new_recs_count: {too_new_recs_count}")
            # for rec in too_new_recs:
            #     db.delete(rec)
        db.commit()
    except Exception:
        traceback.print_exc()
        print("ABANDONING NYT DATA POPULATION")
        db.rollback()
# Application bootstrap: JWT auth, CORS, ZooKeeper registration, and a
# startup marker written to the top of the log file.
jwt = JWTManager(app)
CORS(app)
# Connect to ZooKeeper and register this storage node as an ephemeral znode
# (it disappears automatically when the server goes away).
client = KazooClient(hosts=app.config['ZK_HOST'])
client.start()
client.ensure_path("/storage")
if not client.exists('/storage/' + app.config['STORAGE_ID']):
    # b"1000" is the znode's initial payload — presumably a capacity/quota
    # value; confirm against the coordinator that reads it.
    client.create('/storage/' + app.config['STORAGE_ID'], b"1000", ephemeral=True)
now = datetime.now()
timestamp = datetime.timestamp(now)
with open(LOG_FILE, 'r') as file:
    # read a list of lines into data
    data = file.readlines()
# Prepend a separator plus the startup timestamp to the log.
line = "_____________________________________________\n"
data.insert(
    0, line + "Timestamp: " + str(datetime.fromtimestamp(timestamp)) + '\n')
with open(LOG_FILE, 'w') as file:
    file.writelines(data)


@app.route('/')
def index():
    # Simple liveness endpoint identifying this storage node.
    return "Hello from ~Storage Server " + app.config['STORAGE_ID'] + "\n"
if week_change > 0: week_change = Back.GREEN + str(week_change) + '%' + Style.RESET_ALL else: week_change = Back.RED + str(week_change) + '%' + Style.RESET_ALL portfolio_val += value value_string = "{:,}".format(round(value, 2)) table.add_row([ name + "(" + symbol + ")", amount, 'Rs.' + value_string, 'Rs.' + str(price), str(hour_change), str(day_change), str(week_change) ]) print(table) print() portfolio_val_string = '{:,}'.format(round(portfolio_val, 2)) x = datetime.timestamp(datetime.strptime(last_updated, "%Y-%m-%dT%H:%M:%S.%fZ")) last_updated_date = datetime.fromtimestamp(x).strftime('%B %d,%Y at %I:%M%p') print("Total Portfolio Values : " + Back.GREEN + "Rs." + portfolio_val_string + Style.RESET_ALL) print() print("API results last updated on " + last_updated_date) print()
def filterPipelinesPerPage(self, pipelines, startTime): return list( filter( lambda x: datetime.timestamp(startTime) <= (int(x["scheduled_timestamp"]) / 1000), pipelines))
def CleanOldVersion(OldPath, MaxTime): """ permet de transferer les fichiers dans OldVersion qui devront basculer dans Deleted """ Operations = [] now = datetime.now() Dest = OldPath.parent.joinpath("Deleted") ##Verification des archives for Folder in OldPath.walkdirs(): if Folder.name not in PassThisFiles: for element in AsZippedFolder: if element in Folder.name: if abs(datetime.timestamp(now) - os.path.getctime(Folder)) > MaxTime: # cas ou le fichier n'est pas present dans le dossier deleted if os.path.isdir( Dest + "/" + Folder.name) == False and os.path.isfile( Dest + "/" + Folder.name) == False: Operations.append([ (shutil.copytree, (str(Folder), Dest + "/" + Folder.name), "Archive copied to : " + str(Dest + "/" + Folder.name)), (shutil.rmtree, (Folder), "Old archive deleted : " + str(Folder)) ]) # sinon : else: Operations.append([ (shutil.rmtree, (Dest.replace("\\", "/") + "/" + Folder.name), "previous old archive deleted : " + str(Dest + "/" + Folder.name)), (shutil.copytree, (str(Folder), Dest + "/" + Folder.name), "Archive copied to : " + str(Dest + "/" + Folder.name)), (shutil.rmtree, (Folder), "Old archive deleted : " + str(Folder)) ]) #copy_tree(str(Folder),Dest.joinpath(Folder.name)) ##Verification des fichiers for File in OldPath.walkfiles(): if abs(datetime.timestamp(now) - os.path.getmtime(File)) > MaxTime * 24 * 60 * 60: #cas ou le fichier n'est pas present dans le dossier deleted if os.path.isfile(str(Dest + "/" + File.name)) == False: Operations.append([(shutil.move, (str(File), str(Dest + "/" + File.name)), "File moved to Deleted : " + str(File))]) #sinon else: Operations.append([(os.remove, (str(Dest + "/" + File.name)), "previous old file deleted : " + str(Dest + "/" + File.name)), (shutil.move, (str(File), str(Dest + "/" + File.name)), "File moved to Deleted : " + str(File))]) #shutil.move(File,Dest.joinpath(File.name)) return Operations
def canStop(self, pipelines, startTime): scheduledTimestatmps = list( map(lambda x: int(x["scheduled_timestamp"]) / 1000, pipelines)) return (min(scheduledTimestatmps)) < datetime.timestamp(startTime)
def getDatetime(self, time): date_time = datetime.strptime(time, '%Y-%m-%d %H:%M:%S.%f') return datetime.timestamp(date_time)
def on_event(self, event: Event):
    """Route a Plot event into the in-memory chart series.

    Dispatches on plot['name'] (price / equity / buy / sell / open_* /
    cancel_* / anything else) and appends [timestamp_ms, value] points
    to self.series; when self.live_plot is set, also pushes updates to
    the websocket chart/balance consumers.

    NOTE(review): the source was whitespace-mangled; the nesting of the
    live_plot sub-blocks inside the buy/sell branches is a best-effort
    reconstruction — confirm against the original file.
    """
    if isinstance(event, Plot):
        plot = event.payload
        if plot['name'].lower() == 'price':
            # Only 1-minute candles are charted.
            if plot['data']['timeframe'] == '1m':
                # ISO datetime -> epoch milliseconds (chart x-axis unit).
                date = int(
                    datetime.timestamp(
                        datetime.fromisoformat(
                            plot['data']['datetime']))) * 1000
                self.series['OHLC']['data'].append([
                    date, plot['data']['open'], plot['data']['high'],
                    plot['data']['low'], plot['data']['close']
                ])
                # Only plot the liquidation price when it is within
                # +/-20% of the close; otherwise store None (gap).
                if plot['data'][
                        'liquidationPrice'] is not None and 0.8 < float(
                            plot['data']['liquidationPrice']
                        ) / plot['data']['close'] < 1.2:
                    liq_price = float(plot['data']['liquidationPrice'])
                else:
                    liq_price = None
                self.series['liq_price']['data'].append([date, liq_price])
                self.series['volume']['data'].append(
                    [date, plot['data']['vol']])
                # Total equity expressed in quote currency at this close.
                y = round(self.balance['quote_balance'] +
                          self.balance['base_balance'] *
                          plot['data']['close'])
                self.series['equity']['data'].append([date, y])
                self.series['base_equity']['data'].append([
                    date, self.balance['base_balance'] +
                    self.balance['quote_balance'] / plot['data']['close']
                ])
                self.series['position']['data'].append(
                    [date, self.balance['position_vol'] + 0])
                self.series['position_price']['data'].append(
                    [date, self.balance['position_price']])
                self.series['quote_balance']['data'].append(
                    [date, round(self.balance['quote_balance'], 2)])
                self.series['base_balance']['data'].append(
                    [date, self.balance['base_balance']])
                # Buy-and-hold benchmark valued at the current close,
                # seeded from the first recorded base equity.
                self.series['hold']['data'].append([
                    date,
                    round(self.series['base_equity']['data'][0][1] *
                          plot['data']['close'])
                    if self.series['base_equity']['data'] else round(
                        plot['data']['base_equity'] *
                        plot['data']['close'])
                ])
                if self.live_plot:
                    time.sleep(1)
                    self.new_price()
        elif plot['name'].lower() == 'equity':
            # First equity event defines the asset-pair labels.
            if self.series['assets'] is None:
                self.series['assets'] = {
                    'quote': plot['quote'],
                    'base': plot['base']
                }
                if self.live_plot:
                    asyncio.get_event_loop().run_until_complete(
                        self.update_plot(
                            {
                                "action": "produce",
                                "type": "assets",
                                "data": self.series['assets']
                            }, self.chart_port))
                    asyncio.get_event_loop().run_until_complete(
                        self.update_plot(
                            {
                                "action": "produce",
                                "type": "assets",
                                "data": self.series['assets']
                            }, self.balance_port))
            self.balance['quote'] = round(plot['data']['y'], 2)
            self.balance['base'] = plot['data']['base_equity']
            self.balance['quote_balance'] = plot['data']['quote_balance']
            self.balance['base_balance'] = plot['data']['base_balance']
            self.balance['position_vol'] = plot['data']['position_vol']
            # A zero position price means "no open position" -> None.
            self.balance[
                'position_price'] = plot['data']['position_price'] if plot[
                    'data']['position_price'] != 0 else None
            self.balance['invested'] = plot['data']['invested']
        elif plot['name'].lower() == 'buy':
            # chart_type 0 = no orders, 1 = markers, 2 = markers + lines.
            if self.chart_type > 0:
                order_data = self.format_order_data(plot)
                self.series['buy']['data'].append(order_data)
                if self.chart_type > 1:
                    self.close_line(order_data,
                                    self.series['buy_series']['series'])
                if self.live_plot:
                    asyncio.get_event_loop().run_until_complete(
                        self.update_plot(
                            {
                                "action": "produce",
                                "type": 'buy',
                                "data": order_data
                            }, self.chart_port))
        elif plot['name'].lower() == 'sell':
            if self.chart_type > 0:
                order_data = self.format_order_data(plot)
                self.series['sell']['data'].append(order_data)
                if self.chart_type > 1:
                    self.close_line(order_data,
                                    self.series['sell_series']['series'])
                if self.live_plot:
                    asyncio.get_event_loop().run_until_complete(
                        self.update_plot(
                            {
                                "action": "produce",
                                "type": 'sell',
                                "data": order_data
                            }, self.chart_port))
        elif plot['name'].lower() == 'open_buy':
            if self.chart_type > 1:
                order_data = self.format_order_data(plot)
                self.series['open_buy']['data'].append(order_data)
                self.open_line(order_data,
                               self.series['buy_series']['series'])
        elif plot['name'].lower() == 'open_sell':
            if self.chart_type > 1:
                order_data = self.format_order_data(plot)
                self.series['open_sell']['data'].append(order_data)
                self.open_line(order_data,
                               self.series['sell_series']['series'])
        elif plot['name'].lower() == 'cancel_buy':
            if self.chart_type > 1:
                order_data = self.format_order_data(plot)
                self.series['cancel_buy']['data'].append(order_data)
                self.close_line(order_data,
                                self.series['buy_series']['series'])
        elif plot['name'].lower() == 'cancel_sell':
            if self.chart_type > 1:
                order_data = self.format_order_data(plot)
                self.series['cancel_sell']['data'].append(order_data)
                self.close_line(order_data,
                                self.series['sell_series']['series'])
        else:
            # Generic user-defined series: register on first sight,
            # then default the tooltip to the y value.
            # NOTE(review): the visible chunk ends here without
            # appending `data` to the series — the tail of this branch
            # may be outside this view.
            if plot['name'] not in self.series:
                self.series[plot['name']] = plot.copy()
                self.series[plot['name']]['data'] = []
            data = plot['data'].copy()
            if 'tooltip' not in data:
                data['tooltip'] = data['y']
def to_timestamp(self): return datetime.timestamp(self.date_obj)
def store(self, msg_id: int, user_id: int) -> None: expiry = datetime.timestamp(datetime.now() + timedelta(days=self.expire)) with open(self.db, "a+", newline="", encoding="utf-8") as csvfile: writer = csv.writer(csvfile, delimiter=",") writer.writerow([msg_id, user_id, int(expiry)])
def __init__(self, prefix=""): from datetime import datetime timestamp = datetime.timestamp(datetime.now()) self._outputfile = open(prefix + "_results_" + str(timestamp) + ".csv", 'w')
def __init__(
        self,
        filename,
        variables_requested=None,
        runname="noname",
        printfiles=True,
        filetimes=False,
        gridded_int=True,
        **kwargs):
    # time_index=None, time_seconds=None,
    """Load a model run from paired .3D./.2D. netCDF files and register
    the requested variables for Kamodo function access.

    filename may be either the 3D or the 2D file — the sibling path is
    derived by swapping the tag. filetimes=True returns early after the
    time attributes are set (used for fast time-range queries without
    loading data).
    """
    # Prepare model for function registration for the input argument
    super(MODEL, self).__init__(**kwargs)

    # collect filenames
    if '.2D.' in filename:  # require that input filename be for 3D file
        filename2d = filename
        f = filename.replace('.3D.', '.2D.')  # can't replace in place
        filename = f
    else:
        filename2d = filename.replace('.3D.', '.2D.')
    self.filename = filename
    self.filename2d = filename2d
    if printfiles:
        print(filename, filename2d)

    # establish time attributes first
    self._iri3D = Dataset(filename, 'r')
    self._time = np.array(self._iri3D.variables['time']
                          ) / 60.  # convert to hours since midnight of file
    # NOTE(review): file date parsed from the filename as YYYY (chars
    # [-10:-6]) plus day-of-year (chars [-6:-3]) — assumes a fixed
    # filename convention; confirm.
    self.filedate = datetime(int(filename[-10:-6]),1,1,0,0,0).replace(tzinfo=timezone.utc)+\
        timedelta(days=int(filename[-6:-3])-1)
    # strings with timezone info chopped off (UTC anyway)
    self.datetimes = [
        (self.filedate +
         timedelta(hours=self._time[0])).isoformat(sep=' ')[:19],
        (self.filedate +
         timedelta(hours=self._time[-1])).isoformat(sep=' ')[:19]
    ]  # strings
    self.filetimes=[datetime.timestamp(datetime.strptime(dt, '%Y-%m-%d %H:%M:%S').replace(\
        tzinfo=timezone.utc)) for dt in self.datetimes
                    ]  # timestamp in seconds, for value matching in wrapper?
    self.timerange0 = {
        'min': self.datetimes[0],
        'max': self.datetimes[1],
        'n': len(self._time)
    }  # strings in format = YYYY-MM-DD HH:MM:SS
    self.timerange = self.timerange0
    if filetimes:
        return

    # collect data and make dimensional grid from 3D file
    self._iri2D = Dataset(filename2d, 'r')
    self._lon = np.array(self._iri3D.variables['lon'])
    self._lat = np.array(self._iri3D.variables['lat'])
    self._height = np.array(self._iri3D.variables['ht'])

    # store a few items in iri object
    self.missing_value = np.NAN
    self._registered = 0
    self.variables = {}
    self.runname = runname
    self.modelname = 'MODEL'

    # if variables_requested not given, collect all values from dict
    # above as a list
    if variables_requested is None:
        variables_requested = [
            value[0] for key, value in model_varnames.items()
        ]

    # collect list of iri variable name equivalents
    var_names = [
        key for key, value in model_varnames.items()
        if value[0] in variables_requested
    ]
    extra_variables = [
        var for var in variables_requested
        if var not in [value[0] for key, value in model_varnames.items()]
    ]
    if len(extra_variables
           ) > 0:  # pull out variables not allowed and error if not empty
        print('Some requested variables are not available:',
              extra_variables)

    # register each variable desired
    for varname in var_names:
        # determine source file type for variable
        file_type = ''
        if varname in self._iri3D.variables.keys():
            file_type = '3D'
        elif varname in self._iri2D.variables.keys():
            file_type = '2D'
        else:
            raise AttributeError(
                f"{varname} not found in the files' metadata.")

        # set variables, units
        variable = np.array(
            getattr(self, '_iri' + file_type).variables[varname])  # set data
        if (len(variable.shape) not in [3, 4]):
            continue  # skip anything not 3D or 4D
        units = model_varnames[varname][
            -1]  # units stored as last item in list per varname
        kamodo_varname = model_varnames[varname][0]

        # register allowed 3D and 4D variables
        self.variables[kamodo_varname] = dict(
            units=units, data=variable)  # register in object
        if len(variable.shape
               ) == 4:  # define and register interpolators for each
            self.register_4D_variable(
                units, variable, kamodo_varname,
                gridded_int)  # len(var.shape) instead of file_type
        elif len(variable.shape) == 3:
            self.register_3D_variable(units, variable, kamodo_varname,
                                      gridded_int)

    # close netCDF4 files, initialize plotting variables
    self._iri3D.close()
    self._iri2D.close()
    self = RPlot.initialize_4D_plot(
        self)  # initialize 4D plotting variables
def upload_endpoint():
    """Handle a music upload: validate the form, save cover + audio,
    cut a demo clip with ffmpeg, then register the media with the DB
    service.

    Expects multipart form data: cover (image), audio (mp3/wav), title,
    dtype (int distribution type), start/end (demo segment bounds in
    seconds). Returns (body, status) per Flask convention.
    """
    # Must be logged in.
    if 'uid' not in session:
        return '', 401
    cover, audio = request.files.get('cover'), request.files.get('audio')
    title, dist_type = request.form.get('title'), request.form.get(
        'dtype', type=int)
    start, end = request.form.get('start', type=float), request.form.get(
        'end', type=float)
    if cover is None:
        return jsonify(message='Missing image in the request'), 400
    if audio is None:
        return jsonify(message='Missing audio in the request'), 400
    if cover.filename == '' or audio.filename == '':
        return jsonify(message='No file chosen'), 400
    if title is None:
        return jsonify(message='No title provided'), 400
    if dist_type is None:
        return jsonify(message='No distribution type specified'), 400
    if start is None or end is None:
        # NOTE(review): "segement" typo is in the user-facing message.
        return jsonify(message='Demo segement not specified'), 400
    # Strips the static-folder prefix from saved paths so the stored
    # URL is relative to the web root.
    regex = re.compile(r'{}(.*)$'.format(FLASK_STATIC_FOLDER))
    # Save the cover image under a timestamped, sanitized name.
    ext = cover.filename.rsplit('.', 1)[-1].lower()
    if ext in ['jpg', 'jpeg', 'png', 'gif']:
        filename = f'{int(datetime.timestamp(datetime.now()))}-{secure_filename(cover.filename)}'
        # NOTE(review): 'UPLOAD_FOLER' (sic) must match the config key
        # set elsewhere in the app — do not "fix" one without the other.
        path = os.path.join(app.config['UPLOAD_FOLER'], filename)
        cover.save(path)
        cover = regex.match(path).group(1)
    else:
        return jsonify(message='File format error'), 400
    # Save the audio, then cut the demo segment (ss=offset, t=duration).
    ext = audio.filename.rsplit('.', 1)[-1].lower()
    if ext in ['mp3', 'wav']:
        ts = int(datetime.timestamp(datetime.now()))
        filename = secure_filename(audio.filename)
        path = os.path.join(app.config['UPLOAD_FOLER'], f'{ts}-(unknown)')
        audio.save(path)
        audio = path
        path = os.path.join(app.config['UPLOAD_FOLER'],
                            f'{ts}-demo-(unknown)')
        demo = ffmpeg.input(audio, ss=start, t=end - start)
        demo = ffmpeg.output(demo, path)
        ffmpeg.run(demo)
        audio = regex.match(audio).group(1)
        demo = regex.match(path).group(1)
    else:
        return jsonify(message='File format error'), 400
    # Register the media record with the database service.
    r = requests.post(f'http://{DB_SERVER}/create-media',
                      json={
                          'title': title,
                          'full_audio': audio,
                          'demo_segment': demo,
                          'cover': cover,
                          'dist_type': bool(dist_type),
                          'uid': session['uid']
                      })
    if r.status_code == 400:
        # Forward the DB service's error body when it has one.
        return jsonify(r.json()) if len(r.text) > 0 else '', 400
    return jsonify(
        status=True,
        mid=r.json()['mid'],
        message='Your music has been successfully uploaded to our platform!',
        redirect='/'), 200
def get_directory(self): return os.path.join(self.solution.get_directory(), 'tests', 'test' + str(datetime.timestamp(self.create_date)))
def process(self, x, **kwargs): date, _ = x yield window.TimestampedValue( x, datetime.timestamp(datetime.strptime(date, '%Y-%m-%d %H:%M:%S')))
def to_timestamp(self, dateTime): dt = date_time.strptime(dateTime, self.DATE_TIME_FORMAT) return int(date_time.timestamp(dt))
async def dailyreward(self, ctx: commands.Context):
    """Claim your daily reward.

    Flow: feature toggle -> global-bank check -> cooldown check ->
    Top.gg vote check (prompt to vote if missing) -> deposit the reward
    (plus an optional week-end bonus) and set a 12-hour cooldown.
    """
    config = await self.config.all()
    if not config["daily_rewards"]["toggled"]:
        return
    if not await bank.is_global():
        # NOTE(review): .format(ctx.clean_prefix) has no placeholder in
        # the string — harmless no-op, but confirm the intended message.
        return await ctx.send(
            _(
                "The bot's bank need to be global to use this feature. It can be fixed by bot owner."
            ).format(ctx.clean_prefix)
        )
    author = ctx.author
    cur_time = int(time.time())
    next_daily = await self.config.user(author).next_daily()
    # Still on cooldown -> report the remaining wait and stop.
    if cur_time <= next_daily:
        delta = humanize_timedelta(seconds=next_daily - cur_time) or "1 second"
        msg = author.mention + _(
            " Too soon!\nYou have already claim your daily reward!\n"
            "Wait **{}** for the next one."
        ).format(delta)
        if not await ctx.embed_requested():
            await ctx.send(msg)
        else:
            em = discord.Embed(description=msg, color=discord.Color.red())
            await ctx.send(embed=em)
        return
    credits_name = await bank.get_currency_name(ctx.guild)
    # Week-end bonus applies only when enabled and it is the week-end.
    weekend = check_weekend() and config["daily_rewards"]["weekend_bonus_toggled"]
    try:
        check_vote = await self.dbl.get_user_vote(author.id)
    except dbl.errors.HTTPException as error:
        log.error("Failed to fetch Top.gg API.", exc_info=error)
        return await ctx.send(_("Failed to contact Top.gg API. Please try again later."))
    # No recent vote -> prompt with the vote link instead of paying out.
    if not check_vote:
        maybe_weekend_bonus = ""
        if weekend:
            maybe_weekend_bonus = _(" and the week-end bonus of {} {}").format(
                humanize_number(config["daily_rewards"]["weekend_bonus_amount"]), credits_name
            )
        title = _(
            "**Click here to upvote {bot_name} every 12 hours to earn {amount} {currency}{weekend}!**"
        ).format(
            bot_name=self.bot.user.name,
            amount=humanize_number(config["daily_rewards"]["amount"]),
            currency=credits_name,
            weekend=maybe_weekend_bonus,
        )
        vote_url = f"https://top.gg/bot/{self.bot.user.id}/vote"
        if not await ctx.embed_requested():
            await ctx.send(f"{title}\n\n{vote_url}")
        else:
            em = discord.Embed(color=discord.Color.red(), title=title, url=vote_url)
            await ctx.send(embed=em)
        return
    regular_amount = config["daily_rewards"]["amount"]
    weekend_amount = config["daily_rewards"]["weekend_bonus_amount"]
    # Next claim allowed 12 hours from now (stored as Unix timestamp).
    next_vote = int(datetime.timestamp(datetime.now() + timedelta(hours=12)))
    try:
        await bank.deposit_credits(
            author, amount=regular_amount + weekend_amount if weekend else regular_amount
        )
    except errors.BalanceTooHigh as exc:
        # Clamp at the bank cap and explain instead of failing.
        await bank.set_balance(author, exc.max_balance)
        await ctx.send(
            _(
                "You've reached the maximum amount of {currency}! (**{new_balance}**) "
                "Please spend some more \N{GRIMACING FACE}\n\n"
                "You currently have {new_balance} {currency}."
            ).format(currency=credits_name, new_balance=humanize_number(exc.max_balance))
        )
        return
    pos = await bank.get_leaderboard_position(author)
    await self.config.user(author).next_daily.set(next_vote)
    maybe_weekend_bonus = (
        _("\nAnd your week-end bonus, +{}!").format(humanize_number(weekend_amount))
        if weekend
        else ""
    )
    title = _("Here is your daily bonus!")
    description = _(
        " Take some {currency}. Enjoy! (+{amount} {currency}!){weekend}\n\n"
        "You currently have {new_balance} {currency}.\n\n"
    ).format(
        currency=credits_name,
        amount=humanize_number(regular_amount),
        weekend=maybe_weekend_bonus,
        new_balance=humanize_number(await bank.get_balance(author)),
    )
    footer = _("You are currently #{} on the global leaderboard!").format(humanize_number(pos))
    if not await ctx.embed_requested():
        await ctx.send(f"{author.mention} {title}{description}\n\n{footer}")
    else:
        em = discord.Embed(
            color=await ctx.embed_color(),
            title=title,
            description=author.mention + description,
        )
        em.set_footer(text=footer)
        await ctx.send(embed=em)
def f(self, meal): last_hour_date_time = datetime.now() - timedelta(hours=24) return meal['timestamp'] > datetime.timestamp(last_hour_date_time)
def get(self, org, repo, pull):
    """Return merge-stat details for one pull request as a JSON-able
    dict.

    Looks up a per-repo GitHub token in Mongo (falling back to the
    GITHUB_USER_TOKEN env var), builds the PullRequest helper, and
    reports contributors, votes, key dates and computed merge times.
    """
    full_name = '{}/{}'.format(org, repo)
    mongo_url = os.getenv('MONGODB_URI', 'mongodb://localhost:27017/worlddriven')
    mongo = MongoClient(mongo_url)
    database = mongo.get_database()
    mongo_repository = database.repositories.find_one(
        {'full_name': full_name})
    token = os.getenv('GITHUB_USER_TOKEN')
    if mongo_repository:
        # Stored repo-specific token takes precedence over the env var.
        token = mongo_repository['github_access_token']
    github_client = github.Github(token)
    repository = github_client.get_repo(full_name)
    pull_request = repository.get_pull(pull)
    pr = PullRequest(repository, pull_request, token)
    pr.get_contributors()
    pr.update_contributors_with_reviews()
    pr.update_votes()
    pr.get_latest_dates()
    pr.get_merge_time()
    # Each contributor's share of the total merge time, weighted by
    # their commit count relative to the total votes.
    for contributor in pr.contributors:
        pr.contributors[contributor]['time_value'] = timedelta(
            days=(pr.contributors[contributor]['commits'] /
                  float(pr.votes_total)) *
            pr.total_merge_time).total_seconds()
    contributors = [
        pr.contributors[contributor] for contributor in pr.contributors
    ]

    def activeFirst(value):
        # Sort key: weight commits by review activity; the +0.1 keeps
        # contributors with review_value == 0 from collapsing to zero.
        return abs(value['review_value'] + 0.1) * value['commits']

    contributors = sorted(contributors, key=activeFirst, reverse=True)
    return {
        'pull_request': {
            'org': org,
            'repo': repo,
            'number': pull_request.number,
            'title': pull_request.title,
            'url': pull_request.url,
            'user': pull_request.user.raw_data,
            'state': pull_request.state,
            'mergeable': pull_request.mergeable,
            'stats': {
                'mergeable': pr.mergeable_pull_request(),
                'coefficient': pr.coefficient,
                'votes': pr.votes,
                'votes_total': pr.votes_total,
                'contributors': contributors,
                'commits': pr.commits,
                'age': {
                    'days': pr.age.days,
                    'seconds': pr.age.seconds,
                    'microseconds': pr.age.microseconds,
                    'total_seconds': pr.age.total_seconds(),
                }
                # 'reviews': get_reviews(repository, pull_request)
            },
            # All dates exported as Unix timestamps.
            'dates': {
                'max': datetime.timestamp(pr.max_date),
                'commit': datetime.timestamp(pr.commit_date),
                'unlabel': datetime.timestamp(pr.unlabel_date),
                'push': datetime.timestamp(pr.push_date),
                'created': datetime.timestamp(pr.pull_request.created_at),
                'last_draft': datetime.timestamp(pr.ready_for_review_date),
            },
            'times': {
                'total_merge_time': pr.total_merge_time,
                'merge_duration': {
                    'days': pr.merge_duration.days,
                    'seconds': pr.merge_duration.seconds,
                    'microseconds': pr.merge_duration.microseconds,
                    'total_seconds': pr.merge_duration.total_seconds(),
                },
                'days_to_merge': {
                    'days': pr.days_to_merge.days,
                    'seconds': pr.days_to_merge.seconds,
                    'microseconds': pr.days_to_merge.microseconds,
                    'total_seconds': pr.days_to_merge.total_seconds(),
                },
                'commits': pr.commits,
                # Projected merge date = latest relevant date + duration.
                'merge_date': datetime.timestamp(pr.max_date +
                                                 pr.merge_duration)
            }
        },
    }
def backup_lib(): timestamp = int(datetime.timestamp(datetime.now())) adb.su_shell(f'cp /data/data/{app}/lib/libil2cpp.so /sdcard/') adb.pull('/sdcard/libil2cpp.so', f'libil2cpp.so.{timestamp}.bak') adb.su_shell('rm /sdcard/libil2cpp.so')
def perform_create(self, serializer): date_timestamp = datetime.timestamp(datetime.now()) message = serializer.save( from_user_id=1, date_timestamp=date_timestamp, id=randint(523, 7458745) ) self.send_to_telegram(message)
from sklearn.neural_network import MLPClassifier # 92/54 #from sklearn.neighbors import KNeighborsClassifier # useless from sklearn.linear_model import PassiveAggressiveClassifier # 92/86 from sklearn.ensemble import VotingClassifier import joblib # EVALUATION from sklearn.metrics import confusion_matrix, balanced_accuracy_score, classification_report # misc arguments DATASETPATH = "data/set1" MODELPATH = os.path.join(DATASETPATH, "models") os.makedirs(MODELPATH, exist_ok=True) CURRENT_TIME = dt.timestamp(dt.now()) # PREPARE # read all 0-label DOIs with open(os.path.join(DATASETPATH, "0.txt"), "r") as fptr: doi0 = fptr.readlines() files0 = [s.strip().replace("/", "-") + ".json" for s in doi0] # read all 1-label DOIs with open(os.path.join(DATASETPATH, "1.txt"), "r") as fptr: doi1 = fptr.readlines() files1 = [s.strip().replace("/", "-") + ".json" for s in doi1] # read all JSON files y = []
result = datetime.strftime(simdi, '%Y') #şu anın yıl bilgisi result = datetime.strftime(simdi, '%X') #şu anın bilgisini verir result = datetime.strftime(simdi, '%d') #şuanın gün bilgisini verir result = datetime.strftime(simdi, '%Y %B %A') #yıl ay gün #t='21 Nisan 2019' #gun,ay,yil=t.split()#boşluklardan ayır #print(gun) #print(ay) #print(yil) t = '15 April 2019 hour 10:12:30' dt = datetime.strptime(t, '%d %B %Y hour %H:%M:%S') result = dt.year print(dt) birthday = datetime(1983, 5, 9, 12, 30, 10) #yıl ay gün saat dakika saniye result = datetime.timestamp(birthday) #saniye bilgisini verir result = datetime.fromtimestamp( result) #saniye bilgisini datetime a çevirdik(gün ay yıl saat dakika) result = datetime.fromtimestamp(0) #1970 bilgisini verir result = simdi - birthday #gün ay yıl saat farkı result = simdi + timedelta( days=10) #timedelta kullanarak simdinin üzerine 10 gün eklenmiş olur result = simdi + timedelta(days=730, minutes=30) #730 gün ve 30 dakika ekledik print(result) #print(result) #datetime python(modül içindeki fonksiyonlara bakabilirsin)
def to_timestamp(value: datetime) -> float: return float(datetime.timestamp(value))