async def get_events(self, num=10, page=0, more=False, weekend=False):
    logger.debug("cogs/f1/get_events: Fetching", num=num, more=more,
                 weekend=weekend)
    lines = []
    calendar = await self.calendar()
    timeline = list(calendar.timeline.start_after(Arrow.now()))
    start = min(page * num, len(timeline) - num)
    logger.info("cogs/f1/get_events", start=start, len_timeline=len(timeline))
    for event in list(calendar.timeline.now()):
        lines.append(
            f"**{event.name}** ongoing, ending "
            + human(event.end.to(SUPER_TIMEZONE).timestamp, precision=2))
    for event in timeline[start:]:
        local_time = event.begin.to(SUPER_TIMEZONE)
        lines.append("**{0}** {1}, {2}".format(
            event.name,
            human(local_time.timestamp, precision=2),
            local_time.strftime("%d %b @ %H:%M"),
        ))
        if len(lines) >= num or weekend and local_time.isoweekday() in (7, 1):
            break
    if more and len(timeline) - start - num:
        lines.append(f"...and {len(timeline) - start - num} more")
    logger.info("cogs/f1/get_events: Fetched", result=lines)
    return lines

def status():
    data = {}
    with open(os.path.join(app.config["DATA_DIR"], b"library", b"lock"), "w") as f:
        try:
            fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
            fcntl.flock(f, fcntl.LOCK_UN)
            data["updating"] = False
        except IOError as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                data["updating"] = True
            else:
                raise
    if data["updating"]:
        data["update_started"] = human(
            datetime.now().replace(microsecond=0)
            - datetime.fromtimestamp(float(redis.get(
                "library_update:start"))).replace(microsecond=0))
        data["updater"] = {
            "cmdline": redis.get("library_update:updater:cmdline"),
            "pid": int(redis.get("library_update:updater:pid")),
        }
        data["updating"] = (redis.get("library_update:current")
                            or b"").decode("utf-8", "ignore")
    else:
        finish = redis.get("library_update:finish")
        if finish:
            data["last_update"] = human(
                datetime.now().replace(microsecond=0)
                - datetime.fromtimestamp(float(finish)).replace(microsecond=0))
    return render_template("status.html", **data)

def get_task_status(key, section=None):
    result = []
    status = SyncManager.get_status(key, section)
    if status.previous_timestamp:
        since = datetime.utcnow() - status.previous_timestamp
        if since.seconds < 1:
            result.append('Last run just a moment ago')
        else:
            result.append('Last run %s' % human(since, precision=1))
    if status.previous_elapsed:
        if status.previous_elapsed.seconds < 1:
            result.append('taking less than a second')
        else:
            result.append('taking %s' % human(status.previous_elapsed,
                                              precision=1, past_tense='%s'))
    if status.previous_success is True:
        result.append('was successful')
    elif status.previous_timestamp:
        # Only add 'failed' fragment if there was actually a previous run
        result.append('failed')
    if result:
        return ', '.join(result) + '.'
    return 'Not run yet.'

def main(args):
    dt_delta = datetime.timedelta(days=args.days)
    max_pipelines = args.pipelines
    projects = get("/projects/")
    if args.project:
        projects = [
            p for p in projects if args.project in p["path_with_namespace"]
        ]
    now = datetime.datetime.now()
    for project in sorted(projects, key=lambda p: p["last_activity_at"],
                          reverse=True):
        mrs = get("/projects/{}/merge_requests".format(project["id"]),
                  reverse=True, state="opened")
        pipelines = [
            p for p in project_pipelines(project["id"], 2 * max_pipelines)
            if (now - p["created_at"] < dt_delta)
        ][:max_pipelines]
        if not (pipelines or mrs):
            continue
        print(project["path_with_namespace"], project["web_url"])
        for mr in mrs:
            build_status = ""
            pipeline_by_sha = {
                p["sha"]: p for p in project_pipelines(mr["source_project_id"])
            }
            for commit in get("/projects/{}/merge_requests/{}/commits".format(
                    project["id"], mr["id"])):
                try:
                    build_status = pipeline_by_sha[commit["id"]]["status"]
                    break
                except KeyError:
                    pass
            try:
                assignee = mr["assignee"]["name"]
                assignee_color = termstyle.magenta
            except (TypeError, KeyError):
                assignee = "nobody :-("
                assignee_color = termstyle.red
            print("{:>10}".format(mr["id"]),
                  status_color("{:>10}".format(build_status)),
                  "{:>20}".format(human(mr["created_at"], 1)),
                  assignee_color("{:>20}".format(assignee)),
                  mr["title"])
        for p in pipelines:
            print("{:>10}".format(p["id"]),
                  status_color("{:>10}".format(p["status"])),
                  "{:>20}".format(human(p["created_at"], 1)),
                  "{:>20}".format(""),
                  p["ref"])

def worker_matlab(hashtags, taskid):
    conn1 = sqlite3.connect(DB2)
    conn2 = create_tables('hashtag_counts_matlab.db')
    L = len(hashtags)
    percentJump = 0.01
    step = int(L * percentJump)
    print "Step size:", step
    timediffs = []
    start = time.time()
    rows = []
    for i, hashtag in enumerate(hashtags):
        with conn1:
            row, _ = fetch(hashtag, dense=False, conn=conn1)
        row = list(row)
        row[-1] = to_matlab(row[-1])
        row = tuple(row)
        rows.append(row)
        percent, remainder = divmod(i, step)
        if remainder == 0:
            with conn2:
                conn2.executemany(insert, rows)
            rows = []
            curr = time.time()
            if i >= 3:
                interval = curr - start
                timediffs.append(interval)
                interval_delta = datetime.timedelta(seconds=interval)
                print "{0}:\tInterval: {1}".format(taskid, ago.human(interval_delta))
            start = curr
            msg = "{0}:\t{1}% complete.".format(taskid, percent * percentJump * 100)
            if i >= 3:
                # The mean number of seconds per percent
                avgtime = np.mean(timediffs)
                timeleft = (1 - percent * percentJump) / percentJump * avgtime
                if timeleft < 0:
                    timeleft = 0
                delta = datetime.timedelta(seconds=timeleft)
                future = datetime.datetime.now() + delta
                msg2 = " Estimated completion {0}".format(ago.human(future))
                msg += msg2
            print(msg)
    else:
        with conn2:
            conn2.executemany(insert, rows)
    print "{0}:\tFinished.".format(taskid)

async def player(self, ctx: commands.Context, player: str,
                 server_name: str = 'ScrollsGuide'):
    async with ctx.typing():
        async with self.bot.db_engine.acquire() as conn:
            server = await self.__get_server_check_api(ctx, conn, server_name)
            if server is None:
                return
            else:
                api = CBSAPI(server.cbsapi)
                try:
                    player_data = await api.player(player, collection=True,
                                                   games=True, unlocks=True)
                except PlayerNotFound:
                    await ctx.send(f"Player `{player}` not found")
                else:
                    if player_data['last_login']:
                        last_login = human(player_data['last_login'], precision=1)
                    else:
                        last_login = '******'
                    embed = Embed(title=player_data['name'])
                    embed.add_field(name='Rating',
                                    value=str(int(player_data['rating'])))
                    embed.add_field(name='Last login', value=last_login)
                    embed.add_field(name='Gold', value=player_data['gold'])
                    embed.add_field(name='Commons',
                                    value=player_data['collection']['commons'])
                    embed.add_field(name='Uncommons',
                                    value=player_data['collection']['uncommons'])
                    embed.add_field(name='Rares',
                                    value=player_data['collection']['rares'])
                    embed.add_field(name='Games won',
                                    value=player_data['games']['won'] or 0)
                    embed.add_field(name='Games lost',
                                    value=player_data['games']['lost'] or 0)
                    embed.add_field(name='Achievements',
                                    value=player_data['unlocks']['achievements'])
                    embed.add_field(name='Created',
                                    value=human(player_data['created'],
                                                precision=1))
                    await ctx.send(embed=embed)

def descriptive_timedelta(target: datetime, abbreviate: bool = False,
                          precision: Optional[int] = None) -> str:
    """Return a descriptive string for how long ago a datetime was.

    The returned string will be of a format like "4 hours ago" or
    "3 hours, 21 minutes ago". The second "precision level" is only added if
    it will be at least minutes, and only one "level" below the first unit.
    That is, you'd never see anything like "4 hours, 5 seconds ago" or
    "2 years, 3 hours ago".

    If `abbreviate` is true, the units will be shortened to return a string
    like "12h 28m ago" instead of "12 hours, 28 minutes ago".

    A time of less than a second returns "a moment ago".
    """
    # the ago library doesn't deal with timezones properly, so we need to
    # calculate the timedelta ourselves and only ever call human() using a
    # timedelta
    delta = utc_now() - target
    seconds_ago = delta.total_seconds()
    if seconds_ago < 1:
        return "a moment ago"

    if not precision:
        # determine whether one or two precision levels is appropriate
        if seconds_ago < 3600:
            # if it's less than an hour, we always want only one precision level
            precision = 1
        else:
            # try a precision=2 version, and check the units it ends up with
            result = human(delta, precision=2)
            units = ("year", "day", "hour", "minute", "second")
            unit_indices = [i for (i, unit) in enumerate(units) if unit in result]

            # if there was only one unit in it, or they're adjacent, this is fine
            if len(unit_indices) < 2 or unit_indices[1] - unit_indices[0] == 1:
                precision = 2
            else:
                # otherwise, drop back down to precision=1
                precision = 1

    result = human(delta, precision, abbreviate=abbreviate)

    # remove commas if abbreviating ("3d 2h ago", not "3d, 2h ago")
    if abbreviate:
        result = result.replace(",", "")

    return result

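# A minimal usage sketch of the precision heuristic above, calling ago.human
# directly with fixed timedeltas so the results are deterministic; the expected
# strings assume ago's default "{} ago" past tense template.
from datetime import timedelta
from ago import human

human(timedelta(minutes=42), precision=1)           # "42 minutes ago"
human(timedelta(hours=3, minutes=21), precision=2)  # "3 hours, 21 minutes ago"
# The non-adjacent case descriptive_timedelta() avoids: precision=2 here would
# yield "2 days, 5 minutes ago", so the heuristic drops back to precision=1.
human(timedelta(days=2, minutes=5), precision=1)    # "2 days ago"
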
def example_usage():
    """Test and example usage"""
    print('\nTest past tense:\n')
    print(delta2dict(PAST_DELTA))
    print('Commented ' + human(PAST_DELTA, 1))
    print(human(PAST, past_tense="Commented {} ago"))
    print(human(ONE_YEAR_FOUR_HOURS_DELTA, past_tense="Posted {} ago"))
    print('\nTest future tense:\n')
    print(delta2dict(FUTURE_DELTA))
    print('Shutdown ' + human(FUTURE_DELTA, 5))
    print(human(FUTURE, future_tense='Shutdown in {} from now'))
    print('')

def timedelta_to_human(td):
    '''
    Returns a vaguely readable string for a `timedelta` instance.
    '''
    s = ago.human(td, precision=2, past_tense='{}')
    return ','.join(s.split(',', 3)[0:2])

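# For reference, a quick check of what the helper above returns; the expected
# strings assume ago's default unit names.
from datetime import timedelta

timedelta_to_human(timedelta(days=400, hours=5))  # e.g. "1 year, 35 days"
timedelta_to_human(timedelta(minutes=90))         # e.g. "1 hour, 30 minutes"
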
def unpack_move_rar_file(self, release_unpacker_rar_file, rarfile_file_name,
                         unpack_file_path):
    """Extract an individual file from release_unpacker_rar_file to
    unpack_file_path
    """
    # Extract file to tmp_dir
    log.debug('Extracting {} to {}'.format(rarfile_file_name, self.tmp_dir))
    log.info('{} unpack started'.format(unpack_file_path.name))

    unpack_start = datetime.now().replace(microsecond=0)
    extracted_file_path = release_unpacker_rar_file.extract_file(
        rarfile_file_name, self.tmp_dir)
    unpack_end = datetime.now().replace(microsecond=0)
    unpack_time = human(unpack_end - unpack_start, past_tense='{}')

    if not unpack_time:
        log.info('{} unpack done'.format(unpack_file_path.name))
    else:
        log.info('{} unpack done, {}'.format(unpack_file_path.name,
                                             unpack_time))

    # Move file and rename to unpack_dir
    log.debug('Moving {} to {}'.format(extracted_file_path, unpack_file_path))
    extracted_file_path.move(unpack_file_path)

def main(wf):
    user_input = ''.join(wf.args)

    if wf.update_available:
        wf.add_item("An update is available!",
                    autocomplete='workflow:update', valid=False)

    refresh(wf)
    top_stories = wf.cached_data('hackernews_top_10', max_age=60)
    while top_stories is None:
        top_stories = wf.cached_data('hackernews_top_10', max_age=60)
        time.sleep(1)

    for i in range(2, 11):
        if wf.cached_data_fresh('hackernews_top_%s0' % i, 60):
            top_stories += wf.cached_data('hackernews_top_%s0' % i, max_age=0)
        else:
            break

    for item_id, item in top_stories:
        title = item['title']
        date = human(datetime.fromtimestamp(int(item['time'])))
        subtitle = '%s points by %s %s' % (item['score'], item['by'], date)
        url = item['url'] if 'url' in item \
            else 'https://news.ycombinator.com/item?id=%s' % item_id
        if user_input.lower() in title.lower() or \
                user_input.lower() in subtitle.lower():
            wf.add_item(title, subtitle, arg=url, valid=True)

    wf.send_feedback()

def __init__(self, master, oneDir):
    self.oneDir = oneDir
    self.master = master
    self.frame = tk.Toplevel(self.master)
    if self.oneDir.user == 'admin':
        self.list = self.oneDir.admin_list()
    else:
        self.list = self.oneDir.list()
    self.entries = {}
    counter = 1
    tk.Label(self.frame, text="User").grid(row=1, column=1)
    tk.Label(self.frame, text="File").grid(row=1, column=2)
    tk.Label(self.frame, text="Path").grid(row=1, column=3)
    tk.Label(self.frame, text="Modified").grid(row=1, column=4)
    tk.Label(self.frame, text="Delete").grid(row=1, column=5)
    tk.Label(self.frame, text="Share").grid(row=1, column=6)
    count = 2
    for f in self.list:
        tk.Label(self.frame, text=f['username']).grid(row=count, column=1)
        tk.Label(self.frame, text=f['name']).grid(row=count, column=2)
        tk.Label(self.frame, text='/' + f['path']).grid(row=count, column=3)
        modified = human(datetime.datetime.strptime(f['modified'],
                                                    '%Y-%m-%d %H:%M:%S.%f'),
                         precision=2, past_tense='{}', future_tense='{}')
        tk.Label(self.frame, text=modified).grid(row=count, column=4)
        tk.Button(self.frame, borderwidth=4, text="Delete", width=10, pady=8,
                  command=lambda count=count: self.delete(count)).grid(
                      row=count, column=5)
        tk.Button(self.frame, borderwidth=4, text="Share", width=10, pady=8,
                  command=lambda count=count: self.share(count)).grid(
                      row=count, column=6)
        count += 1

async def uptime(self, ctx):
    """**.uptime** - show Super's uptime"""
    # pass a timedelta: ago.human can't take the raw epoch float in self.start
    diff = timedelta(seconds=time() - self.start)
    this_long = human(diff, precision=3).replace("ago", "").strip()
    async with ctx.message.channel.typing():
        return await ctx.message.channel.send(
            f"i've been awake for {this_long}...")

def build_elapsed(status):
    elapsed = status.latest.ended_at - status.latest.started_at
    if elapsed.seconds < 1:
        return _('taking less than a second')
    return _('taking %s') % human(elapsed, precision=1, past_tense='%s')

def unpack_move_rar_file(self, release_unpacker_rar_file, rarfile_file_name,
                         unpack_file_path):
    """Unpack and move RAR file.

    Extract an individual file from release_unpacker_rar_file to
    unpack_file_path.
    """
    # Extract file to tmp_dir
    log.debug("Extracting %s to %s", rarfile_file_name, self.tmp_dir)
    log.info("%s unpack started", unpack_file_path.name)

    unpack_start = datetime.now().replace(microsecond=0)
    extracted_file_path = release_unpacker_rar_file.extract_file(
        rarfile_file_name, self.tmp_dir)
    unpack_end = datetime.now().replace(microsecond=0)
    unpack_time = human(unpack_end - unpack_start, past_tense="{}")

    if not unpack_time:
        log.info("%s unpack done", unpack_file_path.name)
    else:
        log.info("%s unpack done, %s", unpack_file_path.name, unpack_time)

    # Move file and rename to unpack_dir
    log.debug("Moving %s to %s", extracted_file_path, unpack_file_path)
    extracted_file_path.move(unpack_file_path)

def ago(self):
    if self.access_set.all().count() == 0:
        return "Never seen"
    most_recent = self.access_set.all().order_by('-start_time')[0]
    end = most_recent.end_time
    end = end.replace(tzinfo=None)
    return ago.human(end, 1)

def format_delta(date):
    return ago.human(
        (datetime.now(timezone.utc)) - (dateutil.parser.parse(date)),
        future_tense="{} from now",  # No-one should ever see this
        past_tense="{} ago",
        precision=1,
    )

def getSystemInfo():
    """Return the last time pacman -Syu was run."""
    out = run_from_shell('cat', '/var/log/pacman.log')
    lastdate = re.findall(r'\[([\d\-: ]+)\].*-Syu.*', out)[-1]
    time = datetime.strptime(lastdate, '%Y-%m-%d %H:%M')
    return jsonify({"last_update": human(time, precision=1)})

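# Quick sanity check of the regex above against a sample pacman.log line; the
# sample line format is an assumption based on pacman's log style.
import re

sample = "[2024-03-05 14:22] [PACMAN] Running 'pacman -Syu'"
re.findall(r'\[([\d\-: ]+)\].*-Syu.*', sample)  # ['2024-03-05 14:22']
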
def create_pump_browser(pump_list):
    table = []
    # header
    table.append(
        html.Tr([
            html.Th('Pump identifier'),
            html.Th('Frequency (sec)'),
            html.Th('Last status'),
            html.Th('Computed')
        ]))
    # rows
    table += [
        html.Tr([
            html.Td(dcc.Link(p['id'], href='/pump/' + str(p['id']))),
            html.Td(p['freq']),
            html.Td(p['lp']['label'],
                    className='table-warning'
                    if p['lp']['label'] != 'normal' else 'table-success'),
            html.Td(
                human(
                    datetime.datetime.strptime(p['lp']['ts'],
                                               "%Y-%m-%dT%H:%M:%S.%f"), 1))
        ]) for p in pump_list
    ]
    return table

def get(self, request, id):
    post = get_object_or_404(Post, id=id)
    post.views += 1
    post.save()
    menu_items = []
    body_pre = [
        post.description,
        'Author: {}'.format(post.user.username),
        'Expires in: {}'.format(human(post.expires_at)),
        'Code: {}'.format(post.code),
        'Views: {}'.format(post.views)
    ]
    # check to see if we have notifications set in cache for this post
    if cache.get('new_post'):
        body_pre.insert(0, 'Post successfully created!')
        cache.delete('new_post')
    elif cache.get('post_private'):
        body_pre.insert(0, 'Post marked as private!')
        cache.delete('post_private')
    elif cache.get('post_renewed'):
        body_pre.insert(0, 'Post successfully renewed!')
        cache.delete('post_renewed')
    elif cache.get('msg_sent'):
        body_pre.insert(0, 'Your message was successfully sent!')
        cache.delete('msg_sent')
    elif cache.get('msg_not_sent'):
        body_pre.insert(0, 'Message was not sent, please try again later!')
        cache.delete('msg_not_sent')  # was 'msg_sent', which left this flag stuck
    menu_items.extend([MenuItem(description=u'\n'.join(body_pre))])
    if post.user == self.get_user():
        # viewing user is the post owner
        menu_items.extend([
            MenuItem(description='Renew', method='PUT',
                     path=reverse('post_detail', args=[post.id]) + '?attr=renew'),
            MenuItem(description='Delete', method='DELETE',
                     path=reverse('post_detail', args=[post.id]))
        ])
        if not post.is_private:
            menu_items.extend([
                MenuItem(description='Make private', method='PUT',
                         path=reverse('post_detail', args=[post.id]) +
                         '?attr=is_private')  # '?' starts the query string, as above
            ])
    else:
        menu_items.extend([
            MenuItem(description='Send message', method='GET',
                     path=reverse('send_msg', args=[post.id])),
        ])
    content = Menu(body=menu_items, header=post.title, footer='Reply MENU')
    return self.to_response(content)

def build_since(status):
    since = datetime.utcnow() - status.latest.ended_at
    if since.seconds < 1:
        return _('Last run just a moment ago')
    return _('Last run %s') % human(since, precision=1)

def human_date_utc(*args, **kwargs):
    if isinstance(args[0], (int, float, str)):
        args = [datetime.datetime.utcfromtimestamp(float(args[0]))] + list(args[1:])
    delta = (datetime.datetime.utcnow() - args[0])
    delta = delta - datetime.timedelta(microseconds=delta.microseconds)
    return ago.human(delta, *args[1:], **kwargs)

async def last_seen(opsdroid, config, message):
    name = message.regex.group('name')
    seen = await opsdroid.memory.get("seen")
    if seen is None or name not in seen:
        await message.respond("I've never seen {} before".format(name))
    else:
        await message.respond("I last saw {} {}".format(
            name, human(seen[name], precision=1)))

def get_expiries():
    now = datetime.now()
    default = app.config.get('EXPIRY')
    default_hours = default.days * 24
    if hasattr(default, 'hours'):
        default_hours += default.hours
    default_times = [
        1,               # an hour
        24,              # a day
        24 * 7,          # a week
        24 * 30,         # a month
        24 * 365,        # a year
        24 * 365 * 100,  # when the server dies (100 years)
    ]
    try:
        default_times.remove(default_hours)
    except ValueError:
        pass
    default_times.append(default_hours)
    expiries = [{
        'name': human(now + timedelta(hours=span, seconds=1), precision=1),
        'value': from_datetime(now + timedelta(hours=span)),
    } for span in default_times]
    expiries[-1]['name'] += ' (default)'
    return expiries

def check_posted(phenny, input, url):
    if url:
        conn = sqlite3.connect(phenny.posted_db,
                               detect_types=sqlite3.PARSE_DECLTYPES)
        c = conn.cursor()
        c.execute("SELECT nick, time FROM posted WHERE channel=? AND url=?",
                  (input.sender, url))
        res = c.fetchone()
        posted = None
        if res:
            nickname = res[0]
            time = human(res[1])
            posted = "{0} by {1}".format(time, nickname)
        else:
            c.execute(
                "INSERT INTO posted (channel, nick, url) VALUES (?, ?, ?)",
                (input.sender, input.nick, url))
            conn.commit()
        conn.close()
        return posted

def sw_dict(sw):
    last_update = (sw.get('lastCommit', sw.get('updatedAt'))
                   or '2015-01-01T12:00:00Z')
    return {
        'numCommits': sw.get('totalCommits'),
        'numMentions': len(sw.get('related').get('mentions')),
        'lastUpdate': last_update,
        'lastUpdateAgo': ago.human(str_to_datetime(last_update), precision=1),
        'tags': sw.get('tags'),
        'primaryKey': sw.get('primaryKey'),
        'brandName': sw.get('brandName'),
        'shortStatement': sw.get('shortStatement'),
        'isFeatured': sw.get('isPublished'),
        'relatedOrganizations': [{
            'foreignKey': {
                key: org['foreignKey'][key]
                for key in ['primaryKey', 'name']
            }
        } for org in sw.get('related').get('organizations')],
        'slug': sw.get('slug'),
    }

def get_time_left(created_at):
    return ago.human(
        (datetime.now(timezone.utc).replace(hour=23, minute=59, second=59))
        - (dateutil.parser.parse(created_at) + timedelta(days=8)),
        future_tense='Data available for {}',
        past_tense='Data no longer available',  # No-one should ever see this
        precision=1)

def get_humanize_local_datetime_from_utc_datetime(utc_target_datetime):
    local_now = datetime.datetime.now(tz.tzlocal())
    if utc_target_datetime:
        local_target_datetime = utc_target_datetime.replace(
            tzinfo=tz.gettz("UTC")).astimezone(tz=tz.tzlocal())
        return ago.human(local_now - local_target_datetime, precision=1)
    return ""

def project_index_template():
    url = api_url + "/project_cache"
    project_data = requests.get(url).json()
    projects = []
    for project in project_data:
        status = project_status(project["dateStart"],
                                project["dateEnd"])["status"]
        projects.append({
            "id": project["primaryKey"]["id"],
            "title": project["title"],
            "subtitle": project["subtitle"],
            "image": project["image"],
            "yearStart": get_year_from_date_string(project["dateStart"]),
            "yearEnd": get_year_from_date_string(project["dateEnd"]),
            "status": status,
            "numMentions": len(project["output"]) + len(project["impact"]),
            "lastUpdate": project["updatedAt"],
            "lastUpdateAgo": ago.human(str_to_datetime(project["updatedAt"]),
                                       precision=1),
            "topics": project["topics"],
            "technologies": project["technologies"],
        })
    mentions = get_project_mentions(project_data)
    status_choices = ['Starting', 'Running', 'Finished']
    schema = get_schema()
    topic_choices = schema["project"]["properties"]["topics"]["items"]["enum"]
    technology_choices = schema["project"]["properties"]["technologies"]["items"]["enum"]
    return flask.render_template(
        'project_index/template.html',
        data_json=flask.Markup(json.dumps(projects)),
        projects=projects,
        status_choices_json=flask.Markup(json.dumps(status_choices)),
        topic_choices_json=flask.Markup(json.dumps(topic_choices)),
        technology_choices_json=flask.Markup(json.dumps(technology_choices)),
        mentions=mentions)

def stalk(username, password):
    print(time.strftime('%H:%M:%S Initiating stalking', time.localtime()))
    try:
        login(username, password)
        for dev in devs:
            time.sleep(10)
            hdr = {'User-Agent': 'Mojang stalker bot for /r/scrolls'}
            req = urllib2.Request(
                "http://www.reddit.com/user/%s/comments.json?limit=10" % dev,
                headers=hdr)
            h = urllib2.urlopen(req)
            d = json.loads(h.read())
            for p in d["data"]["children"]:
                # Filter only a specific subreddit
                if p["data"]["subreddit_id"] == subreddit_id:
                    posts.append(p)
        # Sort by the time the post was created
        posts.sort(key=lambda p: p["data"]["created"], reverse=True)
        newcontent = ""
        for post in posts[0:20]:
            timestamp = human(
                datetime.datetime.fromtimestamp(post["data"]["created"])
                - datetime.timedelta(hours=8))
            thread_id = post["data"]["link_id"][3:]
            comment_id = post["data"]["id"]
            url = "http://www.reddit.com/r/%s/comments/%s//%s?context=3" % (
                subreddit_name, thread_id, comment_id)
            newcontent += "[](/mojang) /u/%s (%s) [%s](%s) by /u/%s \n\n>%s\n\n****\n\n" % (
                post["data"]["author"], timestamp, post["data"]["link_title"],
                url, post["data"]["link_author"],
                post["data"]["body"].replace("\n", "\n> "))
        del posts[:]
        reddit.edit_wiki_page(subreddit=subreddit_name, page=page,
                              content=newcontent, reason='')
        print(time.strftime('%H:%M:%S Stalking complete', time.localtime()))
    except Exception as e:
        print("Error stalking: " + str(e))

def burn():
    # calories = met * weight * hours
    stuff = map(json.loads, request.args.getlist('stuff'))
    amounts = ['{} of {}'.format(x['count'], x['name']) for x in stuff]
    query = ' and '.join(amounts)
    calories = float(get_calories_from_wa(query))
    weight_kg = float(request.args['weight']) * 0.453592
    data = {'calories': calories, 'activities': {}}
    for (name, met) in ACTIVITY_MET_VALUES.items():
        hours = float(calories / (met * weight_kg))
        delta = timedelta(hours=hours)
        humanized = human(delta, past_tense='{}')
        humanized = (humanized.replace(' hours', 'h')
                     .replace(' hour', 'h')
                     .replace(' minutes', 'm')
                     .replace(' minute', 'm')
                     .replace(',', ''))
        data['activities'][name] = {
            'time': humanized,
            'distance': '{:.1f} km'.format(hours * SPEED[name])
        }
    resp = jsonify({'activities': data})
    resp.status_code = 200
    resp.headers = {
        'Access-Control-Allow-Headers':
            request.headers.get('Access-Control-Request-Headers'),
        'Access-Control-Allow-Methods':
            request.headers.get('Access-Control-Request-Method'),
        'Access-Control-Allow-Origin': '*'
    }
    return resp

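# Worked example of the MET formula the handler above rearranges
# (hours = calories / (MET * weight_kg)). The MET value of 9.8 for running is
# an illustrative assumption, not a value taken from ACTIVITY_MET_VALUES.
calories = 500.0
weight_kg = 150 * 0.453592            # 150 lb is about 68.0 kg
hours = calories / (9.8 * weight_kg)  # ~0.75 hours of running
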
def getdata():
    try:
        soup = Soup(requests.get(
            "http://steamcommunity.com/id/{id}/games?tab=all".format(
                id=request.args.get('id', ''))).text)
    except KeyError:
        return "No user ID provided"
    try:
        script = soup.findAll('script')[11].text
    except IndexError:
        resp = make_response(json.dumps(
            {"name": "Couldn't get Steam information",
             "appid": "0",
             "logo": "http://puushbrowse.blha303.com.au/broken.png"}))
        resp.headers["Access-Control-Allow-Origin"] = \
            "http://steamroulette.blha303.com.au"
        return resp
    data = json.loads(script.strip().split("\r\n")[0][14:-1])
    options = []
    for a in data:
        if "last_played" in a:
            if a["last_played"] < time() - 604800:
                a["since"] = ago.human(datetime.fromtimestamp(a["last_played"]))
                options.append(a)
        else:
            a["name"] += " <i>(never played)</i>"
            options.append(a)
    resp = make_response(json.dumps(choice(options)))
    # resp = make_response("Play {name}: <a href=\"steam://play/{appid}\">Click here</a><br><a href=\"steam://play/{appid}\"><img src=\"{logo}\" alt=\"{name}\">".format(**choice(options)))
    resp.headers["Access-Control-Allow-Origin"] = \
        "http://steamroulette.blha303.com.au"
    return resp

async def on_member_remove(self, member):
    webhook = await self._get_webhook(member.guild, 'join_leave_log')
    if not webhook:
        return
    roles = ', '.join([role.mention for role in member.roles][1:])
    embed = log_embed_danger('Member left', self.bot)
    embed.add_field(name='Name:', value=member.name, inline=True)
    embed.add_field(name='Mention:', value=member.mention, inline=True)
    embed.add_field(name='Joined Discord:', value=ago.human(member.created_at),
                    inline=True)
    embed.add_field(name='Roles:', value=roles or 'No roles', inline=False)
    embed.set_footer(text=f'Member ID: {member.id}', icon_url=member.avatar_url)
    embed.set_thumbnail(url=member.avatar_url)
    await send_webhook(webhook.url, self.bot.aio_session, embed=embed)

def redditor_info(bot, trigger, match=None):
    """Show information about the given Redditor"""
    commanded = re.match(bot.config.prefix + 'redditor', trigger)
    r = praw.Reddit(user_agent=USER_AGENT)
    match = match or trigger
    try:
        u = r.get_redditor(match.group(2))
    except:
        if commanded:
            bot.say('No such Redditor.')
            return NOLIMIT
        else:
            return  # Fail silently if it wasn't an explicit command.
    message = '[REDDITOR] ' + u.name
    if commanded:
        message = message + ' | http://reddit.com/u/' + u.name
    if u.is_gold:
        message = message + ' | 08Gold'
    if u.is_mod:
        message = message + ' | 05Mod'
    message = (message + ' | Link: ' + str(u.link_karma) +
               ' | Comment: ' + str(u.comment_karma))
    # age
    message = message + ' | Redditor for ' + human(
        datetime.datetime.utcnow()
        - datetime.datetime.utcfromtimestamp(int(u.created_utc)),
        past_tense='{}')
    bot.say(message)

async def on_member_join(self, member):
    webhook_join = await self._get_webhook(member.guild, 'join_leave_log')
    if not webhook_join:
        return
    join_embed = log_embed_info('Member joined', self.bot)
    join_embed.add_field(name='Name:', value=member.name, inline=True)
    join_embed.add_field(name='Mention:', value=member.mention, inline=True)
    join_embed.add_field(name='Joined Discord:',
                         value=ago.human(member.created_at), inline=True)
    join_embed.set_footer(text=f'Member ID: {member.id}',
                          icon_url=member.avatar_url)
    join_embed.set_thumbnail(url=member.avatar_url)
    await send_webhook(webhook_join.url, self.bot.aio_session,
                       embed=join_embed)

    webhook_invites = await self._get_webhook(member.guild, 'invite_log')
    if not webhook_invites:
        return
    invite = await self._get_invite(member.guild)
    invite_embed = log_embed_info('Member joined using invite link', self.bot)
    if not invite:
        invite_embed.description = \
            'Sorry, I couldn\'t figure out how this person joined.'
    else:
        invite_embed.add_field(name='Name:', value=member.name, inline=True)
        invite_embed.add_field(name='ID:', value=invite.id, inline=True)
        invite_embed.add_field(name='Guild:', value=invite.guild.name,
                               inline=True)
        invite_embed.add_field(name='Inviter:', value=invite.inviter.name,
                               inline=True)
        invite_embed.add_field(name='URL', value=invite.url, inline=True)
    invite_embed.set_thumbnail(url=member.avatar_url)
    invite_embed.set_footer(text=f'User ID: {member.id}',
                            icon_url=member.avatar_url)
    await send_webhook(webhook_invites.url, self.bot.aio_session,
                       embed=invite_embed)

def get_time_left(created_at, service_data_retention_days=7):
    return ago.human(
        (datetime.now(timezone.utc))
        - (dateutil.parser.parse(created_at).replace(hour=0, minute=0, second=0)
           + timedelta(days=service_data_retention_days + 1)),
        future_tense='Data available for {}',
        past_tense='Data no longer available',  # No-one should ever see this
        precision=1)

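# A sketch of the retention arithmetic above with fixed datetimes so the
# result is deterministic; a negative timedelta makes ago.human pick the
# future tense. The dates are made up for illustration.
from datetime import timedelta
import dateutil.parser
import ago

created = dateutil.parser.parse("2024-01-01T10:30:00+00:00")
cutoff = created.replace(hour=0, minute=0, second=0) + timedelta(days=8)
now = dateutil.parser.parse("2024-01-03T00:00:00+00:00")
ago.human(now - cutoff,
          future_tense='Data available for {}',
          past_tense='Data no longer available',
          precision=1)
# "Data available for 6 days"
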
def get_time_left(job_created_at):
    return ago.human(
        (datetime.now(timezone.utc).replace(hour=23, minute=59, second=59))
        - (dateutil.parser.parse(job_created_at) + timedelta(days=8)),
        future_tense="Data available for {}",
        past_tense="Data no longer available",  # No-one should ever see this
        precision=1,
    )

def kill_list(self, mess, args):
    """Show everyone who is on the watch list."""
    now = datetime.utcnow()
    members = self["users"].values()
    members = sorted(members,
                     key=lambda member: member['character_name'].lower())
    for member in members:
        member['time_ago'] = ago.human(now - member['time'])
    return {'members': members}

def _friendly_built_on(self):
    timestamp = math.floor(self.timestamp_millis / 1000)
    if datetime.now().timestamp() - timestamp < 60:
        return 'less than a minute ago'
    date = datetime.fromtimestamp(timestamp)
    return human(date, precision=1)

def time_since(self):
    try:
        delta = datetime.utcnow() - datetime.strptime(
            self.created_at, "%a, %d %b %Y %H:%M:%S +0000")
        return ago.human(delta, precision=2)
    except AttributeError:
        return ""

def _friendly_duration(self):
    if self.duration_millis < 1000:
        return 'negligible'
    time_delta = timedelta(milliseconds=self.duration_millis)
    return human(time_delta, precision=1, past_tense='{}', future_tense='{}')

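# Quick check of the pattern above: passing '{}' for both tenses strips ago's
# "ago"/"in" wrappers, leaving a bare duration string.
from datetime import timedelta
from ago import human

human(timedelta(milliseconds=754000), precision=1,
      past_tense='{}', future_tense='{}')  # "12 minutes"
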
def human_date_utc(*args, **kwargs):
    if args[0] is None:
        return "never"
    if isinstance(args[0], (int, float, str)):
        args = [datetime.datetime.utcfromtimestamp(float(args[0]))] + list(args[1:])
    delta = (datetime.datetime.utcnow() - args[0])
    delta = delta - datetime.timedelta(microseconds=delta.microseconds)
    result = ago.human(delta, *args[1:], **kwargs)
    return "just now" if result == " ago" else result

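# Usage sketch for the wrapper above; outputs marked "e.g." depend on the
# current clock, so they are illustrative only.
import time

human_date_utc(None)                # "never"
human_date_utc(time.time())         # "just now" (sub-second delta renders as " ago")
human_date_utc(time.time() - 3600)  # e.g. "1 hour ago"
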
def kill_list(self, mess, args):
    """Show everyone who is on the watch list."""
    now = datetime.datetime.utcnow()
    members = self["users"].values()
    members = sorted(members, key=lambda member: member["character_name"])
    for member in members:
        member["time_ago"] = ago.human(now - member["time"])
    return {"members": members, "value": humanize.intword(self["value"])}

def descriptive_timedelta(target: datetime, abbreviate: bool = False) -> str:
    """Return a descriptive string for how long ago a datetime was.

    The returned string will be of a format like "4 hours ago" or
    "3 hours, 21 minutes ago". The second "precision level" is only added if
    it will be at least minutes, and only one "level" below the first unit.
    That is, you'd never see anything like "4 hours, 5 seconds ago" or
    "2 years, 3 hours ago".

    If `abbreviate` is true, the units will be shortened to return a string
    like "12h 28m ago" instead of "12 hours, 28 minutes ago".

    A time of less than a second returns "a moment ago".
    """
    seconds_ago = (utc_now() - target).total_seconds()
    if seconds_ago < 1:
        return 'a moment ago'

    # determine whether one or two precision levels is appropriate
    if seconds_ago < 3600:
        # if it's less than an hour, we always want only one precision level
        precision = 1
    else:
        # try a precision=2 version, and check the units it ends up with
        result = human(target, precision=2)
        units = ('year', 'day', 'hour', 'minute', 'second')
        unit_indices = [i for (i, unit) in enumerate(units) if unit in result]

        # if there was only one unit in it, or they're adjacent, this is fine
        if len(unit_indices) < 2 or unit_indices[1] - unit_indices[0] == 1:
            precision = 2
        else:
            # otherwise, drop back down to precision=1
            precision = 1

    result = human(target, precision, abbreviate=abbreviate)

    # remove commas if abbreviating ("3d 2h ago", not "3d, 2h ago")
    if abbreviate:
        result = result.replace(',', '')

    return result

def str_date_to_age(str_date):
    try:
        parsed_date = datetime.datetime.strptime(
            str_date[:-len('+00:00')], "%Y-%m-%dT%H:%M:%S")
        res = ago.human(parsed_date, precision=1)
        if not res:
            return str_date
        return res
    except BaseException:
        pass
    return str_date

def worker(hashtags, taskid):
    conn_new = create_tables(DB2)
    L = len(hashtags)
    percentJump = 0.01
    step = int(L * percentJump)
    print "Step size:", step
    timediffs = []
    start = time.time()
    rows = []
    for i, hashtag in enumerate(hashtags):
        rows.append(stats(hashtag, row=True))
        percent, remainder = divmod(i, step)
        if remainder == 0:
            with conn_new:
                conn_new.executemany(insert, rows)
            rows = []
            curr = time.time()
            if i >= 3:
                interval = curr - start
                timediffs.append(interval)
                interval_delta = datetime.timedelta(seconds=interval)
                print "{0}:\tInterval: {1}".format(taskid, ago.human(interval_delta))
            start = curr
            msg = "{0}:\t{1}% complete.".format(taskid, percent * percentJump * 100)
            if i >= 3:
                # The mean number of seconds per percent
                avgtime = np.mean(timediffs)
                timeleft = (1 - percent * percentJump) / percentJump * avgtime
                delta = datetime.timedelta(seconds=timeleft)
                future = datetime.datetime.now() + delta
                msg2 = " Estimated completion {0}".format(ago.human(future))
                msg += msg2
            print(msg)
    else:
        with conn_new:
            conn_new.executemany(insert, rows)
    print "{0}:\tFinished.".format(taskid)

def highlights(self, cat='DISCUSSION', count=10):
    resp = self.rs.get(
        host + 'communities-api/v1/discussion/highlights/' + self.id
        + '?type=' + cat + '&sort=RECENT&count=' + str(count) + '&start=0',
        headers={'csrf-token': self.csrfToken})
    # print('resp.content', resp.content)
    # pprint(json.loads(resp.content))
    print('\nHighlights\n')
    for d in json.loads(resp.content)['data']:
        # print(time.strftime('%y-%m-%d %H:%M',
        #                     time.localtime(int(d['datePosted'])/1000)),
        #       d['title'])
        print(ago.human(
                  datetime.fromtimestamp(
                      int(d['discussion']['datePosted']) / 1000),
                  precision=1, abbreviate=True),
              '\t', d['community']['name'] + ':', d['discussion']['title'])

def startupnews():
    ts = models.LastUpdated.get('startupnews')
    return render_template(
        'index.html',
        title='Startup News Digest',
        news_list=models.StartupNews.query.order_by('rank').all(),
        navs=[
            ('Startup News', 'http://news.dbanotes.net/news'),
            ('New', 'http://news.dbanotes.net/newest'),
            ('Comments', 'http://news.dbanotes.net/newcomments'),
            ('Leaders', 'http://news.dbanotes.net/leaders'),
            ('Submit', 'http://news.dbanotes.net/submit')],
        last_updated=ts and human(ts, 1)
    )

def xup_list(self, mess, args):
    """Show everyone who is on the ready list."""
    now = datetime.utcnow()
    members = self['users'].values()
    members = sorted(members, key=lambda member: member['time'])
    for member in members:
        if ((now - member['time']).seconds // 3600) < 2:
            member['message'] = " ".join(member['args'])
            member['time_ago'] = ago.human(now - member['time'], 1)
    return {'members': members}

def hackernews():
    ts = models.LastUpdated.get('hackernews')
    return render_template(
        'index.html',
        title='Hacker News Digest',
        news_list=models.HackerNews.query.order_by('rank').all(),
        navs=[
            ('Hacker News', 'https://news.ycombinator.com/news'),
            ('New', 'https://news.ycombinator.com/newest'),
            ('Comments', 'https://news.ycombinator.com/newcomments'),
            ('Show', 'https://news.ycombinator.com/show'),
            ('Ask', 'https://news.ycombinator.com/ask'),
            ('Jobs', 'https://news.ycombinator.com/jobs'),
            ('Submit', 'https://news.ycombinator.com/submit')],
        last_updated=ts and human(ts, 1)
    )

def agolookup():
    if request.args.get('ts') and request.args.get('strf'):
        try:
            if request.args.get('strf') == "%epoch":
                dt = datetime.fromtimestamp(float(request.args.get('ts')))
            else:
                dt = datetime.strptime(request.args.get('ts'),
                                       request.args.get('strf'))
            dt = dt - timedelta(hours=4)  # gotta get UTC
            resp = {'ago': human(dt),
                    '_agoreq': request.args.get('ts'),
                    '_agostrf': request.args.get('strf')}
            if request.args.get('strf_to'):
                resp['strf_to'] = dt.strftime(request.args.get('strf_to'))
            return jsonify(resp)
        except Exception as e:
            return jsonify({'error': "{}: {}".format(type(e).__name__, str(e))})
    return jsonify({'error': 'Needs ts and strf as get variables'})

def finish(self):
    self.ended = utils.now()
    verb = 'finished successfully in'
    if not self.success:
        verb = 'failed after'
    ts = ago.human(
        self.ended - self.started,
        precision=5,
        past_tense='%s {0}' % verb  # hee hee
    )
    self.log.info('{0} {1}'.format(self.version, ts))
    self.log_handler.pop_application()

def ViewMessage(error_id):
    # Retrieve message from database
    message = MessageManager.get.by_id(error_id)

    # Update `last_viewed_at` field
    message.last_viewed_at = datetime.utcnow()
    message.save()

    # Parse request headers
    web_client = Request.Headers.get('X-Plex-Product', '').lower() == 'plex web'

    # Build objects
    oc = ObjectContainer(
        title2='[%s] %s' % (Message.Type.title(message.type),
                            Trim(message.summary))
    )

    if message.type == Message.Type.Exception:
        # Display exception samples
        for e in message.exceptions.order_by(Exception.timestamp.desc()).limit(50):
            since = datetime.utcnow() - e.timestamp
            callback = Callback(ViewMessage, error_id=error_id)

            if web_client:
                # Display exception traceback in Plex/Web
                callback = Callback(ViewException, exception_id=e.id)

            oc.add(DirectoryObject(
                key=callback,
                title=pad_title('[%s] %s: %s' % (human(since, precision=1),
                                                 e.type, e.message)),
                thumb=R("icon-exception.png")
            ))
    elif message.type in [Message.Type.Info, Message.Type.Warning,
                          Message.Type.Error, Message.Type.Critical]:
        # Display message code
        oc.add(DirectoryObject(
            key='',
            title=pad_title('Code: %s' % hex(message.code))
        ))

        # Display message description
        if message.description:
            oc.add(DirectoryObject(
                key='',
                title=pad_title('Description: %s' % message.description)
            ))

    return oc