def get_verbose_status_of_marathon_app(app):
    """Takes a given marathon app object and returns the verbose details about the tasks, times, hosts, etc

    Returns a (tasks, text) tuple: the app's task list, plus a
    human-readable multi-line report.
    """
    output = []
    # app.version is an ISO-8601 UTC timestamp of the app's creation.
    create_datetime = datetime_from_utc_to_local(isodate.parse_datetime(app.version))
    output.append(" Marathon app ID: %s" % PaastaColors.bold(app.id))
    output.append(" App created: %s (%s)" % (str(create_datetime), humanize.naturaltime(create_datetime)))
    output.append(" Tasks:")
    rows = [("Mesos Task ID", "Host deployed to", "Deployed at what localtime")]
    for task in app.tasks:
        local_deployed_datetime = datetime_from_utc_to_local(task.staged_at)
        if task.host is not None:
            # short hostname + first advertised port
            hostname = "%s:%s" % (task.host.split(".")[0], task.ports[0])
        else:
            hostname = "Unknown"
        rows.append((
            get_short_task_id(task.id),
            hostname,
            '%s (%s)' % (
                local_deployed_datetime.strftime("%Y-%m-%dT%H:%M"),
                humanize.naturaltime(local_deployed_datetime),
            )
        ))
    output.append('\n'.join([" %s" % line for line in format_table(rows)]))
    if len(app.tasks) == 0:
        output.append(" No tasks associated with this marathon app")
    return app.tasks, "\n".join(output)
def check_upload(box_name, upload_threshold):
    """Checks last upload time for box_name and returns Nagios

    status code: OK, CRITICAL, or UNKNOWN (module-level constants).
    upload_threshold is the maximum allowed age in seconds.
    """
    try:
        # no-cache so we see the backend's live health, not a cached copy
        headers = {'Cache-Control': 'no-cache'}
        response = requests.get(API_URL.format(box_name), headers=headers)
        response.raise_for_status()
    except Exception as e:
        print('{} failed getting health from backend: {}'.format(box_name, e))
        return UNKNOWN
    try:
        health = response.json()
    except Exception as e:
        print('{} failed decoding health response: {}'.format(box_name, e))
        return UNKNOWN
    if 'last_upload' not in health:
        print('{} last_upload not found in health results'.format(box_name))
        return UNKNOWN
    # age of the most recent upload as a timedelta
    last_upload = datetime.now(tzutc()) - parser.parse(health['last_upload'])
    if last_upload.total_seconds() > upload_threshold:
        print('{} last uploaded {}'.format(box_name, naturaltime(last_upload)))
        return CRITICAL
    print('{} last upload {}'.format(box_name, naturaltime(last_upload)))
    return OK
def handle_message(tm2source, bucket, s3_url, body):
    """Render, optimize and upload one MBTiles file described by a queue message.

    body is a JSON-encoded message with 'id' and 'type' ('pyramid' or 'list').
    Returns a result message for the task; raises ValueError on unknown type.
    """
    msg = json.loads(body.decode('utf-8'))
    task_id = msg['id']
    mbtiles_file = task_id + '.mbtiles'
    source = 'tmsource://' + os.path.abspath(tm2source)
    sink = 'mbtiles://' + os.path.abspath(mbtiles_file)
    tilelive_cmd = []
    if msg['type'] == 'pyramid':
        tilelive_cmd = render_pyramid(msg, source, sink)
    elif msg['type'] == 'list':
        tilelive_cmd = render_list(msg, source, sink)
    else:
        raise ValueError("Message must be either of type pyramid or list")
    # hard 5-minute cap on the external render process
    _, render_time = timing(subprocess.check_call, tilelive_cmd, timeout=5*60)
    print('Render MBTiles: {}'.format(naturaltime(render_time)))
    _, optimize_time = timing(optimize_mbtiles, mbtiles_file)
    print('Optimize MBTiles: {}'.format(naturaltime(optimize_time)))
    _, upload_time = timing(upload_mbtiles, bucket, mbtiles_file)
    print('Upload MBTiles : {}'.format(naturaltime(upload_time)))
    download_link = s3_url(mbtiles_file)
    print('Uploaded {} to {}'.format(
        naturalsize(os.path.getsize(mbtiles_file)),
        download_link
    ))
    # local artifact is no longer needed once uploaded
    os.remove(mbtiles_file)
    return create_result_message(task_id, download_link, msg)
def on_message(msg, server):
    """Respond to a '!status' chat command with the configured WoW realm status.

    Returns a formatted status string, or None when the message is not a
    !status command.
    """
    text = msg.get("text", "")
    match = re.findall(r"!status( .*)?", text)
    if not match:
        return
    session = Session()
    try:
        realm = session.query(Realm).filter(Realm.name == wowconfig["realm"]).one()
        status = u"Currently *{0}* is ".format(realm.name)
        if realm.online:
            status += "online (last check: {0})".format(
                humanize.naturaltime(realm.lastchecked)
            )
            status += "\n\n*Battleground status*:\n```"
            for area in realm.areas:
                status += "{0} : {1} controlled : {2} : next in {3}\n".format(
                    area.name,
                    area.faction,
                    area.status,
                    humanize.naturaldelta(area.next),
                )
            status += "```"
        else:
            status += "offline (last check: {0}, last seen: {1})".format(
                humanize.naturaltime(realm.lastchecked),
                humanize.naturaltime(realm.lastseen)
            )
        return status
    except NoResultFound:
        # realm not in the database yet
        return u"No status known on *{0}*".format(wowconfig["realm"])
def prettify(match):
    """ Prettifies given match object """
    # positive diff => kickoff was in the past; >90min => finished
    diff = (datetime.datetime.now(tz=dateutil.tz.tzlocal()) - dateutil.parser.parse(match['datetime']))
    seconds = diff.total_seconds()
    if seconds > 0:
        if seconds > 60 * 90:
            status = PAST
        else:
            status = NOW
    else:
        status = FUTURE
    if status in [PAST, NOW]:
        color = colorama.Style.BRIGHT + colorama.Fore.GREEN
    else:
        color = colorama.Style.NORMAL + colorama.Fore.WHITE
    home = match['home_team']
    away = match['away_team']
    if status == NOW:
        minute = int(seconds / 60)
        match_status = "Being played now: %s minutes gone" % minute
    elif status == PAST:
        if match['winner'] == 'Draw':
            result = 'Draw'
        else:
            result = "%s won" % (match['winner'])
        match_status = "Played %s. %s" % (humanize.naturaltime(diff), result)
    else:
        match_status = "Will be played %s" % humanize.naturaltime(diff)
    # progress bar: elapsed fraction of a 90-minute match
    if status == NOW:
        match_percentage = int(seconds / 60 / 90 * 100)
    elif status == FUTURE:
        match_percentage = 0
    else:
        match_percentage = 100
    # \u26BD is the soccer-ball glyph
    return u""" {} {:<30} {} - {} {:>30} {} \u26BD {} """.format(
        color,
        home['country'],
        home['goals'],
        away['goals'],
        away['country'],
        progress_bar(match_percentage),
        colorama.Fore.WHITE + match_status
    )
def get(self):
    """List webmentions for a target URL, as JSON or rendered HTML.

    Query params: target (URL), unverified=on to include unverified
    mentions, json=on for a jf2 feed instead of the HTML page.
    """
    target, targetdomain = geturlanddomain(self.request.get('target'))
    unverified = self.request.get('unverified', 'off') == 'on'
    jsonformat = self.request.get('json', 'off') == 'on'
    targetkey = ndb.Key('Domain', targetdomain)
    # fix: previously logged the `json` module object instead of the flag
    logging.info("ListMentions target:%s targetdomain %s unverified %s json %s" %
                 (target, targetdomain, unverified, jsonformat))
    if unverified:
        mentionquery = Mention.query(ancestor=targetkey).order(-Mention.updated)
    else:
        mentionquery = Mention.query(ancestor=targetkey).filter(Mention.verified == True).order(-Mention.updated)
    rawmentions = mentionquery.fetch(100)
    mentions = []
    logging.info("listmentions got %s mentions for %s" % (len(rawmentions), target))
    # keep only mentions whose target is under the requested URL prefix
    for mention in rawmentions:
        logging.info("rawmention.target '%s' target '%s' %s" % (mention.target, target, mention.target.startswith(target)))
        if mention.target.startswith(target):
            mentions.append(mention)
    if jsonformat:
        jsonout = {'type': 'feed', 'children': []}
        for mention in mentions:
            if mention.sourcejf2:
                jsonout['children'].append(json.loads(mention.sourcejf2))
            else:
                # minimal jf2 entry when no parsed source is stored
                jsonout['children'].append({
                    "type": "entry",
                    "published": mention.created.isoformat(),
                    "url": mention.source,
                })
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(jsonout))
    else:
        for mention in mentions:
            mention.humancreated = humanize.naturaltime(mention.created)
            mention.humanupdated = humanize.naturaltime(mention.updated)
            mention.prettytarget = cassis.auto_link(mention.target, do_embed=True, maxUrlLength=80)
            if mention.sourcejf2:
                name = mention.source
                jf = json.loads(mention.sourcejf2)
                logging.info("ListMentions type %s " % (jf.get("type", "")))
                # fix: `post` was unbound for jf2 types other than feed/entry
                post = {}
                if jf.get("type", "") == "feed":
                    kids = jf.get("children", [{}])
                    logging.info("ListMentions children %s " % (kids[0]))
                    post = kids[0]
                elif jf.get("type", "") == "entry":
                    logging.info("ListMentions entry %s " % (jf))
                    post = jf
                name = post.get("name", mention.source)
                content = post.get("content", name)
                mention.prettysource = cassis.auto_link(content, do_embed=True, maxUrlLength=80)
            else:
                mention.prettysource = cassis.auto_link(mention.source, do_embed=True, maxUrlLength=80)
        template_values = {'mentions': mentions, 'targetdomain': targetdomain}
        template = JINJA_ENVIRONMENT.get_template('main.html')
        self.response.write(template.render(template_values))
def add_human_times_to_header_super_verbose(hdr):
    """Return a copy of *hdr* with humanized start/stop times added.

    Adds 'human_time' to the start and stop sub-dicts and a
    'scan_duration' to the stop sub-dict; the input is not mutated.
    """
    header = dict(hdr)
    start = dict(header['start'])
    stop = dict(header['stop'])
    start['human_time'] = humanize.naturaltime(start['time'])
    stop['human_time'] = humanize.naturaltime(stop['time'])
    stop['scan_duration'] = humanize.naturaldelta(stop['time'] - start['time'])
    header['start'] = start
    header['stop'] = stop
    return header
def test_converts_date_strings_with_days(self):
    """Relative day expressions in t"..." literals parse to datetimes
    that humanize back to the same phrase."""
    p = Parser('A(1) B.b(a=t"3 days ago")')
    t = p.steps[0]['filters'][0]['kwargs']['a']
    s = str(humanize.naturaltime(t))
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual(s, "3 days ago")
    p = Parser('A(1) B.b(a=t"3 days 15 minutes from now")')
    t = p.steps[0]['filters'][0]['kwargs']['a']
    s = str(humanize.naturaltime(t))
    # humanize rounds to the dominant unit, so minutes are dropped
    self.assertEqual(s, "3 days from now")
def test_converts_date_strings_with_seconds(self):
    """Relative second expressions in t"..." literals round-trip
    through humanize."""
    p = Parser('A(1) B.b(a=t"10 seconds ago")')
    t = p.steps[0]['filters'][0]['kwargs']['a']
    s = str(humanize.naturaltime(t))
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual(s, "10 seconds ago")
    p = Parser('A(1) B.b(a=t"10 seconds from now")')
    t = p.steps[0]['filters'][0]['kwargs']['a']
    s = str(humanize.naturaltime(t))
    # a second may elapse between parse and humanize, so accept either
    self.assertEqual(s in ["10 seconds from now", "9 seconds from now"], True)
def naturaltime(datetime):
    """Humanize *datetime* using the app's configured Babel locale.

    Falls back to the unlocalized rendering when locale activation (or
    config lookup) fails; failures are logged.
    """
    try:
        locale = babel.app.config['BABEL_DEFAULT_LOCALE']
        humanize.activate(locale)
        return humanize.naturaltime(datetime)
    except IOError as ioe:
        current_app.logger.error("An exception raised from the datetime util." + str(ioe))
        return humanize.naturaltime(datetime)
    except Exception as e:
        current_app.logger.error("An exception raised from the datetime util." + str(e))
        # fix: this branch previously fell through and returned None
        return humanize.naturaltime(datetime)
def test_converts_date_strings_with_minutes(self):
    """Relative minute expressions in t"..." literals round-trip
    through humanize."""
    p = Parser('A(1) B.b(a=t"2 minutes ago")')
    t = p.steps[0]['filters'][0]['kwargs']['a']
    t_natural = str(humanize.naturaltime(t))
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual(t_natural, "2 minutes ago")
    p = Parser('A(1) B.b(a=t"10 minutes 10 seconds from now")')
    t = p.steps[0]['filters'][0]['kwargs']['a']
    t_natural = str(humanize.naturaltime(t))
    # humanize rounds to the dominant unit, so seconds are dropped
    self.assertEqual(t_natural, "10 minutes from now")
def human_readable_datetime(self, dt=None, to_utc=False):
    """
    Convert with `dt` datetime string into a human readable
    representation using humanize module.

    Returns None when no datetime is supplied. When to_utc is set, the
    value is humanized relative to the current UTC time.
    """
    if not dt:
        return None
    if to_utc:
        return humanize.naturaltime(datetime.datetime.utcnow() - dt)
    return humanize.naturaltime(dt)
def prettify(match):
    """ Prints a match object """
    # positive diff => kickoff was in the past; >90min => finished
    diff = (datetime.datetime.now(pytz.timezone("UTC")) - dateutil.parser.parse(match['datetime']))
    seconds = diff.total_seconds()
    if seconds > 0:
        if seconds > 60 * 90:
            status = PAST
        else:
            status = NOW
    else:
        status = FUTURE
    if status in [PAST, NOW]:
        color = colorama.Style.BRIGHT + colorama.Fore.GREEN
    else:
        color = colorama.Style.NORMAL + colorama.Fore.WHITE
    home = match['home_team']
    away = match['away_team']
    if status == NOW:
        minute = int(seconds / 60)
        match_status = "Being played: %s. minute" % minute
    elif status == PAST:
        match_status = "Played %s. Winner: %s" % (humanize.naturaltime(diff), match['winner'])
    else:
        match_status = "Will be played %s" % humanize.naturaltime(diff)
    # progress bar: elapsed fraction of a 90-minute match
    if status == NOW:
        match_percentage = int(seconds / 60 / 90 * 100)
    elif status == FUTURE:
        match_percentage = 0
    else:
        match_percentage = 100
    # \xE2\x9A\xBD is the UTF-8 byte sequence for the soccer-ball glyph
    return """ {} {:<30} {} - {} {:>30} {} \xE2\x9A\xBD {} """.format(
        color,
        home['country'],
        home['goals'],
        away['goals'],
        away['country'],
        progress_bar(match_percentage),
        colorama.Fore.WHITE + match_status
    )
def humanize_timestamp(value):
    """Humanize a datetime or epoch number; 'never' for None.

    Accepts a datetime (compared against now) or an int/long/float epoch
    timestamp (compared against time.time()); raises TypeError otherwise.
    Python 2 code (uses `long`).
    """
    if value is None:
        return u'never'
    if isinstance(value, datetime.datetime):
        return humanize.naturaltime(datetime.datetime.now() - value)
    if isinstance(value, (int, long, float)):
        return humanize.naturaltime(time.time() - value)
    raise TypeError("Unsupported date format: {0!r}".format(value))
def genSection(title, divid, query, colorize):
    """Print one collapsible, sortable HTML panel of issues from *query*.

    `c` is a module-level DB cursor; `colorize` maps the per-row locals()
    to a CSS row class. Returns the number of issue rows emitted.
    """
    rows = list(enumerate(c.execute(query)))
    nrows = len(rows)
    # panel header — .format(**locals()) pulls title/divid/nrows
    print(""" <div class="panel panel-default"> <div class="panel-heading"> <h2 class="panel-title"> <a data-toggle="collapse" data-parent="#accordion" href="#{divid}"> <strong>{title} ({nrows} issues)</strong> </a> </h2> </div> <div id="{divid}" class="panel-collapse collapse"> <div class="panel-body"> """.format(**locals()))
    print(""" <table class='sortable table table-condensed table-striped table-hover table-responsive'><thead> <tr><th>#</th><th>Issue</th><th>Reporter</th><th>Assignee</th><th>Created</th><th>Updated</th><th>Milestone</th></tr> </thead><tbody>""")
    for i, row in rows:
        # row columns: url, number, title, created, reporter, assignee, updated, milestone
        created = datetime.strptime(row[3], '%Y-%m-%dT%H:%M:%SZ')
        hcreated = humanize.naturaltime(created)
        url = row[0].strip()
        number = row[1]
        title = row[2]
        reporter = row[4]
        assignee = row[5]
        updated = datetime.strptime(row[6], '%Y-%m-%dT%H:%M:%SZ')
        hupdated = humanize.naturaltime(updated)
        milestone = row[7]
        status = colorize(locals())
        # hidden ISO timestamps keep the sortable-table ordering exact
        print("""<tr class='{status}'> <td>{i}</td> <td><a href="{url}">{number} - {title}</a></td> <td><a href="https://github.com/{reporter}">{reporter}</a></td> <td><a href="https://github.com/{assignee}">{assignee}</a></td> <td><span class='hidden'>{created}</span>{hcreated}</td> <td><span class='hidden'>{updated}</span>{hupdated}</td> <td>{milestone}</td> </tr>""".format(**locals()))
    print("""</tbody></table>""")
    print(""" </div> </div> </div> """)
    return nrows
def series(bot, trigger):
    """IRC command: search for a TV series and report its details,
    followed by next/last episode lines when available."""
    if trigger.group(2) is None:
        bot.say('You need to give me a series to search!')
    else:
        data = tracker.bot_api_request('/series/search?series=' + parse.quote(trigger.group(2)))
        if 'status_code' in data:
            # API returned an error payload
            bot.say(data['message'])
        else:
            items = [EntityGroup([Entity(data['title'])])]
            details = EntityGroup()
            details.append(Entity("Network", data['network']))
            details.append(Entity("Status", data['status']))
            details.append(Entity("Year", data['year']))
            if data['air_day'] is not None:
                details.append(Entity("Airs in", "{} @ {}".format(data['air_day'], data['air_time'])))
            items.append(details)
            # prefer the slug URL, falling back to the numeric id
            items.append(Entity(_base_url + "series/" + str(data['slug'] if data['slug'] else data['id'])))
            bot.say(render(items=items))
            if 'id' in data['next_episode']:
                items_next = [EntityGroup([Entity("Next Episode")])]
                next_details = EntityGroup()
                if data['next_episode']['title'] is None:
                    data['next_episode']['title'] = 'N/A'
                next_details.append(Entity(data['next_episode']['title']))
                next_details.append(Entity("S{:02}E{:02}".format(
                    int(data['next_episode']['season']), int(data['next_episode']['episode']))))
                items_next.append(next_details)
                # first_aired is an epoch timestamp (UTC)
                items_next.append(EntityGroup([Entity(humanize.naturaltime(
                    datetime.datetime.utcfromtimestamp(int(data['next_episode']['first_aired']))))]))
                bot.say(render(items=items_next))
            if 'id' in data['last_episode']:
                items_last = [EntityGroup([Entity("Last Episode")])]
                next_details = EntityGroup()
                if data['last_episode']['title'] is None:
                    data['last_episode']['title'] = 'N/A'
                next_details.append(Entity(data['last_episode']['title']))
                next_details.append(Entity("S{:02d}E{:02d}".format(
                    int(data['last_episode']['season']), int(data['last_episode']['episode']))))
                items_last.append(next_details)
                items_last.append(EntityGroup([Entity(humanize.naturaltime(
                    datetime.datetime.utcfromtimestamp(int(data['last_episode']['first_aired']))))]))
                bot.say(render(items=items_last))
def __init__(self, contents, my_name):
    """Build a timeline entry from a reply-book (census) or a self-story.

    my_name is the viewing user's username, used for the
    liked_already_by_you flag. Raises TimelineException for any other
    contents type.
    """
    if isinstance(contents, (ReplyBook, OldReplyBook)):
        self.type = 'census'
        self.username = contents.username
        self.contents = {
            'question_book_title': CachedQuestionBookTitle(contents.question_book_id),
            'question_book_id': contents.question_book_id
        }
        self.created_at = contents.requested_at
    elif isinstance(contents, SelfStory):
        self.type = 'selfstory'
        self.username = contents.username
        self.contents = {
            'selfstory_id': contents.id,
            'selfstory_photo_url': contents.photo_url,
            'title': contents.title if contents.title else contents.story,
        }
        likes = [like.username for like in SelfStoryLike.query.filter(SelfStoryLike.story_id == contents.id).all()]
        self.contents['like_cnt'] = str(len(likes))
        self.contents['liked_already_by_you'] = True if my_name in likes else False
        self.created_at = contents.created_at
    else:
        raise TimelineException()
    self.nickname, self.photo_thumbnail_url = CachedUserInfo(self.username)
    # NOTE(review): `humanize` here is called like a locale-activation
    # helper (Korean), not the humanize module itself — confirm the import.
    humanize('ko_KR')
    self.created_at_humanize = naturaltime(self.created_at)
def naturaltime(val):
    """Return a human-friendly description of how long ago *val* was.

    *val* may be a naive datetime (assumed UTC) or a parseable
    timestamp string.
    """
    if isinstance(val, datetime):
        moment = val.replace(tzinfo=pytz.utc)
    else:
        moment = parse(val)
    delta = datetime.utcnow().replace(tzinfo=pytz.utc) - moment
    return humanize.naturaltime(delta)
async def get_last_seen(): """Get the last_seen status from cache or query it manually.""" # The cache expired and we are forced to query manually cooldown_is_active = await cache.get("telegram_cooldown", default=False) if cooldown_is_active: logging.info("Telegram cache has expired but Telegram API request cooldown is active. Assuming lukas was never online") return datetime.min logging.info("Telegram cache has expired, fetching fresh data.") try: lukas = await client.get_entity('lukasovich') except FloodError: logging.critical("Too many Telegram API requests, engaging cooldown") await cache.set("telegram_cooldown", True, ttl=3600) raise RuntimeError("Too many Telegram API requests") # Check whether he is online right now or get the last_seen status. if isinstance(lukas.status, UserStatusOnline): date = datetime.utcnow() logging.info("Currently online in Telegram.") elif isinstance(lukas.status, UserStatusOffline): date = lukas.status.was_online human_delta = humanize.naturaltime(datetime.now(date.tzinfo) - date) logging.info(f"Last seen in Telegram at {date} ({human_delta}).") else: raise RuntimeError("Lukas changed his privacy settings. We are f****d.") return date
def circus_status(endpoint=None, process=None):
    """Query a circus daemon for one process' pid/status/uptime.

    Returns a dict with 'pid', 'status' and 'uptime', each defaulting to
    'unknown' when circus or the process is unreachable. Python 2 code
    (list .keys() indexing, exc.message).
    """
    default = {
        'pid': 'unknown',
        'status': 'unknown',
        'uptime': 'unknown'
    }
    if endpoint and process:
        client = CircusClient(endpoint=endpoint, timeout=2)
        try:
            status = client.send_message('status')
            stats = client.send_message('stats')
            # Assuming here there's only a process
            pid = stats['infos'][process].keys()[0]
            try:
                uptime = int(stats['infos'][process][pid]['age'])
                default['uptime'] = humanize.naturaltime(datetime.datetime.now() - datetime.timedelta(seconds=uptime))
                default['pid'] = pid
            except:
                # circus running but process stopped — deliberate best-effort
                pass
            default['status'] = status['statuses'][process].lower()
        except Exception as exc:
            if 'TIMED OUT' in exc.message.upper():
                # circus stopped
                default['status'] = 'unknown'
    return default
def pretty_print_time(timestamp):
    """Format an epoch *timestamp* as '<relative time> (<ISO datetime>)'."""
    import datetime
    import time
    import humanize
    iso_stamp = datetime.datetime.fromtimestamp(timestamp).isoformat()
    relative = humanize.naturaltime(time.time() - timestamp)
    return '{ago} ({date})'.format(ago=relative, date=iso_stamp)
def index():
    """ Display list of repositories. """
    token = get_token()
    ctx = dict(connected=False)
    if token is not None and check_token(token):
        # The user is authenticated and the token we have is still valid.
        extra_data = token.remote_account.extra_data
        if extra_data.get("login") is None:
            # first visit: populate the account's GitHub data
            init_account(token)
            extra_data = token.remote_account.extra_data
        # Check if sync is needed - should probably not be done here
        now = utcnow()
        yesterday = now - timedelta(days=1)
        last_sync = parse_timestamp(extra_data["last_sync"])
        if last_sync < yesterday:
            sync(get_api(), extra_data)
            # mark the JSON column dirty so SQLAlchemy persists it
            token.remote_account.extra_data.changed()
            db.session.commit()
            last_sync = utcnow()
            extra_data = token.remote_account.extra_data
        ctx.update({
            "connected": True,
            "repos": extra_data['repos'],
            "name": extra_data['login'],
            "user_id": token.remote_account.user_id,
            "last_sync": humanize.naturaltime(now - last_sync),
        })
    return render_template("github/index.html", **ctx)
def print_file_info(files, number=100):
    """Print path, human-readable size and last-access time for up to
    *number* entries of *files*.

    Each entry is a (dir_path, file_name, file_size, last_access_time)
    tuple.
    """
    for dir_path, file_name, file_size, last_access_time in files[:number]:
        print('Path:{}\n Filesize:{}, Last accessed:{}'.format(
            os.path.join(dir_path, file_name),
            humanize.naturalsize(file_size),
            humanize.naturaltime(last_access_time)))
def format_haproxy_backend_row(backend, is_correct_instance):
    """Pretty Prints the status of a given haproxy backend

    Takes the fields described in the CSV format of haproxy:
    http://www.haproxy.org/download/1.5/doc/configuration.txt
    And tries to make a good guess about how to represent them in text
    """
    svname = backend['svname']
    host_part = svname.split("_")[-1]
    port_part = svname.split("_")[0].split(":")[-1]
    pretty_backend_name = "%s:%s" % (host_part, port_part)
    state = backend['status']
    if state == "UP":
        status = PaastaColors.default(state)
    elif state in ('DOWN', 'MAINT'):
        status = PaastaColors.red(state)
    else:
        status = PaastaColors.yellow(state)
    lastcheck = "%s/%s in %sms" % (backend['check_status'], backend['check_code'], backend['check_duration'])
    lastchange = humanize.naturaltime(datetime.timedelta(seconds=int(backend['lastchg'])))
    row = (
        ' %s' % pretty_backend_name,
        lastcheck,
        lastchange,
        status,
    )
    if is_correct_instance:
        return row
    # grey out rows that belong to a different instance
    return tuple(PaastaColors.grey(remove_ansi_escape_sequences(col)) for col in row)
def humanize_date(d):
    """Return a French relative description of date *d*
    ("aujourd'hui" when it is less than a day old)."""
    humanize.i18n.activate('fr')
    age = date.today() - d
    if age < timedelta(hours=24):
        return "aujourd'hui"
    return humanize.naturaltime(age)
def reports_timedelta():
    """Humanize the cut-off date for stale reports.

    Uses settings.DAYS_WITHOUT_REPORT when it is an int, otherwise
    falls back to 14 days.
    """
    days = 14
    if hasattr(settings, 'DAYS_WITHOUT_REPORT') and isinstance(settings.DAYS_WITHOUT_REPORT, int):
        days = settings.DAYS_WITHOUT_REPORT
    return naturaltime(datetime.now() - timedelta(days=days))
def index():
    """Index page: shows a random sample of terms plus the 10 most
    recently added PDFs that have thumbnails (experiment)."""
    import humanize
    docdb = c["pdfs"]
    terms = c["terms"]
    # fix: randint(0, count - 10) raises ValueError when the collection
    # holds fewer than 10 documents; clamp the upper bound at 0.
    skip_number = random.randint(0, max(0, terms.term.find().count() - 10))
    datacoba = terms.term.find().skip(skip_number).limit(10)
    # newest documents that already have a thumbnail
    data = [doc for doc in docdb.pdf.find({"thumb_updated": {"$exists": True}}).sort("added", -1).limit(10)]
    for d in data:
        # fake "updated" timestamp within the last 10 minutes
        d["updated"] = humanize.naturaltime(datetime.datetime.now() - datetime.timedelta(seconds=random.randint(0, 600)))
        # TODO: set True when the document has a thumbnail key
        d["thumbnail"] = False
    return render_template("index.html", data=data, datacoba=datacoba)
def show_status_vmw():
    """Render the chargepoint dashboard from the cached JSON status file.

    Python 2 code (dict.iteritems). Port markers: 1 = available,
    0 = occupied, -1 = unknown.
    """
    with open(JSON_FILE, 'r') as infile:
        # OrderedDict keeps garages/stations in file order for display
        data = json.load(infile, object_pairs_hook=OrderedDict)
    render_data = []
    update_time = 'unknown'
    for garage, stations in data.iteritems():
        # metadata key lives next to the garages in the same dict
        if garage == 'last_updated_epoch':
            continue
        row = {'name': garage, 'stations': []}
        for station, port_data in stations.iteritems():
            col = {'name': station, 'ports': []}
            total = port_data[0]
            available = port_data[1]
            unknown = port_data[2]
            if total != unknown:
                for _ in range(available):
                    col['ports'].append(1)
                for _ in range(total - available):
                    col['ports'].append(0)
            else:
                # every port unknown
                for _ in range(total):
                    col['ports'].append(-1)
            row['stations'].append(col)
        render_data.append(row)
    if 'last_updated_epoch' in data:
        update_time = humanize.naturaltime(get_epoch() - data['last_updated_epoch'])
    return render_template('dashboard.html', title='VMW Chargepoint Stations', garages=render_data, last_update=update_time)
def scrape_urls(bot, event):
    """Record every URL in an IRC message and call out reposts.

    Tracks per-user post/repost counters and per-URL first/last-seen
    times in the database.
    """
    session = Session()
    user = session.query(User).filter_by(nick=event.source.nick).first()
    if not user:
        user = User(nick=event.source.nick)
        session.add(user)
    urls = [url for arg in event.arguments for url in URL_PATTERN.findall(arg)]
    for url in urls:
        u = session.query(Url).filter_by(url=url).first()
        if not u:
            # first sighting: credit the poster
            u = Url(
                url=url,
                posted_by=event.source.nick,
            )
            user.posts += 1
            session.add(u)
        u.last_seen = datetime.utcnow()
        # same URL from a different nick counts as a repost
        if u.posted_by != event.source.nick:
            user.reposts += 1
            ago = datetime.utcnow() - u.first_seen
            bot.say_to(event, "repost, {} posted this {}", u.posted_by, humanize.naturaltime(ago))
    session.commit()
def serve_feeds_with_template(): current_time = time.gmtime() current_local_time = time.localtime() try: crss = CouchRSS(os.environ['COUCH'], os.environ['READABILITY'], \ os.environ['VIEW1'], os.environ['VIEW2'], os.environ['VIEW3']) print 'environment vars startup successful' except KeyError: print 'environment vars startup failed' feed_list = crss.get_feeds() all_articles = [] for i in range(0, len(feed_list)): feed_start_time = time.gmtime() d = feedparser.parse(feed_list[i]) this_feed = [] for j in range(0, 5): try: arty_date = humanize.naturaltime(time.mktime(current_time) - \ time.mktime(d.entries[j].published_parsed)) arty_link = d.entries[j].link except: arty_date = "?" arty_link = 'https://2names1scott.com/' this_feed.append([d.entries[j].title, arty_date, arty_link]) all_articles.append([d.feed.title, fix_array_of_urls(this_feed)]) print "Finished processing feed {} in {}".format( \ feed_list[i], time.mktime(time.gmtime()) - time.mktime(feed_start_time)) return template('feeds', current_local_time=current_local_time, \ all_articles=all_articles)
def parseAnswerHeaders(args, data):
    """ parseAnswerHeaders(args, data): Parse the headers out of our answer """
    retval = {}
    #
    # RR bytes:
    #
    # 0-1: Bits 2-15 contain the offset to the question that this answer answers.
    # 2-3: Type
    # 4-5: Class
    # 6-9: TTL
    # 10-11: RDLENGTH
    # 12+: RDDATA (The answer!)
    #
    # Set our offsets for the different parts of the Answer Header.
    #
    offset_type = 2
    offset_class = 4
    offset_ttl = 6
    offset_rdlength = 10
    #
    # This is going to be the angriest comment of my entire career.
    # Remember the part above where I saw the first two bytes are the offset
    # to the question? Well, if you do a specific type of query--a query against
    # a non-existent TLD, you can forget what I just said. In the case of a
    # non-existent TLD such as "testing.bad", you won't get back a pointer to
    # the question, nope! Instead what you'll get is a single byte which has
    # the value of zero. Awesome!
    #
    # I don't know if it was mentioned somewhere in RFC 1035 and I just missed
    # it, or if the behavior of DNS changed in a later RFC. Either way, this
    # bug vexed me for WEEKS until I started going through actual hex dumps
    # and tracked it down.
    #
    # /rant
    #
    if data[0] == 0:
        # No compression pointer present: every field sits one byte earlier.
        offset_type -= 1
        offset_class -= 1
        offset_ttl -= 1
        offset_rdlength -= 1
    # 16-bit big-endian fields assembled by hand from individual bytes
    retval["type"] = (256 * data[offset_type]) + data[offset_type + 1]
    retval["class"] = (256 * data[offset_class]) + data[offset_class + 1]
    retval["type_text"] = parse_question.parseQtype(retval["type"])
    retval["class_text"] = parse_question.parseQclass(retval["class"])
    #data = data[0:6] + struct.pack("B", 48) + data[7:] # Debugging - Make the TTL 25+ years
    if args.fake_ttl:
        logger.debug(
            "parseAnswerHeaders(): --fake-ttl is set, setting TTL to -1")
        retval["ttl"] = -1
    else:
        # 32-bit big-endian TTL
        retval["ttl"] = (16777216 * data[offset_ttl]) + (65536 * data[offset_ttl + 1]) + (
            256 * data[offset_ttl + 2]) + data[offset_ttl + 3]
    retval["ttl_text"] = humanize.naturaltime(datetime.datetime.now() + datetime.timedelta(
        seconds=retval["ttl"]))
    retval["rdlength"] = (256 * data[offset_rdlength]) + data[offset_rdlength + 1]
    return (retval)
def when_registered(self):
    """Human-friendly relative time since this account was confirmed."""
    confirmed = self.confirmed_at
    return humanize.naturaltime(confirmed)
def naturaltime(val):
    """Humanize how long ago the timestamp string *val* was."""
    moment = parse(val)
    utc_now = datetime.utcnow().replace(tzinfo=pytz.utc)
    return humanize.naturaltime(utc_now - moment)
def ago_display(when):
    """Render the datetime *when* as relative time; 'never' for None."""
    if when is None:
        return "never"
    return naturaltime(datetime.utcnow() - when)
def naturaltime(dt):
    """Humanize *dt* with sub-second precision stripped."""
    # Round-trip through a string to drop microseconds (and any tzinfo)
    # before humanizing; dt.strptime resolves to datetime.strptime.
    current_datetime = dt.strftime('%Y-%m-%d %H:%M:%S')
    return humanize.naturaltime(
        dt.strptime(current_datetime, '%Y-%m-%d %H:%M:%S'))
def _set_problem(self, problem):
    """Populate the detail pane for *problem* (a problem dict or None).

    Updates headline, summary, package info, icon and the 'Reported'
    section; with no problem selected, switches to a placeholder page.
    """
    def destroy_links(widget, _):
        # keep the static label; drop previously-added link widgets
        if widget != self.lbl_reported_value:
            widget.destroy()
    self.selected_problem = problem
    sensitive_btn = problem is not None
    self.btn_delete.set_sensitive(sensitive_btn)
    # short-circuit protects the subscript when problem is None
    self.btn_report.set_sensitive(sensitive_btn and not problem['not-reportable'])
    self.vbx_links.foreach(destroy_links, None)
    self.vbx_problem_messages.foreach(lambda w, u: w.destroy(), None)
    if problem:
        self.nb_problem_layout.set_current_page(0)
        app = problem['application']
        if problem['type'] == 'Kerneloops':
            self.lbl_reason.set_text(_("Unexpected system error"))
            self.lbl_summary.set_text(
                _("The system has encountered a problem and recovered."))
        elif problem['type'] == 'vmcore':
            self.lbl_reason.set_text(_("Fatal system failure"))
            self.lbl_summary.set_text(
                _("The system has encountered a problem and could not continue."
                  ))
        else:
            if not app.name:
                self.lbl_reason.set_text(
                    # Translators: If Application's name is unknown,
                    # display neutral header
                    # "'Type' problem has been detected". Examples:
                    # Kerneloops problem has been detected
                    # C/C++ problem has been detected
                    # Python problem has been detected
                    # Ruby problem has been detected
                    # VMCore problem has been detected
                    # AVC problem has been detected
                    # Java problem has been detected
                    _("{0} problem has been detected").format(
                        problem['human_type']))
            else:
                self.lbl_reason.set_text(
                    _("{0} quit unexpectedly").format(app.name))
            self.lbl_summary.set_text(
                _("The application encountered a problem and could not continue."
                  ))
        self.lbl_app_name_value.set_text(
            # Translators: package name not available
            problem['package_name'] or _("N/A"))
        self.lbl_app_version_value.set_text(
            # Translators: package version not available
            problem['package_version'] or _("N/A"))
        self.lbl_detected_value.set_text(
            humanize.naturaltime(datetime.datetime.now() - problem['date']))
        self.lbl_detected_value.set_tooltip_text(problem['date'].strftime(
            config.get_configuration()['D_T_FMT']))
        icon_buf = None
        scale = self.img_app_icon.get_scale_factor()
        if app.icon:
            icon_buf = load_icon(gicon=app.icon, scale=scale)
        if icon_buf is None:
            # fall back to a generic, dimmed icon
            icon_buf = load_icon(name="system-run-symbolic", scale=scale)
            self.img_app_icon.get_style_context().add_class('dim-label')
        else:
            self.img_app_icon.get_style_context().remove_class('dim-label')
        # icon_buf can be None and if it is None, no icon will be displayed
        set_icon_from_pixbuf_with_scale(self.img_app_icon, icon_buf, scale)
        self.lbl_reported_value.show()
        self.lbl_reported.set_text(_("Reported"))
        if problem['not-reportable']:
            self.lbl_reported_value.set_text(_('cannot be reported'))
            self._show_problem_links(problem['submission'])
            self._show_problem_message(problem['not-reportable'])
        elif problem['is_reported']:
            if self._show_problem_links(problem['submission']):
                self.lbl_reported.set_text(_("Reports"))
                self.lbl_reported_value.hide()
                if (not any((s.name == "Bugzilla"
                             for s in problem['submission']))):
                    self._show_problem_message(
                        _("This problem has been reported, but a <i>Bugzilla</i> ticket has not"
                          " been opened. Our developers may need more information to fix the problem.\n"
                          "Please consider also <b>reporting it</b> to Bugzilla in"
                          " order to provide that. Thank you."))
            else:
                # Translators: Displayed after 'Reported' if a problem
                # has been reported but we don't know where and when.
                # Probably a rare situation, usually if a problem is
                # reported we display a list of reports here.
                self.lbl_reported_value.set_text(_('yes'))
        else:
            # Translators: Displayed after 'Reported' if a problem
            # has not been reported.
            self.lbl_reported_value.set_text(_('no'))
    else:
        # nothing selected: show the "source loading" or empty page
        if self._source is not None:
            self.nb_problem_layout.set_current_page(1)
        else:
            self.nb_problem_layout.set_current_page(2)
def mv_to_row(mv: MaintenanceView, cv: ClusterView) -> Tuple[str, ...]:
    """Build one display-table row describing maintenance *mv*.

    Columns: id, affected nodes, status, shard progress, sequencer
    progress, creator, creation time, expiry.
    """
    maintenance_id = mv.group_id
    affected = shorten(",".join(f"N{ni}" for ni in mv.affected_node_indexes), 30, placeholder="...")
    status = colored(mv.overall_status.name, _color(mv.overall_status))
    if mv.affects_shards:
        if mv.is_blocked:
            shard_color = "red"
        elif not mv.are_all_shards_done:
            shard_color = "yellow"
        else:
            shard_color = "green"
        shard_progress = colored(  # pyre-ignore
            f"{mv.shard_target_state.name}"
            f"({mv.num_shards_done}/{mv.num_shards_total})",
            color=shard_color,
        )
    else:
        shard_progress = "-"
    if mv.affects_sequencers:
        seq_color = "green" if mv.are_all_sequencers_done else "yellow"
        sequencer_progress = colored(  # pyre-ignore
            f"{mv.sequencer_target_state.name}"
            f"({mv.num_sequencers_done}/{mv.num_sequencers_total})",
            color=seq_color,
        )
    else:
        sequencer_progress = "-"
    if mv.reason:
        created_by = shorten(f"{mv.user} ({mv.reason})", 40, placeholder="...")
    else:
        created_by = f"{mv.user}"
    created_on = str(mv.created_on) if mv.created_on else "-"
    expires_on = naturaltime(mv.expires_on) if mv.expires_on else "-"
    return (
        maintenance_id,
        affected,
        status,
        shard_progress,
        sequencer_progress,
        created_by,
        created_on,
        expires_on,
    )
def slang_time(self):
    """Return a human-friendly ("slang") phrase for this moment in time."""
    local_dt = self.datetime(naive=True, to_timezone=self.local_timezone)
    return humanize.naturaltime(local_dt)
def mentionmention(self, to, wait, text, dataMid=None, pl='', ps='', pg='', pt=None):
    """Send a message that @-mentions every user in ``dataMid``.

    Builds a numbered list of '@[RhyN-i]' placeholders (decorated per mode),
    computes the LINE MENTION metadata offsets for each placeholder and sends
    the message; in 'MENTIONALLUNSED' mode the message is sent then unsent.

    Parameters
    ----------
    to : str
        Target chat id.
    wait : dict
        Bot state; only wait["lurkt"] is read (in 'SIDERMES' mode).
    text : str
        Text combined with the generated list.
    dataMid : list, optional
        Mids of the users to mention.
    pl : int
        Starting value of the row counter (assumed int — TODO confirm; the
        declared default '' would break `no += 1`).
    ps : str
        Prefix text placed before the generated list.
    pg : str
        Mode selector: 'MENTIONALLUNSED', 'SIDERMES', 'DELFL' or 'DELML'.
    pt : list, optional
        Collection whose length marks the last row (closing '╰' glyph).
    """
    # BUGFIX: mutable default arguments ([]); use None sentinels instead.
    dataMid = [] if dataMid is None else dataMid
    pt = [] if pt is None else pt
    arr = []
    list_text = ps
    i = 0
    no = pl
    if pg == 'MENTIONALLUNSED':
        # Plain numbered list of mention placeholders.
        for l in dataMid:
            no += 1
            if no == len(pt):
                list_text += '\n╰' + str(no) + '. @[RhyN-' + str(i) + '] '
            else:
                list_text += '\n│' + str(no) + '. @[RhyN-' + str(i) + '] '
            i = i + 1
        text = list_text + text
    if pg == 'SIDERMES':
        # Annotate each entry with a humanized "last seen" time taken from
        # wait["lurkt"]. NOTE(review): the lookup always uses dataMid[0]
        # regardless of l — preserved as-is; confirm this is intentional.
        for l in dataMid:
            chiya = []
            for rom in wait["lurkt"][to][dataMid[0]].items():
                chiya.append(rom[1])
            for b in chiya:
                a = '{}'.format(
                    humanize.naturaltime(datetime.fromtimestamp(b / 1000)))
                no += 1
                if no == len(pt):
                    list_text += '\n│' + str(no) + '. @[RhyN-' + str(
                        i) + ']\n╰ 「 ' + a + " 」"
                else:
                    list_text += '\n│' + str(no) + '. @[RhyN-' + str(
                        i) + ']\n│ 「 ' + a + " 」"
                i = i + 1
        text = list_text + text
    if pg == 'DELFL':
        # Try to delete each contact, annotating success/failure per row.
        for l in dataMid:
            try:
                self.deleteContact(l)
                a = 'Del Friend'
            except Exception:  # narrowed from bare except: best-effort delete
                a = 'Not Friend User'
            no += 1
            if no == len(pt):
                list_text += '\n╰' + str(no) + '. @[RhyN-' + str(
                    i) + '] ' + a
            else:
                list_text += '\n│' + str(no) + '. @[RhyN-' + str(
                    i) + '] ' + a
            i = i + 1
        text = text + list_text
    if pg == 'DELML':
        # Remove each mid from the mimic target list, annotating per row.
        for l in dataMid:
            if l not in settings["mimic"]["target"]:
                a = 'Not ML User'
            else:
                a = 'DEL ML'
                settings["mimic"]["target"].remove(l)
            no += 1
            if no == len(pt):
                list_text += '\n╰' + str(no) + '. @[RhyN-' + str(
                    i) + '] ' + a
            else:
                list_text += '\n│' + str(no) + '. @[RhyN-' + str(
                    i) + '] ' + a
            i = i + 1
        text = list_text
    i = 0
    # Locate each placeholder in the flattened text and record its span for
    # the MENTION metadata.
    for l in dataMid:
        mid = l
        name = '@[RhyN-' + str(i) + ']'
        ln_text = text.replace('\n', ' ')
        # BUGFIX: was `if ln_text.find(name):`, which skipped a match at
        # index 0 (find() == 0 is falsy) and accepted a miss (-1 is truthy),
        # producing wrong offsets. Test the index against -1 explicitly.
        line_s = ln_text.find(name)
        if line_s != -1:
            line_e = line_s + len(name)
            arrData = {'S': str(line_s), 'E': str(line_e), 'M': mid}
            arr.append(arrData)
        i = i + 1
    contentMetadata = {
        'MENTION':
        str('{"MENTIONEES":' + json.dumps(arr).replace(' ', '') + '}')
    }
    if pg == 'MENTIONALLUNSED':
        self.unsendMessage(self.sendMessage(to, text, contentMetadata).id)
    else:
        self.sendMessage(to, text, contentMetadata)
def changed_on_humanized(self) -> str:
    """Human-readable elapsed time since ``changed_on``."""
    elapsed = datetime.now() - self.changed_on
    return humanize.naturaltime(elapsed)
def check_time(date, time):
    """Return ``(relative_description, twelve_hour_time)`` for a date/time.

    Parameters
    ----------
    date : str
        'MM/DD/YYYY' (anything convertible to such a string).
    time : str
        'HH:MM' 24-hour clock (anything convertible to such a string).

    Returns
    -------
    tuple of (str, str)
        First element is a phrase like '2 days, 3 hours 5 minutes Ago'
        (or '... Away' for a future time), second is the 12-hour rendering
        of ``time``. Also sets the module-level global ``past`` as a side
        effect (True when the given moment is in the future).
    """
    time = str(time)
    time2 = time
    date = str(date)
    # Build the 12-hour representation.
    # NOTE(review): hour 0 renders as '0:xx AM' and hour 12 as '12:xx AM';
    # preserved as-is so existing output does not change.
    t1, t2 = str.split(time2, ':')
    if int(t1) > 12:
        t1 = int(t1) - 12
        time2 = str(t1) + ':' + t2 + ' PM'
    else:
        time2 = time2 + ' AM'
    aa = datetime.strptime(date + ' ' + time, '%m/%d/%Y %H:%M')
    now = datetime.now()
    diff = now - aa
    global past
    past = False
    # A leading '-' in the timedelta's string form means the moment is in
    # the future; normalize to a positive delta for formatting.
    neg = (str(diff)[0])
    if neg == '-':
        past = True
        diff2 = -diff
    else:
        diff2 = diff
    day = ''
    try:
        # 'N days, H:MM:SS...' — peel off the day count.
        day, hours = str.split(str(diff2), ' days, ')
        hours = str(hours)
        if int(day) == 1:
            day = day + ' day, '
        else:
            day = day + ' days, '
    except ValueError:
        # Either '< 1 day' ('H:MM:SS...') or exactly '1 day, H:MM:SS...';
        # the '1 day' prefix is split back out of `hours` further below.
        hours = str(diff2)
    hours, minutes, junk = str.split(hours, ':')
    hours = str(hours)
    if hours == '0':
        hours = ''
    else:
        try:
            # Handles the '1 day, H' remnant from the singular-day case.
            day, hours = str.split(hours, ', ')
            day = day + ', '
        except ValueError:
            pass
        if int(hours) == 1:
            hours = hours + ' hour '
        else:
            hours = hours + ' hours '
    # BUGFIX: was `if str(minutes) == 1:` — a str compared to an int is
    # always False, so the singular form ' minute' was never produced.
    if int(minutes) == 1:
        minutes = str(int(minutes)) + ' minute'
    else:
        minutes = str(int(minutes)) + ' minutes'
    if past == False:
        try:
            a = day + hours + minutes + ' Ago'
        except Exception:
            a = 'ERROR'
    else:
        try:
            a = day + hours + minutes + ' Away'
        except Exception:
            a = 'ERROR2'
    return (a, time2)
def naturaltime_without_indefinite_article(date):
    """Humanize ``date``, replacing a leading 'a'/'an' with the digit 1."""
    humanized = humanize.naturaltime(date)
    return re.sub(
        'an? (.*) ago',
        lambda m: '1 {} ago'.format(m.group(1)),
        humanized,
    )
def _last_run_delta_humanized(self) -> str:
    """Humanized elapsed time since the last recorded change."""
    delta = datetime.now() - self.changed_on
    return naturaltime(delta)
def do_openstack(self, args, arguments):
    """
    ::

        Usage:
            openstack info
            openstack yaml
            openstack yaml list [CLOUD]
            openstack image list [CLOUD] [--format=FORMAT]
            openstack flavor list [CLOUD] [--format=FORMAT]
            openstack vm list [CLOUD]
                              [--user=USER]
                              [--format=FORMAT]
                              [--ip=public|private]

        This command does some useful things.

        Arguments:
            FILE   a file name

        Options:
            -f      specify the file

    """
    # NOTE: the Usage block above is parsed to produce `arguments`
    # (docopt-style); do not reword it casually.
    # print(arguments)
    # Fall back to the configured default cloud when CLOUD is omitted.
    default = Default()
    cloud = arguments.CLOUD or default["global"]["cloud"]
    default.close()
    arguments.format = arguments["--format"] or 'table'
    arguments.user = arguments["--user"]
    fd = None  # flattened dict of results, filled by the image/vm branches
    if arguments.info:
        # `openstack info`: print provider information for the cloud.
        if arguments.CLOUD is None:
            arguments.CLOUD = cloud
        provider = OpenStack(arguments.CLOUD)
        provider.information()
    elif arguments.yaml and arguments.list:
        # `openstack yaml list [CLOUD]`: summarize clouds from the YAML
        # config, or dump a single cloud's section.
        filename = path_expand("~/.cloudmesh/cloudmesh.yaml")
        content = readfile(filename)
        d = yaml.load(content, Loader=yaml.RoundTripLoader)
        if arguments.CLOUD is None:
            default_cloud = default["global"]["cloud"]
            # print (yaml.dump(d, indent=4, Dumper=yaml.RoundTripDumper))
            info = OrderedDict()
            clouds = d["cloudmesh"]["clouds"]
            for cloud in clouds:
                info[cloud] = {
                    "default": "",
                    "name": cloud,
                    "type": clouds[cloud]["cm_type"],
                    "label": clouds[cloud]["cm_label"],
                    "flavor": clouds[cloud]["default"]["flavor"],
                    "image": clouds[cloud]["default"]["image"]
                }
                # Mark the configured default cloud with '*'.
                if default_cloud == cloud:
                    info[cloud]["default"] = "*"
            print(Printer.dict(info,
                               order=["default", "name", "type", "label",
                                      "flavor", "image"]))
        else:
            cloud = arguments.CLOUD
            clouds = d["cloudmesh"]["clouds"]
            print(yaml.dump(clouds[cloud], indent=4,
                            Dumper=yaml.RoundTripDumper))
    elif arguments.yaml:
        # `openstack yaml`: dump the whole cloudmesh YAML config.
        filename = path_expand("~/.cloudmesh/cloudmesh.yaml")
        content = readfile(filename)
        d = yaml.load(content, Loader=yaml.RoundTripLoader)
        print(yaml.dump(d, indent=4, Dumper=yaml.RoundTripDumper))
    elif arguments.image and arguments.list:
        # `openstack image list`: fetch images and print them flattened.
        if arguments.CLOUD is None:
            arguments.CLOUD = cloud
        # print (arguments.CLOUD)
        provider = OpenStack(arguments.CLOUD)
        images = provider.images()
        try:
            fd = flatme(images)
        except Exception as e:
            Error.traceback(error=e, debug=True, trace=True)
        order = ["name", "extra__metadata__user_id",
                 "extra__metadata__image_state", "extra__updated"]
        header = ["name", "user", "state", "updated"]
        if arguments.format == "table":
            print(arguments.CLOUD)
            print(Printer.dict(fd,
                               sort_keys="name",
                               order=order,
                               header=header,
                               output=arguments.format))
        # elif arguments.format == "dict":
        #     print(yaml.dump(images, indent=4, Dumper=yaml.RoundTripDumper))
        else:
            # Non-table formats get the unflattened structure.
            print(Printer.dict(images, output=arguments.format))
    elif arguments.flavor and arguments.list:
        # `openstack flavor list`: print available flavors.
        if arguments.CLOUD is None:
            arguments.CLOUD = cloud
        # print (arguments.CLOUD)
        provider = OpenStack(arguments.CLOUD)
        d = provider.flavors()
        print(arguments.CLOUD)
        print(Printer.dict(d,
                           sort_keys="id",
                           output=arguments.format,
                           order=['id', 'name', 'ram', 'vcpus', 'disk']))
    elif arguments.vm and arguments.list:
        # `openstack vm list`: list VMs, optionally filtered by --user,
        # as a table/inventory (with humanized creation time) or raw.
        if arguments.CLOUD is None:
            arguments.CLOUD = cloud
        # print (arguments.CLOUD)
        provider = OpenStack(arguments.CLOUD)
        elements = provider.vms()
        if arguments.user is not None:
            # Keep only VMs owned by the requested user.
            found = {}
            for element in elements:
                if elements[element]['extra']['userId'] == arguments.user:
                    found[element] = elements[element]
            elements = found
        try:
            fd = flatme(elements)
        except Exception as e:
            Error.traceback(error=e, debug=True, trace=True)
        order = ["name", 'extra__vm_state', 'extra__metadata__image',
                 'extra__metadata__flavor', "extra__key_name",
                 'extra__metadata__group', "extra__userId", "extra__created",
                 'private_ips', 'public_ips']
        header = ["name", "state", "image", "flavor", "key", "group",
                  "user", "created", "private", "public"]
        if arguments.format == "table" or arguments.format == "inventory":
            # Join IP lists and humanize the creation timestamp in place.
            for element in fd:
                fd[element]['private_ips'] = ','.join(
                    fd[element]['private_ips'])
                fd[element]['public_ips'] = ','.join(
                    fd[element]['public_ips'])
                # fd[element]["extra__created"] = humanize.timedelta(fd[element]["extra__created"])
                t = humanize.naturaltime(
                    timestring.Date(fd[element]["extra__created"]).date)
                fd[element]["extra__created"] = t
            if arguments["--ip"]:
                # Only show the requested IP kind (public|private).
                kind = arguments["--ip"]
                ips = {}
                for element in fd:
                    ips[element] = {
                        'name': fd[element]['name'],
                        kind: fd[element][kind + '_ips']
                    }
                if arguments.format == 'inventory':
                    # print ("[hosts]")
                    for host in ips:
                        print(ips[host][kind])
                else:
                    print(Printer.dict(ips,
                                       # sort_keys=True,
                                       order=["name", kind],
                                       output=arguments.format))
            else:
                print(arguments.CLOUD)
                print(Printer.dict(fd,
                                   # sort_keys=True,
                                   order=order,
                                   header=header,
                                   output=arguments.format))
        # elif arguments.format == "dict":
        #     print(yaml.dump(images, indent=4, Dumper=yaml.RoundTripDumper))
        elif arguments.format == 'flatten':
            pprint(fd)
        else:
            print(Printer.dict(elements, output=arguments.format))
    return ""
def humantime(dt):
    """Render a datetime in a human-friendly relative form."""
    return naturaltime(dt)
def extmon(config, stop):
    """Monitor configured feeds and render an HTML status page.

    Runs until the `stop` event is set: follows every configured feed via
    the IOT client and periodically rewrites `index.html` with a per-feed
    freshness class (green/yellow/red) and a humanized last-seen time.
    """
    # Shared state: per-feed records plus change-tracking bookkeeping,
    # guarded by stashlock (the feeddata callback mutates it too).
    stash = {FEEDS: {}, CHANGED: True, LASTCHANGE: 0}
    stashlock = Lock()
    templatedir = split(config.get(EXTMON2, 'template'))[0]
    templatefile = split(config.get(EXTMON2, 'template'))[1]
    wwwfile = join(config.get(EXTMON2, 'wwwpath'), 'index.html')
    feeds_list = config.get(EXTMON2, 'feeds')
    for feed in feeds_list:
        guid = config.get(feed, 'guid')
        stash[FEEDS][guid] = config.get(feed)
        stash[FEEDS][guid][SEEN] = False
        stash[FEEDS][guid][LAST_SEEN] = datetime.utcnow()
        max_age = stash[FEEDS][guid][MAX_AGE] = int(
            stash[FEEDS][guid][MAX_AGE])
        # Warn/error thresholds default to 2x/3x the max age if not given.
        if WARN_AGE not in stash[FEEDS][guid]:
            stash[FEEDS][guid][WARN_AGE] = max_age * 2
        else:
            stash[FEEDS][guid][WARN_AGE] = int(stash[FEEDS][guid][WARN_AGE])
        if ERROR_AGE not in stash[FEEDS][guid]:
            stash[FEEDS][guid][ERROR_AGE] = max_age * 3
        else:
            stash[FEEDS][guid][ERROR_AGE] = int(stash[FEEDS][guid][ERROR_AGE])
    client = IOT.Client(config=config.get(EXTMON2, 'agent'))
    # All incoming feed data is routed to __feeddata with our shared state.
    client.register_catchall_feeddata(
        partial(__feeddata, client, stash, stashlock))
    while not stop.is_set():
        with client:
            try:
                thing = client.create_thing("extmon2")
            except:
                logger.error("Failed to create_thing(extmon2). Giving up.")
                stop.set()
                return
            with stashlock:
                for guid in stash[FEEDS]:
                    try:
                        thing.follow(guid)
                    except:
                        logger.error("Failed to follow('%s'). Giving up.",
                                     guid)
                        stop.set()
                        return
                    # Resolve a display name from public metadata once.
                    if NAME not in stash[FEEDS][guid]:
                        desc = client.describe(guid)
                        if desc is None:
                            stash[FEEDS][guid][
                                NAME] = 'No Public Meta GUID: ' + guid
                        else:
                            stash[FEEDS][guid][NAME] = desc['meta']['label']
            # Render loop: regenerate the page when data changed or at
            # least every MINCHANGE seconds.
            while not stop.is_set():
                with stashlock:
                    if stash[CHANGED] or monotonic(
                    ) - stash[LASTCHANGE] >= MINCHANGE:
                        logger.debug("Stash changed, updating HTML")
                        nowtime = datetime.utcnow()
                        stash[LASTCHANGE] = monotonic()
                        for guid in stash[FEEDS]:
                            delta = nowtime - stash[FEEDS][guid][LAST_SEEN]
                            delta_secs = delta.total_seconds()
                            stash[FEEDS][guid][LASTSEEN] = naturaltime(
                                delta_secs)
                            # Freshness class: green while within max age
                            # (and seen at all), yellow within warn age,
                            # red otherwise.
                            if delta_secs < stash[FEEDS][guid][
                                    MAX_AGE] and stash[FEEDS][guid][SEEN]:
                                stash[FEEDS][guid][CLASS] = 'green'
                            elif delta_secs < stash[FEEDS][guid][WARN_AGE]:
                                stash[FEEDS][guid][CLASS] = 'yellow'
                            else:
                                stash[FEEDS][guid][CLASS] = 'red'
                            if stash[FEEDS][guid][SEEN] is False:
                                stash[FEEDS][guid][
                                    LASTSEEN] = "Not seen since restart: " + stash[
                                        FEEDS][guid][LASTSEEN]
                        j2env = Environment(
                            loader=FileSystemLoader(templatedir),
                            trim_blocks=True)
                        with open(wwwfile, 'w') as f:
                            f.write(
                                j2env.get_template(templatefile).render(
                                    feeds=stash[FEEDS]))
                        stash[CHANGED] = False
                stop.wait(timeout=1)
    # If this function ends prematurely ensure stop is set!
    stop.set()
def subtract_time(start, end):
    """Return the humanized difference between two datetimes as a string."""
    return str(humanize.naturaltime(start - end))
def modified(self):
    """HTML snippet describing how long ago this record was changed."""
    age = humanize.naturaltime(datetime.now() - self.changed_on)
    return Markup(f'<span class="no-wrap">{age}</span>')
def format_timestamp(tstamp):
    """Humanized local-time rendering of a UTC timestamp value."""
    local_dt = datetime_from_utc_to_local(parse_timestamp(tstamp))
    return naturaltime(local_dt)
def humanize_alt_filter(time):
    """Template filter: humanize a datetime, or 'N/A' when it is falsy."""
    if time:
        return humanize.naturaltime(datetime.now() - time)
    return "N/A"
def timeago(dt):
    """Relative 'time ago' phrase for a past datetime."""
    elapsed = datetime.now() - dt
    return humanize.naturaltime(elapsed)
def status(self, return_code_diff=False, sections=None):
    """Prints the current task status

    Parameters
    ----------
    return_code_diff : bool, optional
        When True and the source code is outdated, include a diff of the
        stored vs current source under the 'Code diff' key
    sections : list, optional
        Sections to include. Defaults to "name", "last_run",
        "outdated", "product", "doc", "location"
    """
    sections = sections or [
        'name', 'last_run', 'outdated', 'product', 'doc', 'location'
    ]
    p = self.product
    data = {}
    if 'name' in sections:
        data['name'] = self.name
    if 'type' in sections:
        data['type'] = type(self).__name__
    if 'status' in sections:
        data['status'] = self.exec_status.name
    if 'client' in sections:
        # FIXME: all tasks should have a client property
        data['client'] = (repr(self.client)
                          if hasattr(self, 'client') else None)
    if 'last_run' in sections:
        # A missing timestamp means the product was never generated.
        if p.metadata.timestamp is not None:
            dt = datetime.fromtimestamp(p.metadata.timestamp)
            date_h = dt.strftime('%b %d, %y at %H:%M')
            time_h = humanize.naturaltime(dt)
            data['Last run'] = '{} ({})'.format(time_h, date_h)
        else:
            data['Last run'] = 'Has not been run'
    # Compute outdated-ness unconditionally: several sections below use it.
    outd_data = p._outdated_data_dependencies()
    outd_code = p._outdated_code_dependency()
    outd = False
    if outd_code:
        outd = 'Source code'
    if outd_data:
        if not outd:
            outd = 'Upstream'
        else:
            outd += ' & Upstream'
    if 'outdated' in sections:
        data['Outdated?'] = outd
    if 'outdated_dependencies' in sections:
        data['Outdated dependencies'] = outd_data
    if 'outdated_code' in sections:
        data['Outdated code'] = outd_code
    if outd_code and return_code_diff:
        data['Code diff'] = (self.dag.differ.get_diff(
            p.metadata.stored_source_code,
            str(self.source),
            extension=self.source.extension))
    else:
        outd_code = ''
    if 'product_type' in sections:
        data['Product type'] = type(self.product).__name__
    if 'product' in sections:
        data['Product'] = repr(self.product)
    if 'product_client' in sections:
        # FIXME: all products should have a client property
        data['Product client'] = (repr(self.product.client) if hasattr(
            self.product, 'client') else None)
    if 'doc' in sections:
        data['Doc (short)'] = _doc_short(self.source.doc)
    if 'location' in sections:
        data['Location'] = self.source.loc
    return Row(data)
def humanize_timestamp(ts):
    """Relative-time phrase for an epoch timestamp, or '-' when missing."""
    if ts is None:
        return "-"
    # ts may arrive as a numeric string; coerce before converting.
    moment = datetime.fromtimestamp(int(ts))
    return humanize.naturaltime(moment)
def pubdate(self):
    """Humanized rendering of the publication date."""
    published = self.publish_date
    return humanize.naturaltime(published)
def _set_callback(self, job_run):
    """Schedule ``job_run`` on the reactor at its next run time."""
    delay = job_run.seconds_until_run_time()
    friendly = humanize.naturaltime(delay, future=True)
    log.info(f"Scheduling {job_run} {friendly} ({delay} seconds)")
    reactor.callLater(delay, self.run_job, job_run)
def pretty_update(self):
    """Describe when this object was last updated, in plain English."""
    last = self.last_updated
    if last is None:
        return "never"
    return humanize.naturaltime(datetime.datetime.now() - last)
def time_humanize(timestamp):
    """Humanize a UTC epoch timestamp."""
    moment = datetime.utcfromtimestamp(timestamp)
    return humanize.naturaltime(moment)
def modified(self):
    """Markup showing elapsed time since this record changed."""
    elapsed = datetime.now() - self.changed_on
    age = humanize.naturaltime(elapsed)
    return Markup('<span class="no-wrap">{}</span>'.format(age))
async def redditstats(self, ctx, user):
    """Discord command: show a Redditor's trophies, karma and account flags.

    Fetches /user/<user>/trophies/.json and /user/<user>/about/.json from
    reddit and replies with an embed (thumbnail, karma breakdown, trophy
    emotes, account-settings toggles, humanized account age).
    """
    trophies = []
    i = []  # de-duplicated trophy emotes, in first-seen order
    # Toggle emotes for boolean account settings.
    td = {
        True: "<:on:732805104620797965>",
        False: "<:off:732805190582927410>"
    }
    async with ctx.typing():
        async with aiohttp.ClientSession() as cs:
            async with cs.get(
                f"https://www.reddit.com/user/{user}/trophies/.json"
            ) as r:
                res = await r.json()
            async with cs.get(
                f"https://www.reddit.com/user/{user}/about/.json"
            ) as re:
                k = await re.json()
            if r.status != 200 or re.status != 200:
                # BUGFIX: was `if r.status or re.status == 400:` — operator
                # precedence made this `(r.status) or (re.status == 400)`,
                # and r.status is always a nonzero int, so every error was
                # reported as "Redditor not found". Compare each status.
                # NOTE(review): reddit typically returns 404 for unknown
                # users; the 400 literal is preserved from the original —
                # confirm the intended status code.
                if r.status == 400 or re.status == 400:
                    custom_message = " Redditor not found. "
                else:
                    custom_message = "\u200b"
                return await ctx.send(
                    f"Whoops, something went wrong.{custom_message}Error Codes: {r.status}, {re.status}"
                )
            # Map each trophy to its emote (blank when unknown).
            for item in res['data']['trophies']:
                if str(item['data']['name']).lower() in emotes:
                    trophies.append(emotes[str(item['data']['name']).lower()])
                else:
                    trophies.append(" ")
            # Keep only the first occurrence of each emote.
            for t in trophies:
                if t not in i:
                    i.append(t)
            cake = " <:cakeday:736660679938932876>" if await self.cakeday(
                user) else ''
            # Strip query strings from the CDN image URLs.
            icon = k['data']['icon_img']
            icon = icon.split("?")[0]
            banner = k['data']['subreddit']['banner_img']
            banner = banner.split("?")[0]
            embed = discord.Embed(color=self.client.colour)
            embed.set_thumbnail(url=icon)
            embed.url = f"https://reddit.com/user/{user}"
            # NOTE(review): conditional-expression statement — set_author is
            # only called when the profile title is non-empty; preserved.
            embed.set_author(
                name=f"{k['data']['subreddit']['title']}",
                url=f"https://reddit.com/user/{user}"
            ) if f"{k['data']['subreddit']['title']}" else None
            embed.title = k['data']['name'] + cake
            embed.description = f"<:karma:704158558547214426> **{k['data']['link_karma'] + k['data']['comment_karma']:,}** | 🔗 **{k['data']['link_karma']:,}** 💬 **{k['data']['comment_karma']:,}**"
            embed.description += f"\n<:asset:734531316741046283> [Icon URL]({icon})"
            if banner:
                embed.description += f" | [Banner URL]({banner})"
            embed.description += "\n" + ' '.join(i)
            # Humanize the account age from the UTC creation timestamp.
            dt = datetime.datetime.utcfromtimestamp(k['data']['created_utc'])
            created = humanize.naturaltime(datetime.datetime.utcnow() - dt)
            embed.add_field(
                name="Account Settings",
                value=
                f'{td[k["data"]["verified"]]} **Verified**\n{td[k["data"]["is_mod"]]} **Is Mod**\n{td[k["data"]["hide_from_robots"]]} **Hide From Robots**\n{td[k["data"]["has_subscribed"]]} **Has Subscribed**'
            )
            embed.set_footer(text=f'Account created {created}')
            await ctx.send(embed=embed)
def format_time(time):
    """Render a UTC datetime as a local relative phrase plus the UTC value.

    Returns "Unknown" for None or for values before UNKNOWN_CUTOFF.
    """
    if time is None or time < UNKNOWN_CUTOFF:
        return "Unknown"
    # Shift the UTC value by the local/UTC offset before humanizing.
    local_adjusted = time + (datetime.now() - datetime.utcnow())
    return "{} ({} UTC)".format(humanize.naturaltime(local_adjusted), time)