def handle_cmd(self, cmd, remainder, msg):
    """Track activity timestamps and answer the "!seen" command."""
    # Record the sender under its canonical (mapped, casefolded) nick,
    # plus an "@nick" entry for staff and "+nick" for logged-in users.
    who = self.mapname(msg.nick).casefold()
    self.seen[who] = time()
    if msg.admin or msg.staff:
        self.seen["@{}".format(who)] = time()
    if msg.logged_in:
        self.seen["+{}".format(who)] = time()

    if cmd != "!seen" or not self.allowed(msg) or not remainder:
        return False

    target = self.mapname(remainder.strip())
    key = target.casefold()
    last = self.seen.get(key)
    if not last:
        # Fall back to the persistent database (times stored in ms).
        cur = self.conn.cursor()
        cur.execute("SELECT time FROM seen WHERE user = ?", (key,))
        row = cur.fetchone()
        last = row and int(row[0]) / 1000
    if target.lower() == "lain":
        self.post(
            "Lain was never here, will never come here, "
            "and does not care about volafile at all. Please donate!")
    elif not last:
        self.post(
            "I have not seen {} since {}", target,
            naturaldelta(time() - self.start))
    else:
        self.post(
            "{} was last seen {} ago", target,
            naturaldelta(time() - last))
    return True
def print_datapoint(point, base_indent=' '):
    # Render one datapoint: a header line with the time and a
    # right-aligned [timecode], then the task text and a trailing note
    # (creation time or host uptime).
    this_indent = base_indent + ' '
    time_as_code = get_timecode(point)
    time_str = point.time_as_dt.strftime(time_format)
    # Padding so "[timecode]" lands flush against the right console edge.
    spaces_betw_time_and_id = (console_width - 1 - len(time_as_code) -
                               len(time_str) - len(this_indent))
    if point.__dict__.get('finished', False):
        # Finished datapoints are highlighted green and get a summary of
        # when/where they were completed and how long they took.
        iprint(icon.right_arrowhead + ' ' + time_str +
               ' ' * spaces_betw_time_and_id + '[' + time_as_code + ']',
               '', formatting=['bold', 'okgreen'])
        iprint('%s %s on %s (%s)' % (
            icon.check,
            datetime.datetime.fromtimestamp(
                point.finished['time']).strftime(time_format),
            point.finished['host'],
            humanize.naturaldelta(
                int(point.finished['time']) - int(point.time))),
            base_indent, formatting=['bold', 'okgreen'])
    else:
        iprint(icon.right_arrowhead + ' ' + time_str +
               ' ' * spaces_betw_time_and_id + '[' + time_as_code + ']',
               '', formatting=['bold'])
    iprint(point.task, this_indent)
    if point.time != point.created:
        # Same-day creations print only the clock time; older ones add a date.
        if point.created_as_dt.date() == point.time_as_dt.date():
            iprint('(created @%s)\n' % (
                point.created_as_dt.strftime(time_format)), this_indent)
        else:
            iprint('(created @%s)\n' % (
                point.created_as_dt.strftime(
                    date_format + '-' + time_format)), this_indent)
    else:
        iprint('pc was up for %s\n' % humanize.naturaldelta(point.uptime),
               this_indent)
def on_message(msg, server):
    """Reply to "!status" with realm and battleground info from the cache."""
    text = msg.get("text", "")
    if not re.findall(r"!status( .*)?", text):
        return
    session = Session()
    try:
        realm = (session.query(Realm)
                 .filter(Realm.name == wowconfig["realm"])
                 .one())
        reply = u"Currently *{0}* is ".format(realm.name)
        if realm.online:
            reply += "online (last check: {0})".format(
                humanize.naturaltime(realm.lastchecked)
            )
            reply += "\n\n*Battleground status*:\n```"
            for area in realm.areas:
                reply += "{0} : {1} controlled : {2} : next in {3}\n".format(
                    area.name,
                    area.faction,
                    area.status,
                    humanize.naturaldelta(area.next),
                )
            reply += "```"
        else:
            reply += "offline (last check: {0}, last seen: {1})".format(
                humanize.naturaltime(realm.lastchecked),
                humanize.naturaltime(realm.lastseen)
            )
        return reply
    except NoResultFound:
        # Realm has never been cached locally.
        return u"No status known on *{0}*".format(wowconfig["realm"])
def cb(tx_bytes, total_bytes):
    """Progress callback: log download progress every LOG_INTERVAL seconds.

    Args:
        tx_bytes: Bytes transferred so far.
        total_bytes: Expected total size in bytes.
    """
    elapsed = (datetime.now() - start_time).total_seconds()
    # Only log on whole-second multiples of the interval; every other
    # invocation is dropped to keep the log readable.
    if floor(elapsed) % LOG_INTERVAL != 0:
        return
    # Guard the rate computation: on the very first callback the elapsed
    # time can be zero, which previously risked a ZeroDivisionError.
    if elapsed <= 0:
        return
    nsize_tx = naturalsize(tx_bytes, binary=True, format='%.2f')
    nsize_total = naturalsize(total_bytes, binary=True, format='%.2f')
    speed_in_s = naturalsize(tx_bytes / elapsed, binary=True, format='%.2f')
    _log.info('Downloaded {} / {} in {} ({}/s)'.format(
        nsize_tx, nsize_total,
        naturaldelta(datetime.now() - start_time),
        speed_in_s))
def get_client_proxy_info(profile):
    """ Returns information on the current proxy (if available) """
    info = {'user_proxy': False}
    env = request.environ
    required = ('SSL_CLIENT_M_SERIAL', 'SSL_CLIENT_V_END', 'SSL_CLIENT_I_DN')
    if (any(key not in env for key in required)
            or env.get('SSL_CLIENT_VERIFY') != 'SUCCESS'):
        info['user_cert'] = False
    else:
        # NOTE(review): SSL_CLIENT_S_DN / SSL_CLIENT_CERT are read here but
        # are not part of the presence check above -- presumably the web
        # server always exports them when verification succeeded; confirm.
        info['user_dn'] = env['SSL_CLIENT_S_DN']
        info['user_cert'] = env['SSL_CLIENT_CERT']
    if profile.user_proxy:
        px = x509.load_pem_x509_certificate(
            profile.user_proxy.encode('ascii', 'ignore'),
            default_backend()
        )
        not_after = px.not_valid_after.replace(tzinfo=pytz.utc)
        time_left = not_after - datetime.now(tz=pytz.utc)
        # let's consider a valid proxy if you have at least 10 min
        if time_left.total_seconds() > 600:
            info['user_proxy'] = True
            info['user_proxy_time_left'] = humanize.naturaldelta(time_left)
    return info
def handle_cmd(self, cmd, remainder, msg):
    """Track moderator activity and answer the "!asleep" command."""
    # Any message from a logged-in admin proves the mods are awake.
    if msg.admin and msg.logged_in:
        AsleepCommand.last = datetime.now(), msg.nick
    if cmd != "!asleep":
        return False
    last = AsleepCommand.last
    if not last:
        self.post("Mods are asleep")
    elif last[0] + timedelta(minutes=20) <= datetime.now():
        # naturaldelta() accepts a datetime and measures against "now".
        self.post("Mods have been asleep since {}", naturaldelta(last[0]))
    else:
        self.post(
            "{} was awake and trolling {} ago",
            last[1], naturaldelta(last[0]))
    return True
async def _print_router_info(router, agent=None):
    """Print consensus details for *router*; if *agent* is given, also
    fetch and print extended information from onionoo.torproject.org."""
    loc = await router.get_location()
    print(u" name: {}".format(router.name))
    print(u" hex id: {}".format(router.id_hex))
    print(u"id hash (base64): {}".format(hashFromHexId(router.id_hex)))
    print(u" location: {}".format(
        "unknown" if loc.countrycode is None else loc.countrycode))
    print(u" address: {}:{} (DirPort={})".format(
        router.ip, router.or_port, router.dir_port))
    print(u" flags: {}".format(" ".join(router.flags)))
    diff = datetime.datetime.utcnow() - router.modified
    print(u" last published: {} ago ({})".format(
        humanize.naturaldelta(diff), router.modified))
    if agent:
        print(util.colors.italic(
            "Extended information from" +
            util.colors.green(" onionoo.torproject.org") + ":"))
        details = await router.get_onionoo_details(agent)
        # Fill in presentation defaults for keys onionoo may omit.
        details.setdefault('dir_address', '<none>')
        details.setdefault('city_name', 'unknown')
        details.setdefault('region_name', 'unknown')
        details.setdefault('country_name', 'unknown')
        details['or_addresses'] = ', '.join(details.get('or_addresses', []))
        details['verified_host_names_formatted'] = ', '.join(
            details['verified_host_names'])
        print(
            u" platform: {platform}\n"
            u" running: {running}\n"  # fixed output typo: was "runnning"
            u" dir_address: {dir_address}\n"
            u" OR addresses: {or_addresses}\n"
            u" location: {city_name}, {region_name}, {country_name}\n"
            u" host names: {verified_host_names_formatted}\n"
            u" AS: {as} ({as_name})\n"
            u" last restarted: {last_restarted}\n"
            u" last changed: {last_changed_address_or_port}\n"
            u" last seen: {last_seen}\n"
            u" probabilities: guard={guard_probability} middle={middle_probability} exit={exit_probability}\n"
            u"".format(**details)
        )
def overall_stats(metadata: LanguageIndex) -> Dict[str, Any]:
    # Aggregate annotation statistics across all languages.
    # NOTE: this function returns locals(), so every local name below
    # becomes a key of the returned dict -- renaming any variable here
    # changes the result schema.
    good_annotations = count_annotations(metadata, 'good')
    bad_annotations = count_annotations(metadata, 'bad')
    total_annotations = good_annotations + bad_annotations
    # How many languages have at least 1 / 5 / 10 "good" annotations.
    num_1_annotations = 0
    num_5_annotations = 0
    num_10_annotations = 0
    for lang, samples in metadata.items():
        n_good = count_lang_annotations(samples, 'good')
        if n_good >= 1:
            num_1_annotations += 1
        if n_good >= 5:
            num_5_annotations += 1
        if n_good >= 10:
            num_10_annotations += 1
    # Human-readable total time, assuming ~20 seconds per annotation.
    time_annotated = humanize.naturaldelta(total_annotations * 20)
    # Rebinds `samples`: per-language sample counts for the result.
    samples = {lang: len(xs) for lang, xs in metadata.items()}
    return locals()
def add_human_times_to_header_super_verbose(hdr):
    """Return a shallow-copied header with humanized start/stop times.

    Adds a 'human_time' entry to the start and stop sub-dicts and a
    'scan_duration' entry to the stop dict; the input dicts are copied,
    so the caller's header is left untouched at those levels.
    """
    out = dict(hdr)
    out['start'] = dict(out['start'])
    out['stop'] = dict(out['stop'])
    start_ts = out['start']['time']
    stop_ts = out['stop']['time']
    out['start']['human_time'] = humanize.naturaltime(start_ts)
    out['stop']['human_time'] = humanize.naturaltime(stop_ts)
    out['stop']['scan_duration'] = humanize.naturaldelta(stop_ts - start_ts)
    return out
def apertium_stats(phenny, input):
    '''Fetch function and usage statistics from APy.'''
    opener = urllib.request.build_opener()
    opener.addheaders = headers
    try:
        response = opener.open(phenny.config.APy_url + '/stats').read()
    except urllib.error.HTTPError as error:
        handle_error(error)
    jdata = json.loads(response.decode('utf-8'))
    periodStats = jdata['responseData']['periodStats']
    runningPipes = jdata['responseData']['runningPipes']
    holdingPipes = jdata['responseData']['holdingPipes']
    useCount = jdata['responseData']['useCount']
    uptime = jdata['responseData']['uptime']

    # rudimentary pluralizer
    def plural(num, word, be=False):
        if num == 1:
            if be:
                return 'is {:d} {:s}'.format(num, word)
            return '{:d} {:s}'.format(num, word)
        if be:
            return 'are {:d} {:s}s'.format(num, word)
        return '{:d} {:s}s'.format(num, word)

    # With an argument, report on that single pipe only.
    pipe = input.group(2)
    if pipe:
        runningPipes = jdata['responseData']['runningPipes']
        useCount = jdata['responseData']['useCount']
        if pipe in runningPipes:
            phenny.say('The {:s} pipe has {:s} and has been used {:s}.'.format(
                pipe, plural(runningPipes[pipe], 'instance'),
                plural(useCount[pipe], 'time')))
        else:
            phenny.say('There is no running pipe called {:s}. (You can run .apystats in a '
                       'private query for details about all pipes.)'.format(pipe))
        return
    # No argument: give the hourly summary...
    phenny.say('In the last hour, APy has processed {:s}, totalling {:s} '
               'and {:.2f} seconds, averaging {:.2f} characters per second.'.format(
                   plural(periodStats['requests'], 'request'),
                   plural(periodStats['totChars'], 'character'),
                   periodStats['totTimeSpent'], periodStats['charsPerSec']))
    # ...then either a terse note (in channels) or the full per-pipe
    # listing (in private queries).
    if input.sender.startswith('#'):
        phenny.say('There {:s}.'.format(
            plural(len(runningPipes), 'running translation pipe', be=True)))
        phenny.say('(Run .apystats <pipe> for details about <pipe>, or '
                   'run .apystats in a private query for details about all pipes.)')
    else:
        phenny.say('There {:s}:'.format(
            plural(len(runningPipes), 'running translation pipe', be=True)))
        for langs in runningPipes:
            phenny.say(' {:s}: {:s}, used {:s}'.format(
                langs, plural(runningPipes[langs], 'instance'),
                plural(useCount[langs], 'time')))
    phenny.say('There {:s}.'.format(
        plural(holdingPipes, 'holding pipe', be=True)))
    phenny.say('APy has been up for {:s}.'.format(naturaldelta(uptime)))
def latest():
    """Return (as JSON) the five most recently created short URLs."""
    recent = Url.query.order_by(Url.creation_date.desc()).limit(5)
    items = []
    for url in recent:
        code = _convert_to_code(url.id)
        age_seconds = (datetime.datetime.now() -
                       url.creation_date).total_seconds()
        items.append({
            'url': url.url,
            'shorturl': url_for('main.expand', code=code, _external=True),
            'clicks': url.clicks,
            'creation_date': humanize.naturaldelta(age_seconds),
        })
    return jsonify(items=items)
def __call__(self, cmd, remainder, msg):
    """Record the sender's last-seen time and answer "!seen" queries."""
    self.seen[msg.nick.casefold()] = time()
    if cmd != "!seen":
        return False
    if not self.allowed(msg) or not remainder:
        return False
    target = remainder.strip()
    key = target.casefold()
    last = self.seen.get(key)
    if not last:
        # Fall back to the persistent store; times are kept in milliseconds.
        cur = self.conn.cursor()
        cur.execute("SELECT time FROM seen WHERE user = ?", (key,))
        row = cur.fetchone()
        last = row and int(row[0]) / 1000
    if not last:
        self.post("I have not seen {} since {}", target,
                  naturaldelta(time() - self.start))
    else:
        self.post("{} was last seen {} ago", target,
                  naturaldelta(time() - last))
    return True
def _commenthandler(self, bug, bugid, data):
    """Annotate data[bugid] with the age of the bug's newest comment,
    then delegate to the subclass commenthandler hook."""
    comments = bug['comments']
    bugid = str(bugid)
    if self.has_last_comment_time():
        if comments:
            # Age of the most recent comment relative to now (UTC).
            now = pytz.utc.localize(datetime.utcnow())
            newest = dateutil.parser.parse(comments[-1]['time'])
            data[bugid]['last_comment'] = humanize.naturaldelta(now - newest)
        else:
            data[bugid]['last_comment'] = ''
    self.commenthandler(bug, bugid, data)
def humanize_duration(delta):
    """Render a time delta as a human-readable duration string.

    Parameters
    ----------
    delta : datetime.timedelta
        The duration to format.

    Returns
    -------
    str
        Natural-language description of *delta* (via humanize).
    """
    return humanize.naturaldelta(delta)
def display_repo(user_input):
    """Add a Workflow item describing the latest Travis build of a repo.

    *user_input* is an "owner/repo" slug. If the API response does not
    decode as JSON (e.g. the slug matched no single repo), fall back to
    listing matching repos instead.
    """
    req = web.get('%s/repos/%s' % (API_BASE_URL, user_input))
    try:
        repo = req.json()
    except ValueError:
        # Narrowed from a bare `except:`: .json() raises ValueError (or a
        # subclass) on a non-JSON body; anything else should propagate.
        parts = user_input.split('/')
        return display_repos(parts[0], parts[1])
    status = get_status(repo['last_build_result'])
    title = 'Build #%s (%s)' % (repo['last_build_number'], status)
    subtitle = None
    if repo['last_build_duration']:
        # Renamed from `timedelta`, which shadowed datetime.timedelta.
        duration = datetime.timedelta(seconds=repo['last_build_duration'])
        subtitle = 'Duration: %s' % humanize.naturaldelta(duration)
    wf.add_item(title, subtitle, arg=user_input, autocomplete=user_input,
                valid=True)
def build_proxy(user_proxy, csr_priv_key):
    """ Builds a new proxy with the chain sent by the user plus our private key
    Returns the proxy and the time left (humanized)
    """
    # Extract every PEM certificate block from the user-supplied chain.
    pem_chain = CERT_RE.findall(user_proxy)
    x509_chain = [x509.load_pem_x509_certificate(c.encode('ascii', 'ignore'),
                                                 default_backend())
                  for c in pem_chain]
    if len(x509_chain) < 2:
        # should have 2 certs in the chain
        current_app.logger.debug("HEY!")
        abort(400)
    proxy = x509_chain[0]
    issuer_names = [n for n in proxy.issuer]
    subject_names = [n for n in proxy.subject]
    # should be the same but the last CN
    if issuer_names != subject_names[:-1]:
        current_app.logger.debug("not same name!")
        current_app.logger.debug("%s" % issuer_names)
        current_app.logger.debug("%s" % subject_names)
        abort(400)
    if subject_names[-1].oid != NameOID.COMMON_NAME:
        current_app.logger.debug("not a CN!")
        abort(400)
    private_key = serialization.load_pem_private_key(
        csr_priv_key.encode('ascii'),
        password=None,
        backend=default_backend()
    )
    # The proxy must have been signed with the key we generated for the
    # CSR: compare the RSA moduli of both keys.
    p_mod = proxy.public_key().public_numbers().n
    priv_mod = private_key.private_numbers().public_numbers.n
    if p_mod != priv_mod:
        # signed with a different key!?
        current_app.logger.debug("DIFFERENT KEY!")
        abort(400)
    # New chain: the user's proxy cert, our private key, then the rest of
    # the user-supplied chain.
    new_proxy_chain = [pem_chain[0], csr_priv_key]
    new_proxy_chain.extend(pem_chain[1:])
    time_left = (proxy.not_valid_after.replace(tzinfo=pytz.utc) -
                 datetime.now(tz=pytz.utc))
    return '\n'.join(new_proxy_chain), humanize.naturaldelta(time_left)
def generate_success_email(self, cutoutjob):
    """Render the success-notification e-mail body for a finished cutout job.

    Returns the rendered HTML string, or None if anything fails (the
    exception is logged rather than raised).
    """
    try:
        tag = None
        files_size = None
        start = cutoutjob.cjb_start_time
        finish = cutoutjob.cjb_finish_time
        if cutoutjob.cjb_tag:
            tag = cutoutjob.cjb_tag.upper()
        if cutoutjob.cutout_set.count():
            # Total size of all produced cutout files, humanized.
            sum_sizes = cutoutjob.cutout_set.aggregate(
                sum_size=Sum('ctt_file_size'))
            files_size = humanize.naturalsize(sum_sizes.get("sum_size"))
        tdelta = finish - start
        seconds = tdelta.total_seconds()
        # "H:MM:SS" form with sub-second digits stripped, plus a
        # natural-language version of the same duration.
        execution_time = str(
            datetime.timedelta(seconds=seconds)).split('.')[0]
        execution_time_humanized = humanize.naturaldelta(
            datetime.timedelta(seconds=seconds))
        image_formats = cutoutjob.cjb_image_formats
        if image_formats is None:
            image_formats = 'png'
        context = dict({
            "username": cutoutjob.owner.username,
            "target_display_name": cutoutjob.cjb_product.prd_display_name,
            "cutoutjob_display_name": cutoutjob.cjb_display_name,
            # NOTE(review): this key has a trailing ':' -- looks like a
            # typo, but the template may depend on it; confirm before fixing.
            "cutoutjob_type:": cutoutjob.cjb_job_type,
            "cutoutjob_tag": tag,
            # convert to arcsec
            "cutoutjob_xsize": int((float(cutoutjob.cjb_xsize) * 60)),
            "cutoutjob_ysize": int((float(cutoutjob.cjb_ysize) * 60)),
            "cutoutjob_image_formats": image_formats,
            "n_objects": cutoutjob.cjb_product.table.catalog.ctl_num_objects,
            "n_files": cutoutjob.cutout_set.count(),
            "files_size": files_size,
            "start": str(start.strftime("%Y-%m-%d %H:%M")),
            "finish": str(finish.strftime("%Y-%m-%d %H:%M")),
            "execution_time": execution_time,
            "execution_time_humanized": execution_time_humanized
        })
        return render_to_string("cutout_notification_finish.html", context)
    except Exception as e:
        self.logger.error(e)
def print_finish_message(fin_dict):
    """Report the outcome of finishing a task described by *fin_dict*."""
    status = fin_dict['status']
    if status == 'not_found':
        print('no task found!')
        return
    point = fin_dict['datapoint']
    fin_time = datetime.datetime.fromtimestamp(point.finished['time'])
    if status == 'finished_before':
        # Task was already completed earlier; show it with a warning.
        print_datapoint(point)
        print(colorize(['bold', 'warning'],
                       'task was finished @%s on %s' % (
                           fin_time.strftime(time_format),
                           point.finished['host'])))
        return
    start_time = datetime.datetime.fromtimestamp(point.time)
    print('task "%s" \nfinished @%s (took %s)' % (
        point.task,
        fin_time.strftime(time_format),
        humanize.naturaldelta(fin_time - start_time)))
def subreddit(bot, args, sender, source):
    """Ran whenever a subreddit is mentioned"""
    if dave.config.redis.exists(
            "reddit:subreddit:mentioned:{}:{}".format(args[0], source)):
        # if this subreddit was mentioned in the last x seconds (see the
        # setex below), don't spam info about it
        return
    if not dave.config.redis.exists("reddit:subreddit:{}".format(args[0])):
        # Not cached: fetch the subreddit's about.json from reddit.
        req = get("https://reddit.com/r/{}/about.json".format(args[0]),
                  headers={'user-agent': 'irc bot (https://github.com/w4)'})
        if req.status_code != 200:
            return
        if "/search.json" in req.url:
            # 404'd, reddit redirected us to the search page because they
            # couldn't find the user.
            return
        req = req.json()
        # Cache the parsed response for 10 minutes.
        dave.config.redis.setex("reddit:subreddit:{}".format(args[0]), 600,
                                pickle.dumps(req))
    else:
        req = pickle.loads(
            dave.config.redis.get("reddit:subreddit:{}".format(args[0])))
    resp = req["data"]
    # don't give info about this user again in this channel for 300 seconds
    dave.config.redis.setex(
        "reddit:subreddit:mentioned:{}:{}".format(args[0], source), 300, 1)
    bot.msg(source, assembleFormattedText(
        A.normal[
            A.bold[A.fg.lightRed["[NSFW] "]] if resp["over18"] else "",
            A.normal[resp["title"]],
            " ({}), a community for {}. {} subscribers, {} browsing right now.".format(
                resp["display_name_prefixed"],
                naturaldelta(datetime.utcnow().timestamp() - resp["created"]),
                intcomma(resp["subscribers"]),
                intcomma(resp["accounts_active"])
            )
        ]
    ))
def generate_failure_email(self, cutoutjob):
    """Render the failure-notification e-mail body for a cutout job.

    Returns the rendered HTML, or None if rendering fails (the error is
    logged instead of raised).
    """
    try:
        elapsed = timezone.now() - cutoutjob.cjb_start_time
        human_elapsed = humanize.naturaldelta(
            datetime.timedelta(seconds=elapsed.total_seconds()))
        context = {
            "username": cutoutjob.owner.username,
            "target_display_name": cutoutjob.cjb_product.prd_display_name,
            "cutoutjob_display_name": cutoutjob.cjb_display_name,
            "execution_time_humanized": human_elapsed,
        }
        return render_to_string("cutout_notification_error.html", context)
    except Exception as e:
        self.logger.error(e)
def elapsed_time(start, end):
    """Calculate the elapsed time for a service activity.

    Arguments:
      start (:py:class:`str`): The activity start time.
      end (:py:class:`str`): The activity end time.

    Returns:
      :py:class:`tuple`: The start and end times and humanized elapsed
        time.
    """
    parsed = (safe_parse(start), safe_parse(end))
    if None in parsed:
        logger.warning('failed to generate elapsed time')
        text = 'elapsed time not available'
    else:
        text = 'took {}'.format(naturaldelta(parse(end) - parse(start)))
    start_ts, end_ts = (to_utc_timestamp(t) for t in parsed)
    return start_ts, end_ts, text
def index():
    """Query the taskd server for stats and render them as HTML."""
    tc = TaskdConnection()
    tc.client_cert = "pki/client.cert.pem"
    tc.client_key = "pki/client.key.pem"
    tc.cacert_file = "pki/ca.cert.pem"
    with open("conf.json") as j:
        config = json.load(j)
    tc.server = config['server']
    tc.group = config['group']
    tc.username = config['username']
    tc.uuid = config['user_uuid']
    tc.connect()
    resp = tc.stats()
    d = dict(line.split(":") for line in resp.data)
    # do some humanizing of the data
    for key, value in d.items():
        if "bytes" in key or key == "user data":
            d[key] = humanize.naturalsize(value)
    d['uptime'] = humanize.naturaldelta(
        datetime.timedelta(seconds=int(d['uptime'])))
    return template("stats.tpl", response=d)
def check_realm(server):
    """Poll the WoW API for realm status, update the cached Realm row,
    and announce online/offline transitions."""
    logging.debug("Checking realm status")
    realmInfo = connection.get_realm(EUROPE, wowconfig["realm"])
    session = Session()
    realm = session.query(Realm).filter(Realm.name == realmInfo.name).first()
    if realm is None:
        logging.debug("Never seen realm '{0}', creating new cache entry"
                      .format(realmInfo.name))
        realm = Realm(name=realmInfo.name, lastseen=datetime.datetime.now())
        session.add(realm)
    prevonline = realm.online
    prevlastseen = realm.lastseen
    realm.online = realmInfo.status
    realm.lastchecked = datetime.datetime.now()
    if realm.online:
        realm.lastseen = realm.lastchecked
    set_pvparea(session, realm, realmInfo.tolbarad)
    set_pvparea(session, realm, realmInfo.wintergrasp)
    session.commit()
    if (prevonline != realm.online):
        if realm.online:
            # NOTE(review): this branch posts to the literal "announcements"
            # channel while the offline branch uses the configured one --
            # confirm which is intended.
            send_message(
                server,
                "announcements",
                u"{0} just came online! (offline for {1})".format(
                    realm.name,
                    humanize.naturaldelta(
                        datetime.datetime.now() - prevlastseen)
                )
            )
        else:
            # BUG FIX: was u"{0} just went offline".realm.name, which
            # raised AttributeError at runtime; the intent was .format().
            send_message(
                server,
                wowconfig["announcements"]["realm"],
                u"{0} just went offline".format(realm.name)
            )
def get_execution_stats(cls, fn, input_df, fn_args, fn_kwargs,
                        calculate_memory):
    """Run *fn* on *input_df*, timing it and optionally measuring memory.

    Returns the function's output DataFrame plus an ExecutionStats
    namedtuple (exec_time, step_number, input_memory_size,
    output_memory_size).
    """
    started = time()
    output_df = get_pandas_func(cls, fn)(input_df, *fn_args, **fn_kwargs)
    exec_time = time() - started
    exec_time_pretty = humanize.naturaldelta(exec_time)
    # Sub-second runs all humanize to "a moment"; show real numbers instead.
    if exec_time_pretty == "a moment":
        exec_time_pretty = f"{round(exec_time,6)} seconds"
    step_number = calc_step_number(fn.__name__, input_df)
    if calculate_memory:
        input_memory_size = StepStats.calc_df_series_memory(input_df)
        output_memory_size = StepStats.calc_df_series_memory(output_df)
    else:
        input_memory_size = None
        output_memory_size = None
    ExecutionStats = namedtuple(
        "ExecutionStats",
        "exec_time step_number input_memory_size output_memory_size",
    )
    stats = ExecutionStats(exec_time_pretty, step_number,
                           input_memory_size, output_memory_size)
    return output_df, stats
async def _print_router_info(router, agent=None):
    """Print consensus details for *router*; if *agent* is given, also
    fetch and print extended information from onionoo.torproject.org."""
    loc = await router.get_location()
    print(u" name: {}".format(router.name))
    print(u" hex id: {}".format(router.id_hex))
    print(u"id hash (base64): {}".format(hashFromHexId(router.id_hex)))
    print(u" location: {}".format(
        "unknown" if loc.countrycode is None else loc.countrycode))
    print(u" address: {}:{} (DirPort={})".format(
        router.ip, router.or_port, router.dir_port))
    print(u" flags: {}".format(" ".join(router.flags)))
    diff = datetime.datetime.utcnow() - router.modified
    print(u" last published: {} ago ({})".format(humanize.naturaldelta(diff),
                                                 router.modified))
    if agent:
        print(
            util.colors.italic("Extended information from" +
                               util.colors.green(" onionoo.torproject.org") +
                               ":"))
        details = await router.get_onionoo_details(agent)
        # Fill in presentation defaults for keys onionoo may omit.
        details.setdefault('dir_address', '<none>')
        details.setdefault('city_name', 'unknown')
        details.setdefault('region_name', 'unknown')
        details.setdefault('country_name', 'unknown')
        details['or_addresses'] = ', '.join(details.get('or_addresses', []))
        details['verified_host_names_formatted'] = ', '.join(
            details['verified_host_names'])
        print(
            u" platform: {platform}\n"
            u" running: {running}\n"  # fixed output typo: was "runnning"
            u" dir_address: {dir_address}\n"
            u" OR addresses: {or_addresses}\n"
            u" location: {city_name}, {region_name}, {country_name}\n"
            u" host names: {verified_host_names_formatted}\n"
            u" AS: {as} ({as_name})\n"
            u" last restarted: {last_restarted}\n"
            u" last changed: {last_changed_address_or_port}\n"
            u" last seen: {last_seen}\n"
            u" probabilities: guard={guard_probability} middle={middle_probability} exit={exit_probability}\n"
            u"".format(**details))
async def check_cooldowns(self, ctx):
    """Return True when *ctx.author* may run the command, or raise
    CooldownError carrying the remaining wait time."""
    query = {
        "uid": ctx.author.id,
        "cmd": ctx.command.name
    }
    cooldown = await self.col2.find_one(query)
    if not cooldown:
        return True
    expiry = int(float(cooldown["time"]))
    if expiry <= time.time():
        # Cooldown window has passed -- clear the stale record.
        await self.col2.delete_one(query)
        return True
    if cooldown["uses"] >= cooldown["permitted_uses"]:
        period = humanize.naturaldelta(
            dt.timedelta(seconds=expiry - time.time()))
        raise CooldownError(f'Try again in {period}')
    return True
def Train(self) -> 'Model':
    """Train the model.

    Returns:
      The model instance.

    Raises:
      UnableToAcquireLockError: If the model is locked (i.e. there is
        another process currently modifying the model).
    """
    self.corpus.Create()
    with self.training_lock.acquire():
        self.backend.Train(self.corpus)
        num_epochs = self.config.training.num_epochs
        # Sum the wall time of the configured number of epochs.
        telemetry = self.TrainingTelemetry()[:num_epochs]
        total_time_ms = sum(t.epoch_wall_time_ms for t in telemetry)
        logging.info('Trained model for %d epochs in %s ms (%s).',
                     num_epochs,
                     humanize.intcomma(total_time_ms),
                     humanize.naturaldelta(total_time_ms / 1000))
    return self
async def status(self, ctx):
    # Build and send an embed summarizing every UptimeRobot monitor:
    # online/offline state, last transition, and time in that state.
    async with ctx.typing():
        embed = discord.Embed(title="Current Service Status",
                              url="https://utils.thom.club/status")
        url = "https://api.uptimerobot.com/v2/getMonitors"
        payload = f"api_key={uptime_robot_api}&format=json&logs=1&all_time_uptime_ratio=1"
        headers = {
            'content-type': "application/x-www-form-urlencoded",
            'cache-control': "no-cache"
        }
        offline_count = 0
        async with aiohttp.ClientSession() as session:
            async with session.post(url, data=payload,
                                    headers=headers) as request:
                returned_json = await request.json()
        for monitor in returned_json.get("monitors", []):
            monitor_info = ""
            # UptimeRobot status codes above 7 mean the monitor is down.
            if monitor["status"] > 7:
                offline_count += 1
                monitor_info += "**Offline**\n\n"
                online_search = True
                last_text = "Last Online: {}\n\nI have been down for {}\n"
            else:
                monitor_info += "**Online**\n\n"
                online_search = False
                last_text = "Last Offline: {}\n\nI have been online for {}\n"
            last_event = self.get_last_event_time(monitor, online_search)
            # The epoch is used as a sentinel for "no recorded event".
            if last_event != datetime.datetime(1970, 1, 1):
                delta_since_last = datetime.datetime.now() - last_event
                last_text = last_text.format(
                    last_event.strftime("%a, %b %d at %I:%S%p"),
                    humanize.naturaldelta(delta_since_last))
            else:
                last_text = last_text.format("never", "all known history")
            monitor_info += last_text
            embed.add_field(name=f"__{monitor['friendly_name']}__",
                            value=monitor_info, inline=False)
        # Red embed if anything is down, green otherwise.
        if offline_count > 0:
            embed.colour = discord.Colour.red()
        else:
            embed.colour = discord.Colour.green()
        await ctx.reply(embed=embed)
def process_service_instance(
        self, service_instance: ServiceInstance) -> BounceResults:
    # Bounce one service instance via marathon, recording queue-wait,
    # setup, and total-bounce timing metrics along the way.
    bounce_timers = self.setup_timers(service_instance)
    if service_instance.enqueue_time is not None:
        # Time the instance spent queued before a worker picked it up.
        bounce_timers.processed_by_worker.record(
            time.time() - service_instance.enqueue_time)
    human_bounce_by = humanize.naturaldelta(
        datetime.timedelta(
            seconds=(time.time() - service_instance.bounce_by)))
    self.log.info(
        f"{self.name} processing {service_instance.service}.{service_instance.instance} (bounce_by {human_bounce_by} ago)"
    )  # noqa E501
    bounce_timers.setup_marathon.start()
    return_code, bounce_again_in_seconds = deploy_marathon_service(
        service=service_instance.service,
        instance=service_instance.instance,
        clients=self.marathon_clients,
        soa_dir=marathon_tools.DEFAULT_SOA_DIR,
        marathon_apps_with_clients=None,
    )
    bounce_timers.setup_marathon.stop()
    self.log.info(
        f"{self.name} setup marathon completed with exit code {return_code} for {service_instance.service}.{service_instance.instance}"
    )  # noqa E501
    if bounce_again_in_seconds:
        self.log.info(
            f"{self.name} {service_instance.service}.{service_instance.instance} not in steady state so bouncing again in {bounce_again_in_seconds}"
        )  # noqa E501
    else:
        self.log.info(
            f"{self.name} {service_instance.service}.{service_instance.instance} in steady state"
        )
    if service_instance.processed_count > 0:
        # Total wall-clock length of the bounce across retries.
        bounce_timers.bounce_length.record(
            time.time() - service_instance.bounce_start_time)
    return BounceResults(bounce_again_in_seconds, return_code)
def do_bonus_user(client, message, *args, **kwargs):
    """Grant the user their once-a-day FiberCoin bonus, or tell them how
    long they still have to wait."""
    user = kwargs.get('user')
    session = kwargs.get('session')
    time_passed = datetime.utcnow() - user.last_dig
    time_needed = timedelta(days=1)
    if time_passed < time_needed:
        # BUG FIX: the reply used to show the full 24h cooldown
        # (time_needed) instead of the remaining wait computed here.
        wait = time_needed - time_passed
        humanize.i18n.activate("it")
        message.reply("🚫 Devi attendere ancora {}!\n".format(
            humanize.naturaldelta(wait)))
        humanize.i18n.deactivate()
        return
    amount = random.randint(*DAILY_AMOUNT_RANGE)
    # Never let a negative balance eat into the bonus.
    if user.coins < 0:
        user.coins = 0
    user.coins += amount
    user.last_dig = datetime.utcnow()
    message.reply("Hai ricevuto 💸**{} FiberCoin**\n"
                  "💰 Borsellino: 💸**{} FiberCoin**\n\n"
                  "ℹ Puoi richiedere il bonus una volta al giorno."
                  .format(amount, user.coins))
    session.commit()
def _drop_dead_tags(data_getter, measurement, combination, dryrun, max_time):
    """Delete (or, when *dryrun*, only log) series whose most recent write
    is older than *max_time* seconds."""
    # BUG FIX: capture the current clock BEFORE the unpacking below --
    # it rebinds the name `time`, shadowing the time() function, so the
    # later call raised TypeError. The local must keep the name `time`
    # because MOST_RECENT_QUERY.format(**locals()) relies on it.
    now = time()
    hostname, service, time = (combination["hostname"],
                               combination["service"],
                               combination["time"])
    most_recent_time = data_getter.exec_query(
        MOST_RECENT_QUERY.format(**locals()))
    if not len(most_recent_time):
        logger.info("%s %s %s has no data", measurement, hostname, service)
        return
    most_recent_time = most_recent_time[0]["time"]
    seconds_since_last_write = now - most_recent_time
    logger.info("%s %s %s hasn't been written to in %s",
                measurement, hostname, service,
                naturaldelta(seconds_since_last_write))
    if seconds_since_last_write > max_time:
        logger.info("Going to delete %s %s %s",
                    measurement, hostname, service)
        if not dryrun:
            # data_getter.drop_measurement(measurement)
            pass
def command_about(self, event):
    """Reply with an embed describing the bot: server count, uptime, and
    whether the caller is a global admin."""
    embed = MessageEmbed()
    embed.set_author(name='Planeboat',
                     icon_url=self.client.state.me.avatar_url,
                     url='https://rowboat.party/')
    embed.description = BOT_INFO
    embed.add_field(name='Servers', value=str(Guild.select().count()),
                    inline=True)
    uptime = humanize.naturaldelta(datetime.utcnow() - self.startup)
    embed.add_field(name='Uptime', value=uptime, inline=True)
    if self.is_global_admin(event.author.id):
        embed.add_field(name='Admin',
                        value='You are a rowboat global admin!')
    event.msg.reply(embed=embed)
async def list_reminders(self, ctx: commands.Context):
    """Show the ten latest reminders created by you

    - Also shows the id and when it will send the reminder
    - It will only show reminders that are still active
    - Reminders will be removed if the bot is unable to access the channel
    """
    reminders = (await Reminder.query
                 .where(Reminder.user_id == ctx.author.id)
                 .order_by(Reminder.reminder_time.asc())
                 .limit(10)
                 .gino.all())
    embed = self.bot.create_embed(title="Reminders")
    if not reminders:
        embed.description = f"{Icons.ALERT} No reminders were found."
    for entry in reminders:
        # naturaldelta() accepts a datetime and measures from "now".
        remaining = naturaldelta(entry.reminder_time)
        embed.add_field(name=f"{entry.reminder_id}: In {remaining}",
                        value=entry.reminder_text,
                        inline=False)
    await ctx.send(embed=embed)
def print_day(day, tags=None, base_indent=' ', suppress_empty=False):
    # Print all of *day*'s datapoints, with per-host boot markers,
    # optionally filtered by *tags* and skipping empty days entirely.
    if suppress_empty:
        if not day.datapoints:
            return
        if tags:
            if not day.get_datapoint_list(tags=tags):
                return
    this_indent = base_indent + ' '
    iprint(colorize('bold', colorize('underline', '%s' % (
        day.day.strftime(date_format + ' (%A)')))), base_indent)
    if not day.datapoints:
        iprint('nothing done.', this_indent)
        print('\n')
        return
    for host in day.datapoints.keys():
        # Reconstruct the boot moment from the first datapoint's uptime.
        boot_time = day.datapoints[host][0].time_as_dt - datetime.timedelta(
            seconds=day.datapoints[host][0].uptime)
        if boot_time.date() == datetime.datetime.now().date():
            iprint(colorize('bold', '[boot %s] %s' % (
                host, boot_time.strftime(time_format))), base_indent)
        else:
            iprint(colorize('bold', '[boot %s] %s' % (
                host,
                boot_time.strftime(date_format + ' ' + time_format))),
                base_indent)
    for point in day.get_datapoint_list(tags=tags):
        print_datapoint(point, this_indent)
    dp_list = day.get_datapoint_list(tags=tags)
    if len(dp_list) >= 2:
        # Span between the first and last task of the day.
        diff = dp_list[-1].time_as_dt - dp_list[0].time_as_dt
        iprint(colorize('bold', '%s between first and last task.\n' % diff),
               base_indent)
    if day.day.date() == datetime.datetime.today().date():
        iprint(colorize('bold', '\nup for %s now.' % humanize.naturaldelta(
            get_uptime())), base_indent)
    print('\n')
# NOTE(review): a stray `"""` followed this function in the source --
# it looks like the opening of a truncated block; left as found.
"""
def get_thank_you(schema, metadata, eq_id, form_type):  # pylint: disable=unused-argument
    # Render the post-submission "thank you" page, or -- when nothing has
    # been submitted yet -- redirect the user to their next survey location.
    session_data = get_session_store().session_data
    completeness = get_completeness(current_user)
    if session_data.submitted_time:
        metadata_context = build_metadata_context_for_survey_completed(
            session_data)
        view_submission_url = None
        view_submission_duration = 0
        # Only offer the "view submission" link inside the window the
        # schema configures for it.
        if _is_submission_viewable(schema.json, session_data.submitted_time):
            view_submission_url = url_for('.get_view_submission',
                                          eq_id=eq_id, form_type=form_type)
            view_submission_duration = humanize.naturaldelta(
                timedelta(seconds=schema.json['view_submitted_response']
                          ['duration']))
        return render_theme_template(
            schema.json['theme'],
            template_name='thank-you.html',
            metadata=metadata_context,
            analytics_ua_id=current_app.config['EQ_UA_ID'],
            survey_id=schema.json['survey_id'],
            survey_title=TemplateRenderer.safe_content(schema.json['title']),
            is_view_submitted_response_enabled=is_view_submitted_response_enabled(schema.json),
            view_submission_url=view_submission_url,
            view_submission_duration=view_submission_duration)
    # Not submitted yet: compute the next routing location and redirect.
    routing_path = path_finder.get_full_routing_path()
    collection_id = metadata['collection_exercise_sid']
    router = Router(schema, routing_path, completeness)
    next_location = router.get_next_location()
    return _redirect_to_location(collection_id, metadata.get('eq_id'),
                                 metadata.get('form_type'), next_location)
def _print_cyclic_core(
        x, y, xcore, ycore, essential, t0, prm, fol):
    """Log the results of a cyclic core computation.

    Also asserts the support/covering invariants. Does nothing unless
    INFO-level logging is enabled.
    """
    if log.getEffectiveLevel() > logging.INFO:
        return
    # Each nonempty predicate must depend only on the parameter variables.
    for predicate in (essential, xcore, ycore):
        if predicate != fol.false:
            assert support_issubset(predicate, prm.p_vars, fol)
    # Sizes before essential-element removal.
    count_x = fol.count(x)
    count_y = fol.count(y)
    log.info('(x={m}, y={n}) implicants of covering problem'.format(
        m=humanize.intcomma(count_x),
        n=humanize.intcomma(count_y)))
    # Sizes after essential-element removal.
    count_x = fol.count(xcore)
    count_y = fol.count(ycore)
    log.info('(x={m}, y={n}) implicants after removing essential elements'.format(
        m=humanize.intcomma(count_x),
        n=humanize.intcomma(count_y)))
    log.info('{n} primes are essential'.format(
        n=humanize.intcomma(fol.count(essential))))
    log.info('cyclic core took {dt}'.format(
        dt=humanize.naturaldelta(time.time() - t0)))
async def daily(self, ctx):
    """ Get your daily credits """
    # Parameterized cooldown update, reused by both branches below.
    statement = "UPDATE users SET daily_cooldown = $1 where user_id = $2"
    check = await ctx.db.fetchval(
        "SELECT daily_cooldown from users where user_id = $1", ctx.author.id)
    if check is None:
        # First claim ever: set the next-allowed time to 24h from now.
        await ctx.db.execute(
            statement,
            ctx.message.created_at.replace(tzinfo=None) +
            datetime.timedelta(days=1), ctx.author.id)
    else:
        time = check
        now = discord.utils.utcnow()
        # NOTE(review): `time` was stored naive (tzinfo stripped) while
        # discord.utils.utcnow() is typically tz-aware — confirm the DB
        # driver returns comparable datetimes here.
        if time > discord.utils.utcnow():
            # Still cooling down; report the remaining wait.
            return await ctx.send(
                ":information_source: | you can collect your daily credits again in "
                + h.naturaldelta(now - time))
        await ctx.db.execute(
            statement,
            ctx.message.created_at.replace(tzinfo=None) +
            datetime.timedelta(days=1), ctx.author.id)
    # Cooldown passed (or first claim): credit the account.
    await ctx.db.execute(
        "UPDATE users SET credits = credits + $1 WHERE user_id = $2", 2000,
        ctx.author.id)
    await ctx.send(
        f":atm: | 2000 credits was added to your account {ctx.author.name}"
    )
def infraction_info(self, event, infraction):
    """Reply with an embed describing a single infraction looked up by ID."""
    try:
        user = User.alias()
        actor = User.alias()
        # Join the infraction to both the target user and the acting
        # moderator via two aliases of the same User table.
        infraction = Infraction.select(Infraction, user, actor).join(
            user, on=((Infraction.user_id == user.user_id).alias('user'))
        ).switch(Infraction).join(
            actor, on=((Infraction.actor_id == actor.user_id).alias('actor'))
        ).where(
            (Infraction.id == infraction) &
            (Infraction.guild_id == event.guild.id)
        ).get()
    except Infraction.DoesNotExist:
        raise CommandFail('cannot find an infraction with ID `{}`'.format(infraction))
    # Map the stored integer index back to its Infraction.Types member.
    type_ = {i.index: i for i in Infraction.Types.attrs}[infraction.type_]
    embed = MessageEmbed()
    # Color by severity: yellow for mute-like, orange for kick/softban, red otherwise.
    if type_ in (Infraction.Types.MUTE, Infraction.Types.TEMPMUTE, Infraction.Types.TEMPROLE):
        embed.color = 0xfdfd96
    elif type_ in (Infraction.Types.KICK, Infraction.Types.SOFTBAN):
        embed.color = 0xffb347
    else:
        embed.color = 0xff6961
    embed.title = str(type_).title()
    embed.set_thumbnail(url=infraction.user.get_avatar_url())
    embed.add_field(name='User', value=unicode(infraction.user), inline=True)
    embed.add_field(name='Moderator', value=unicode(infraction.actor), inline=True)
    embed.add_field(name='Active', value='yes' if infraction.active else 'no', inline=True)
    if infraction.active and infraction.expires_at:
        embed.add_field(name='Expires', value=humanize.naturaldelta(infraction.expires_at - datetime.utcnow()))
    # NOTE(review): '_No Reason Given' opens a markdown italic without
    # closing it — probably meant '_No Reason Given_'; confirm intent
    # before changing the user-visible string.
    embed.add_field(name='Reason', value=infraction.reason or '_No Reason Given', inline=False)
    embed.timestamp = infraction.created_at.isoformat()
    event.msg.reply('', embed=embed)
async def get_pr(request):
    """Return a JSON-serializable summary of one PR on a watched branch.

    Raises 404 when either index in the URL is out of range.
    """
    branch_idx = int(request.match_info['watched_branch_index'])
    number = int(request.match_info['pr_number'])
    try:
        pr = watched_branches[branch_idx].prs[number]
    except IndexError:
        raise web.HTTPNotFound()
    result = {'number': pr.number}
    if pr.batch:
        status = await pr.batch.status()
        for job in status['jobs']:
            # Humanize job durations for display.
            if 'duration' in job:
                job['duration'] = humanize.naturaldelta(datetime.timedelta(seconds=job['duration']))
            attributes = job['attributes']
            # Comma-separated links become a list.
            if 'link' in attributes:
                attributes['link'] = attributes['link'].split(',')
        result['batch'] = status
        result['artifacts'] = f'{BUCKET}/build/{pr.batch.attributes["token"]}'
    return result
def check_afk(update: Update, context: CallbackContext, user_id: int,
              fst_name: str, userc_id: int):
    """Reply with an AFK notice when a mentioned user is marked away."""
    if sql.is_afk(user_id):
        user = sql.check_afk_status(user_id)
        # Don't announce a user's AFK status to themselves.
        if int(userc_id) == int(user_id):
            return
        time = humanize.naturaldelta(datetime.now() - user.time)
        if not user.reason:
            res = "{} is *afk*.\nLast seen: `{} ago`".format(fst_name, time)
            update.effective_message.reply_text(
                res,
                parse_mode=ParseMode.MARKDOWN,
            )
        else:
            # NOTE(review): html.escape is applied although the message is
            # sent with Markdown parse mode (and the no-reason branch does
            # not escape at all) — confirm the intended escaping scheme.
            res = "{} is *afk*.\nReason: `{}`\nLast seen: `{} ago`".format(
                html.escape(fst_name), html.escape(user.reason), time)
            update.effective_message.reply_text(
                res,
                parse_mode=ParseMode.MARKDOWN,
            )
def get_client_proxy_info(profile):
    """
    Returns information on the current proxy (if available)

    Inspects the mod_ssl-style SSL_CLIENT_* variables in the WSGI environ.
    On a verified client cert, also checks any stored user proxy and, if it
    has more than 10 minutes of validity left, reports the time remaining.
    """
    info = {'user_proxy': False}
    # Fix: the else-branch reads SSL_CLIENT_S_DN and SSL_CLIENT_CERT, so the
    # guard must require them too. The original tested SSL_CLIENT_I_DN
    # (issuer DN, never read) instead of SSL_CLIENT_S_DN, so a request that
    # lacked the subject DN would raise KeyError below. The I_DN check is
    # kept for backward compatibility with the original gating.
    if ('SSL_CLIENT_M_SERIAL' not in request.environ
            or 'SSL_CLIENT_V_END' not in request.environ
            or 'SSL_CLIENT_I_DN' not in request.environ
            or 'SSL_CLIENT_S_DN' not in request.environ
            or 'SSL_CLIENT_CERT' not in request.environ
            or request.environ.get('SSL_CLIENT_VERIFY') != 'SUCCESS'):
        info['user_cert'] = False
    else:
        info['user_dn'] = request.environ['SSL_CLIENT_S_DN']
        info['user_cert'] = request.environ['SSL_CLIENT_CERT']
        if profile.user_proxy:
            px = x509.load_pem_x509_certificate(
                profile.user_proxy.encode('ascii', 'ignore'),
                default_backend())
            not_after = px.not_valid_after.replace(tzinfo=pytz.utc)
            time_left = not_after - datetime.now(tz=pytz.utc)
            # let's consider a valid proxy if you have at least 10 min
            if time_left.total_seconds() > 600:
                info['user_proxy'] = True
                info['user_proxy_time_left'] = humanize.naturaldelta(time_left)
    return info
def check_afk(update: Update, context: CallbackContext, user_id: int,
              fst_name: str, userc_id: int):
    """Reply with a user's AFK status (and reason, if set) when mentioned."""
    if not sql.is_afk(user_id):
        return
    afk_user = sql.check_afk_status(user_id)
    # Never announce a user's AFK status back to themselves.
    if int(userc_id) == int(user_id):
        return
    since = humanize.naturaldelta(datetime.now() - afk_user.time)
    if afk_user.reason:
        res = "{} is AFK.\nReason: <code>{}</code>\nLast seen {} ago.".format(
            html.escape(fst_name),
            html.escape(afk_user.reason),
            since,
        )
        update.effective_message.reply_text(res, parse_mode="html")
    else:
        res = "{} is afk.\n\nLast seen {} ago.".format(
            fst_name,
            since,
        )
        update.effective_message.reply_text(res)
def drop_dead_measurements(data_getter: DataGetter,
                           dryrun: bool = True,
                           max_time: int = 3 * 365 * 24 * 60 * 60):
    """Drop measurements that have not been written to for max_time seconds.

    With ``dryrun`` (the default) nothing is deleted; intended deletions
    are only logged. ``max_time`` defaults to three years.
    """
    measurements = data_getter.get_measurements()
    logger.info("The available measurements are %s", measurements)
    deleted_measurements = set()
    for measurement in measurements:
        # NOTE(review): MOST_RECENT_QUERY.format(**locals()) interpolates the
        # measurement name directly into the query string — safe only while
        # measurement names are trusted; confirm against the query template.
        most_recent_time = data_getter.exec_query(
            MOST_RECENT_QUERY.format(**locals()))[0]["time"]
        # assumes the query returns an epoch timestamp in seconds — TODO confirm
        seconds_since_last_write = time() - most_recent_time
        logger.info("%s hasn't been written to in %s", measurement,
                    naturaldelta(seconds_since_last_write))
        if seconds_since_last_write > max_time:
            logger.info("Going to delete %s", measurement)
            if not dryrun:
                deleted_measurements.add(measurement)
                data_getter.drop_measurement(measurement)
    if deleted_measurements:
        logger.info("Deleted %s", deleted_measurements)
    else:
        logger.info("No measurement was deleted.")
def user(bot, args, sender, source):
    """Announce account age and karma for reddit user ``args[0]`` in ``source``."""
    if dave.config.redis.exists("reddit:user:mentioned:{}:{}".format(
            args[0], source)):
        # if this user was mentioned in the last x seconds (see the setex below), don't
        # spam info about them
        return
    if not dave.config.redis.exists("reddit:user:{}".format(args[0])):
        req = get("https://reddit.com/u/{}/about.json".format(args[0]),
                  headers={'user-agent': 'irc bot (https://github.com/w4)'})
        if req.status_code != 200:
            # lookup failed (user likely doesn't exist); stay silent
            return
        req = req.json()
        # cache the profile JSON for 10 minutes
        dave.config.redis.setex("reddit:user:{}".format(args[0]), 600,
                                pickle.dumps(req))
    else:
        # NOTE(review): unpickling cached data is only safe while this cache
        # key is written exclusively by the code path above — confirm.
        req = pickle.loads(
            dave.config.redis.get("reddit:user:{}".format(args[0])))
    resp = req["data"]
    # don't give info about this user again in this channel for 300 seconds
    dave.config.redis.setex(
        "reddit:user:mentioned:{}:{}".format(args[0], source), 300, 1)
    bot.msg(
        source,
        assembleFormattedText(A.normal[
            A.bold[resp["name"]],
            ", a redditor for {}. {} link karma, {} comment karma.".format(
                naturaldelta(datetime.utcnow().timestamp() - resp["created"]),
                intcomma(resp["link_karma"]),
                intcomma(resp["comment_karma"])),
            " Verified user." if resp["verified"] else "",
            " Reddit employee." if resp["is_employee"] else ""]))
async def reason(self, ctx, number: int, *, reason):
    """Edits the reason of a punishment"""
    info = await self.bot.mod_pool.fetch(
        "SELECT * FROM action WHERE id = $1", number)
    if not info:
        return await ctx.send(f"Case {number} does not exist!")
    else:
        info = info[0]
    await ctx.send(f"Set the reason to: `{reason}`")
    await self.bot.mod_pool.execute(
        "UPDATE action SET reason = $1 WHERE id = $2", reason, number)
    # NOTE(review): get_member returns None for users no longer in the
    # guild; target.id / mod.avatar_url below would then raise — confirm
    # callers guarantee both are still members.
    target = ctx.guild.get_member(info['userid'])
    mod = ctx.guild.get_member(info['modid'])
    # Mutes additionally show the remaining length from the mutes table.
    if info['type'] == "Mute":
        mute_info = await self.bot.mod_pool.fetchval(
            "SELECT expire FROM mutes WHERE id = $1", info['id'])
        time = humanize.naturaldelta(mute_info - datetime.datetime.utcnow())
        string = f"**__Length:__** ``{time}``"
    else:
        string = ""
    embed = discord.Embed(title=info['type'].title(),
                          color=discord.Color.blurple(),
                          description=wrap(f"""
**__Victim:__** {target} ({target.id})
**__Reason:__** ``{reason}``
{string}
"""))
    embed.set_author(name=mod, icon_url=mod.avatar_url)
    embed.set_footer(text=f"Case #{number}")
    embed.timestamp = info["time"]
    # Update the original log message in the mod-log channel.
    message = await self.bot.main_guild.get_channel(
        716719009499971685).fetch_message(info['messageid'])
    await message.edit(embed=embed)
def category_detail_view(request, category_id):
    """Render a category page with its threads, tags and poster details."""
    category = Category.objects.get(category_id=category_id)
    # .values() yields plain dicts so extra display keys can be attached below.
    threads = Thread.objects.filter(category_id=category_id).values()
    forum_user = None
    tags = category.tags.all()
    if request.user.is_authenticated:
        forum_user = ForumUser.objects.get(id=request.user.id)
        forum_user.profile_pic_path = str(forum_user.profile_pic)
    try:
        for thread in threads:
            posts = Post.objects.filter(
                thread_id=thread["thread_id"]).order_by("created_on")
            # Original poster = author of the oldest post.
            posted_user_id = posts.first().posted_by_id.id
            username = User.objects.get(id=posted_user_id).get_username()
            thread["started_by"] = username
            # gets profile pic of original poster in each thread
            posted_by_user = ForumUser.objects.get(id=posted_user_id)
            start_user_pic = posted_by_user.profile_pic.url
            thread["start_user_pic"] = str(start_user_pic)
            thread["tags"] = Thread.objects.get(
                thread_id=thread["thread_id"]).tags.all()
            timedelta = (datetime.now(timezone.utc) - thread["latest_post_on"])
            # converts time elapsed since latest post to human readable format
            timedelta = humanize.naturaldelta(timedelta)
            thread["latest_activity"] = timedelta
    except AttributeError as e:
        # e.g. a thread with no posts: posts.first() is None above.
        logger.error("ERROR: " + str(e))
    except ValueError as e:
        logger.error("ERROR: " + str(e))
        return render(request, "error404.html", {})
    return render(
        request, "page-categories-single.html", {
            "category": category,
            "threads": threads,
            "forum_user": forum_user,
            "tags": tags
        })
async def system_fronthistory(ctx: CommandContext, args: List[str]):
    """Show the last 10 fronter switches for a system as an embed."""
    if len(args) == 0:
        # No argument: use the invoker's own registered system.
        if not ctx.system:
            raise NoSystemRegistered()
        system = ctx.system
    else:
        system = await utils.get_system_fuzzy(ctx.conn, ctx.client, args[0])
        if system is None:
            raise CommandError("Can't find system \"{}\".".format(args[0]))
    lines = []
    front_history = await pluralkit.utils.get_front_history(ctx.conn, system.id, count=10)
    for i, (timestamp, members) in enumerate(front_history):
        # Special case when no one's fronting
        if len(members) == 0:
            name = "(no fronter)"
        else:
            name = ", ".join([member.name for member in members])
        # Make proper date string
        time_text = timestamp.isoformat(sep=" ", timespec="seconds")
        rel_text = humanize.naturaltime(timestamp)
        delta_text = ""
        if i > 0:
            # Duration this front lasted, relative to the previous entry
            # in the history list.
            last_switch_time = front_history[i - 1][0]
            delta_text = ", for {}".format(
                humanize.naturaldelta(timestamp - last_switch_time))
        lines.append("**{}** ({}, {}{})".format(name, time_text, rel_text,
                                                delta_text))
    embed = utils.make_default_embed("\n".join(lines) or "(none)")
    embed.title = "Past switches"
    return embed
def view_interlock_sessions(request):
    """Render every interlock session log for managers."""
    # Only interlock managers may view session logs.
    if not request.user.profile.can_manage_interlocks:
        return HttpResponseForbidden(
            "You do not have permission to access that.")
    interlock_sessions = []
    for log_entry in InterlockLog.objects.all():
        # A session still in progress has no user_off yet.
        turned_off_by = "IN USE"
        if log_entry.user_off:
            turned_off_by = log_entry.user_off.profile.get_full_name()
        interlock_sessions.append({
            "id": log_entry.id,
            "interlock_name": log_entry.interlock.name,
            "time_on": timezone.make_naive(
                log_entry.first_heartbeat).strftime("%Y-%m-%d %H:%M:%S"),
            "time_off": timezone.make_naive(
                log_entry.last_heartbeat).strftime("%Y-%m-%d %H:%M:%S"),
            "user_on": log_entry.user.profile.get_full_name(),
            "user_off": turned_off_by,
            "on_for": humanize.naturaldelta(
                log_entry.last_heartbeat - log_entry.first_heartbeat),
            "completed": log_entry.session_complete,
        })
    return render(request, 'view_interlock_sessions.html',
                  {"sessions": interlock_sessions})
async def system_card(conn, client: discord.Client, system: System, is_own_system: bool = True) -> discord.Embed:
    """Build the profile embed for a system: tag, fronters, linked accounts,
    description and a pointer to the member-list commands."""
    card = discord.Embed()
    card.colour = discord.Colour.blue()
    if system.name:
        card.title = truncate_title(system.name)
    if system.avatar_url:
        card.set_thumbnail(url=system.avatar_url)
    if system.tag:
        card.add_field(name="Tag", value=truncate_field_body(system.tag))
    fronters, switch_time = await get_fronters(conn, system.id)
    if fronters:
        names = ", ".join([member.name for member in fronters])
        fronter_val = "{} (for {})".format(names, humanize.naturaldelta(switch_time))
        card.add_field(name="Current fronter" if len(fronters) == 1 else "Current fronters", value=truncate_field_body(fronter_val))
    account_names = []
    for account_id in await system.get_linked_account_ids(conn):
        try:
            account = await client.get_user_info(account_id)
            account_names.append("<@{}> ({}#{})".format(account_id, account.name, account.discriminator))
        except discord.NotFound:
            # Account no longer exists on Discord; show a placeholder.
            account_names.append("(deleted account {})".format(account_id))
    card.add_field(name="Linked accounts", value=truncate_field_body("\n".join(account_names)))
    if system.description:
        card.add_field(name="Description", value=truncate_field_body(system.description), inline=False)
    # Fix: the not-own-system message was missing a space after the first
    # backtick ("list`for"), which broke the rendered markdown text.
    card.add_field(name="Members", value="*See `pk;system {0} list` for the short list, or `pk;system {0} list full` for the detailed list*".format(system.hid) if not is_own_system else "*See `pk;system list` for the short list, or `pk;system list full` for the detailed list*")
    card.set_footer(text="System ID: {}".format(system.hid))
    return card
async def on_command_error(ctx, error):
    """Global command-error handler: user-facing notices for known errors,
    traceback to stderr for everything else."""
    if isinstance(error, discord.ext.commands.errors.CommandNotFound):
        return
    elif isinstance(error, discord.ext.commands.errors.CommandOnCooldown):
        # Tell the user how long the cooldown lasts, then clean up our reply.
        s = round(error.retry_after, 2)
        s = humanize.naturaldelta(s)
        msgtodelete = await ctx.send_in_codeblock(f"error; cooldown for {s}")
        await asyncio.sleep(bot.secondstoReact)
        await msgtodelete.delete()
        return
    elif isinstance(error, discord.ext.commands.errors.NotOwner):
        msgtodelete = await ctx.send_in_codeblock("error; missing permissions")
        await asyncio.sleep(15)
        await msgtodelete.delete()
    elif isinstance(error, botchecks.BanCheckError):
        await ctx.send_in_codeblock(f"error; you're banned! please join the Fishy.py support server to appeal ({ctx.prefix}support)")
        return
    elif isinstance(error, botchecks.IsNotInGuild):
        await ctx.send_in_codeblock(f"error; sorry, you can only run this command in a guild. right now you are DM'ing me!")
        return
    elif isinstance(error, botchecks.BlacklistedChannel):
        return
    elif isinstance(error, discord.Forbidden):
        try:
            await ctx.send_in_codeblock("error; i'm missing some permissions. please make sure i have embed permissions, manage messages, and use external emojis.")
        # Fix: was a bare `except:`, which also swallows KeyboardInterrupt
        # and SystemExit; even telling the user failed, so just log it.
        except Exception:
            print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
            traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
    else:
        bot.commandsFailed += 1
        # All other Errors not returned come here. And we can just print the default TraceBack.
        print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
        traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
        await ctx.send_in_codeblock(f"Internal Error\n- {error}", language='diff')
def run(self):
    """Move every image into its tag directory, reporting progress and a summary."""
    images = self.__get_images()
    total = len(images)
    per_tag = dict()
    self.__output('Sorting {} images'.format(total))
    for position, image in enumerate(images, start=1):
        tag = image['tag']
        destination = self.__get_child(tag)
        # progress line: "[42%] src => dest"
        line = '[{}] {} => {}'.format(self.__percent(position, total),
                                      image['path'],
                                      destination.child(image['path'].name))
        self.__output(line, header=False)
        # move the file and count it against its tag
        image['path'].move(destination.child(image['path'].name))
        per_tag[tag] = per_tag.get(tag, 0) + 1
    # summary, ordered by per-tag count (largest first)
    elapsed = naturaldelta(datetime.now() - self.started_at)
    self.__output('Summary: {} images sorted in {}'.format(total, elapsed))
    summary = list()
    for tag, moved in reversed(sorted(per_tag.items(), key=itemgetter(1))):
        summary.append('{} images moved to {}'.format(moved,
                                                      self.__get_child(tag)))
    self.__output(summary + [''], header=False)
    self.__clean_empty_dirs()
def user(bot, args, sender, source):
    """Announce account age and karma for reddit user ``args[0]`` in ``source``."""
    mention_key = "reddit:user:mentioned:{}:{}".format(args[0], source)
    if dave.config.redis.exists(mention_key):
        # mentioned recently in this channel (see the setex below);
        # don't spam info about them again
        return
    cache_key = "reddit:user:{}".format(args[0])
    if dave.config.redis.exists(cache_key):
        req = pickle.loads(dave.config.redis.get(cache_key))
    else:
        req = get("https://reddit.com/u/{}/about.json".format(args[0]),
                  headers={'user-agent': 'irc bot (https://github.com/w4)'})
        if req.status_code != 200:
            return
        req = req.json()
        # cache the profile JSON for 10 minutes
        dave.config.redis.setex(cache_key, 600, pickle.dumps(req))
    resp = req["data"]
    # don't give info about this user again in this channel for 300 seconds
    dave.config.redis.setex(mention_key, 300, 1)
    bot.msg(source, assembleFormattedText(
        A.normal[
            A.normal[resp["name"]],
            ", a redditor for {}. {} link karma, {} comment karma.".format(
                naturaldelta(datetime.utcnow().timestamp() - resp["created"]),
                intcomma(resp["link_karma"]),
                intcomma(resp["comment_karma"])
            ),
            " Verified user." if resp["verified"] else "",
            " Reddit employee." if resp["is_employee"] else ""
        ]
    ))
def get_date(year, month, day):
    """Render the observations page for a date (optionally with yearly history)."""
    date = datetime.date(year, month, day)
    data = {}
    while date > EPOCH:
        observations = parse_observations(get_observations(date),
                                          include_all='all' in flask.request.args)
        categories = set(observations.keys())
        data[date] = {'observations': observations, 'categories': categories}
        if 'history' in flask.request.args:
            # with ?history: walk back a year at a time until EPOCH
            date = date + dateutil.relativedelta.relativedelta(years=-1)
        else:
            break

    def jumplink(text, **kwargs):
        # Build an <a class="button"> linking to today, a random date, or a
        # date relative to `date`. Note: closes over the post-loop `date`,
        # i.e. the earliest date visited when history was requested.
        if kwargs.get('today'):
            new_date = datetime.date.today()
        elif kwargs.get('random'):
            new_date = random_date()
        else:
            new_date = date + dateutil.relativedelta.relativedelta(**kwargs)
        return '<a class="button" href="{link}">{text}</a>'.format(
            link=new_date.strftime('/%Y/%m/%d'), text=text)

    def is_favorite(hash):
        # Favorites are keyed by the (post-loop) date string, then hash.
        return bool(FAVORITES.get(str(date), {}).get(hash))

    return flask.render_template('daily.html',
                                 date=date,
                                 offset=lambda date: humanize.naturaldelta(
                                     datetime.date.today() - date),
                                 data=data,
                                 jumplink=jumplink,
                                 is_favorite=is_favorite)
def index():
    """Fetch taskd server stats over mutual TLS and render the stats page."""
    tc = TaskdConnection()
    # client/CA PKI material for the mutual-TLS connection
    tc.client_cert = "pki/client.cert.pem"
    tc.client_key = "pki/client.key.pem"
    tc.cacert_file = "pki/ca.cert.pem"
    with open("conf.json") as j:
        config = json.load(j)
    tc.server = config['server']
    tc.group = config['group']
    tc.username = config['username']
    tc.uuid = config['user_uuid']
    tc.connect()
    resp = tc.stats()
    # each stats line is "key:value"
    d = dict([x.split(":") for x in resp.data])
    # do some humanizing of the data
    d.update({
        k: humanize.naturalsize(v)
        for k, v in d.items() if "bytes" in k or k == "user data"
    })
    d['uptime'] = humanize.naturaldelta(
        datetime.timedelta(seconds=int(d['uptime'])))
    # d['total bytes in'] = humanize.naturalsize(d['total bytes in'])
    tpl = template("stats.tpl", response=d)
    return tpl
def overall_stats(metadata):
    """Compute annotation statistics and return them as a dict.

    Returns ``locals()``, so every local name defined here becomes a key —
    including loop leftovers like ``lang``/``samples``/``n_good`` and the
    ``metadata`` argument itself. Renaming any local would change the
    returned keys; do not refactor without checking the consumer
    (presumably a template — TODO confirm).
    """
    good_annotations = count_annotations(metadata, 'good')
    bad_annotations = count_annotations(metadata, 'bad')
    total_annotations = good_annotations + bad_annotations
    # Count how many languages have at least 1 / 5 / 10 'good' annotations.
    num_1_annotations = 0
    num_5_annotations = 0
    num_10_annotations = 0
    for lang, samples in metadata.items():
        n_good = count_lang_annotations(samples, 'good')
        if n_good >= 1:
            num_1_annotations += 1
        if n_good >= 5:
            num_5_annotations += 1
        if n_good >= 10:
            num_10_annotations += 1
    # assumes roughly 20 seconds spent per annotation — TODO confirm
    time_annotated = humanize.naturaldelta(total_annotations * 20)
    return locals()
def estimate_time(builds):
    """Update the working build with an estimated completion time.

    Takes a simple average over the previous builds, using those whose
    outcome is ``'passed'``.

    Arguments:
      builds (:py:class:`list`): All builds.
    """
    # Look for an in-progress build among the first four entries only.
    working = None
    for position, build in enumerate(builds[:4]):
        if build['outcome'] == 'working':
            working = (position, build)
            break
    if working is None:
        return  # no in-progress builds
    index, current = working
    # Without a start time there is nothing to extrapolate from.
    if current.get('started_at') is None:
        current['elapsed'] = 'estimate not available'
        return
    # Older passed builds with a known duration feed the average.
    completed = [
        build for build in builds[index + 1:]
        if build['outcome'] == 'passed' and build['duration'] is not None
    ]
    if not completed:
        current['elapsed'] = 'estimate not available'
        return
    average_duration = int(
        sum(build['duration'] for build in completed) / float(len(completed)))
    finish = current['started_at'] + average_duration
    remaining = (datetime.fromtimestamp(finish) - datetime.now()).total_seconds()
    if remaining >= 0:
        current['elapsed'] = '{} left'.format(naturaldelta(remaining))
    else:
        # Past the projected finish time already.
        current['elapsed'] = 'nearly done'
def create_stop_timer(logger, resource_claim, **kwargs):
    """Schedule a stop notification ~30 minutes before the claim's stop time."""
    stop_timestamp = resource_claim.stop_timestamp
    # Claims without a scheduled stop need no timer.
    if not stop_timestamp:
        return
    stop_datetime = isoparse(stop_timestamp)
    # Fire 30m30s before the scheduled stop (extra 30s of slack).
    notification_timedelta = stop_datetime - datetime.now(
        timezone.utc) - timedelta(minutes=30, seconds=30)
    notification_interval = notification_timedelta.total_seconds()
    if notification_interval > 0:
        logger.info("scheduled stop notification in " +
                    naturaldelta(notification_timedelta))
        timer = Timer(notification_interval,
                      notify_scheduled_stop,
                      kwargs={
                          "logger": kopf.LocalObjectLogger(
                              body=resource_claim.definition,
                              settings=kopf.OperatorSettings()),
                          "resource_claim": resource_claim,
                          **kwargs
                      })
        # NOTE(review): overwriting an existing timer for this uid without
        # cancelling it would leave the old Timer running — confirm callers
        # cancel before rescheduling.
        stop_timers[resource_claim.uid] = timer
        timer.start()
def trigger_reminders(self):
    """Fire all reminders due within the next second, then requeue the rest."""
    reminders = Reminder.with_message_join().where(
        (Reminder.remind_at < (datetime.utcnow() + timedelta(seconds=1))))
    for reminder in reminders:
        message = reminder.message_id
        channel = self.state.channels.get(message.channel_id)
        if not channel:
            self.log.warning(
                'Not triggering reminder, channel %s was not found!',
                message.channel_id)
            # NOTE(review): the reminder is not deleted here, so it will be
            # retried on the next pass — confirm that is intended.
            continue
        channel.send_message(
            u'<@{}> you asked me at {} ({} ago) to remind you about: {}'.
            format(
                message.author_id, reminder.created_at,
                humanize.naturaldelta(reminder.created_at -
                                      datetime.utcnow()),
                S(reminder.content)))
        # Delivered: remove it so it never fires twice.
        reminder.delete_instance()
    self.queue_reminders()
def stream(streamkey):
    """Render the page for ``streamkey``: the live view when the stream
    exists, otherwise a missing-stream message."""
    app.logger.info('200, Access to /{}'.format(streamkey))
    # Strip potential trailing slashes
    streamkey = streamkey.rstrip("/")
    entry = streamlist.get_stream(streamkey)
    # Prefer the canonical key/description from the stream record, if any.
    streamkey = key_if_not_None(entry, "key", that=streamkey)
    description = key_if_not_None(entry, "description")
    existed = entry is not None
    if existed:
        app.logger.debug("Client requests stream {} ({}/{}.m3u8)".format(
            streamkey, config["application"]["hls_path"], streamkey))
        running_since = humanize.naturaldelta(
            dt.timedelta(seconds=entry.active_since()))
    else:
        # Stream was missing; log which client asked for it.
        running_since = None
        app.logger.info("Client {} looked for non-existent stream {}".format(
            request.remote_addr, streamkey))
    return render_template('stream.html',
                           application_name=APPLICATION_NAME,
                           page_title=config["application"]["page_title"],
                           hls_path=config["application"]["hls_path"],
                           streamkey=streamkey,
                           description=description,
                           running_since=running_since,
                           existed=existed)