def list(self):
    """List containers, sorted by the column named in ``--sort``.

    Builds one row dict per container (command joined into a single
    string, creation time humanized) and renders them via Report.
    Prints nothing when there are no containers.
    """
    # TODO: Verify sorting on dates and size
    ctnrs = sorted(
        self.client.containers.list(),
        key=operator.attrgetter(self._args.sort))
    if not ctnrs:
        return

    rows = []
    for ctnr in ctnrs:
        fields = dict(ctnr)
        fields.update({
            'command': ' '.join(ctnr.command),
            'createdat': humanize.naturaldate(
                podman.datetime_parse(ctnr.createdat)),
        })
        if self._args.truncate:
            # Keep only the trailing 30 characters of the image name.
            fields.update({'image': ctnr.image[-30:]})
        rows.append(fields)

    with Report(self.columns, heading=self._args.heading) as report:
        report.layout(
            rows, self.columns.keys(), truncate=self._args.truncate)
        for row in rows:
            report.row(**row)
def home():
    """Render the landing page for a fetched post.

    Pulls a post, scores its title's sentiment, humanizes the posting
    time, and hands all three to the letter.html template.
    """
    post = getPost()
    sentiment = sentimentAnalysis(post.title)
    app.logger.debug(sentiment)
    #app.logger.debug(generateRandomDate())
    #app.logger.debug(post.selftext)
    # post.created is a POSIX timestamp (fromtimestamp), humanized to e.g. "today".
    postedTime = humanize.naturaldate(datetime.datetime.fromtimestamp(post.created))
    return render_template('letter.html', post = post, time = postedTime, sentiment = sentiment)
def _translate(dt, target_locale):
    """Translate humanize's English naturaldate phrase into *target_locale*.

    Uses dateparser's locale data: the English phrase is normalized to a
    relative-type key, then the target locale's last listed variant for
    that key is returned.
    """
    en = default_loader.get_locale("en")
    target = default_loader.get_locale(target_locale)
    naturaldate = humanize.naturaldate(dt)
    base = en.translate(naturaldate, settings=dateparser.conf.settings)
    # NOTE(review): assumes `base` is always present in the locale's
    # "relative-type" table — verify for dates humanize renders verbatim.
    return target.info["relative-type"][base][-1]
def format_datetime(when: Optional[datetime.datetime]) -> str:
    """Render an aware datetime: relative phrase if under a day old,
    otherwise a natural date in local time. None renders as ""."""
    if when is None:
        return ""
    assert when.tzinfo is not None
    age = datetime.datetime.now(datetime.timezone.utc) - when
    if age >= datetime.timedelta(days=1):
        return humanize.naturaldate(when.astimezone())
    return humanize.naturaltime(age)
def _transform_api_data(details):
    """Humanize the filesize and last-updated fields of a details dict in place."""
    updated = parser.parse(details.get('last_updated'))
    details['filesize'] = humanize.naturalsize(details['binary_filesize'])
    details['last_updated'] = humanize.naturaldate(updated)
    return details
async def _apoll_history(self, ctx, poll_id: int = None):
    """Fetch information about past anonymous polls.

    If this command is called without any arguments then a list of past
    polls is fetched. If this command is called with a poll ID then the
    results of the poll with that specific poll ID are fetched.

    In a future update, polls older than 2 months will automatically be
    deleted from the database."""
    if poll_id is None:
        past_polls = await self.bot.database.fetch_polls(ctx.guild.id)
        if not past_polls:
            return await ctx.send(
                "There seem to be **no** past anonymous polls in this server."
            )
        lines = [
            f"**[{poll['poll_id']}]** `{poll['question']}`"
            for poll in past_polls
        ]
        embed = discord.Embed(title="Past Anonymous Polls",
                              description="\n".join(lines),
                              colour=discord.Colour.dark_teal())
        return await ctx.send(embed=embed)

    past_poll = await self.bot.database.fetch_poll(poll_id, ctx.guild.id)
    if past_poll is None:
        # Message grammar fixed ("was not past poll" -> "was no past poll").
        return await ctx.send(
            "There was no past poll with the above poll ID in this guild."
        )

    lines = []
    # 0x1f1e6 is the regional-indicator "A" emoji; options get A, B, C, ...
    counter = 0x1f1e6
    for amt, option in zip(past_poll['votes'], past_poll['options']):
        lines.append(f"{chr(counter)} `{option}` **{amt}**")
        counter += 1

    embed = discord.Embed(title=past_poll["question"],
                          description="\n\n".join(lines),
                          colour=discord.Colour.dark_teal())
    embed.set_footer(text=f"Requested by {ctx.author.name}.",
                     icon_url=ctx.author.avatar_url)
    embed.set_author(name=f"Poll ID: {poll_id}")
    end = humanize.naturaldate(past_poll['end_time'])
    embed.add_field(name="Ended", value=end.title())
    await ctx.send(embed=embed)
def natural_datetime(dt_aware):
    """Humanize an aware datetime: relative wording for today,
    '<natural date> at HH:MM' for any other day."""
    naive = dt_aware.replace(tzinfo=None)
    if naive.date() == datetime.date.today():
        return humanize.naturaltime(naive)
    return f"{humanize.naturaldate(naive)} at {naive.strftime('%H:%M')}"
def GetItem(url, items, globalDict, extDict):
    """Recursively fetch a OneDrive children listing into *items*.

    Each entry becomes a dict with humanized size/date; folders recurse
    into their own ordered dict of children, files record a download URL
    and are indexed by extension in *extDict*. Follows @odata.nextLink
    pagination. Returns (items, globalDict, extDict).
    """
    token = GetToken()
    header = {'Authorization': 'Bearer {}'.format(token)}
    r = requests.get(url, headers=header)
    data = json.loads(r.text)
    values = data.get('value')
    print('fetching data from {}'.format(url))
    if len(values) > 0:
        for value in values:
            item = {}
            if value.get('folder'):
                item['type'] = 'folder'
                item['name'] = value['name']
                item['id'] = value['id']
                item['size'] = humanize.naturalsize(value['size'], gnu=True)
                item['lastModtime'] = humanize.naturaldate(
                    parse(value['lastModifiedDateTime']))
                if value.get('folder').get('childCount') == 0:
                    item['value'] = {}
                else:
                    url = app_url + '_api/v2.0/me' + value.get(
                        'parentReference').get('path') + '/' + value.get(
                            'name') + ':/children?expand=thumbnails'
                    item['value'], globalDict, extDict = GetItem(
                        url, collections.OrderedDict(), globalDict, extDict)
                globalDict[value['id']] = dict(name=value['name'])
            else:
                # BUG FIX: original had a trailing comma, which stored the
                # tuple ('file',) instead of the string 'file'.
                item['type'] = 'file'
                item['name'] = value['name']
                item['id'] = value['id']
                item['size'] = humanize.naturalsize(value['size'], gnu=True)
                item['lastModtime'] = humanize.naturaldate(
                    parse(value['lastModifiedDateTime']))
                item['downloadUrl'] = value['@content.downloadUrl']
                globalDict[value['id']] = dict(
                    name=value['name'],
                    downloadUrl=value['@content.downloadUrl'])
                extDict.setdefault(GetExt(value['name']), []).append(value['id'])
            items[item['name']] = item
    if data.get('@odata.nextLink'):
        GetItem(data.get('@odata.nextLink'), items, globalDict, extDict)
    return items, globalDict, extDict
def local(team: Optional[str] = None):
    """Lists synced datasets, stored in the specified path. """
    table = Table(["name", "images", "sync_date", "size"],
                  [Table.L, Table.R, Table.R, Table.R])
    client = _load_client(offline=True)
    for dataset_path in client.list_local_datasets(team=team):
        table.add_row({
            "name": f"{dataset_path.parent.name}/{dataset_path.name}",
            "images": sum(1 for _ in find_files([dataset_path])),
            # mtime of the dataset directory stands in for the last sync time.
            "sync_date": humanize.naturaldate(
                datetime.datetime.fromtimestamp(dataset_path.stat().st_mtime)),
            "size": humanize.naturalsize(
                sum(p.stat().st_size for p in find_files([dataset_path]))),
        })

    # List deprecated datasets
    deprecated_local_datasets = client.list_deprecated_local_datasets()
    if deprecated_local_datasets:
        # NOTE(review): list_deprecated_local_datasets() is called twice and
        # `deprecated_local_datasets` is consumed again by len(list(...)) below;
        # if it returns a generator the truthiness check and the final count
        # are unreliable — confirm it returns a list.
        for dataset_path in client.list_deprecated_local_datasets():
            table.add_row({
                "name": dataset_path.name + " (deprecated format)",
                "images": sum(1 for _ in find_files([dataset_path])),
                "sync_date": humanize.naturaldate(
                    datetime.datetime.fromtimestamp(
                        dataset_path.stat().st_mtime)),
                "size": humanize.naturalsize(
                    sum(p.stat().st_size for p in find_files([dataset_path]))),
            })

    print(table)

    if len(list(deprecated_local_datasets)):
        print(
            f"\nWARNING: found some local datasets that use a deprecated format "
            f"not supported by the recent version of darwin-py. "
            f"Run `darwin dataset migrate team_slug/dataset_slug` "
            "if you want to be able to use them in darwin-py.")
def GetItem(self,url,grandid=0,parent='',trytime=1):
    """Fetch one OneDrive children listing and insert entries into `items`.

    Folders with children and pagination links are pushed back onto
    self.queue for other workers; on any exception the same URL is
    re-queued up to 3 attempts (trytime counts attempts).
    """
    app_url=GetAppUrl()
    token=GetToken()
    print(u'getting files from url {}'.format(url))
    header={'Authorization': 'Bearer {}'.format(token)}
    try:
        r=requests.get(url,headers=header)
        data=json.loads(r.content)
        values=data.get('value')
        #print url
        if len(values)>0:
            for value in values:
                item={}
                if value.get('folder'):
                    item['type']='folder'
                    item['name']=convert2unicode(value['name'])
                    item['id']=convert2unicode(value['id'])
                    item['size']=humanize.naturalsize(value['size'], gnu=True)
                    item['lastModtime']=humanize.naturaldate(parse(value['lastModifiedDateTime']))
                    item['grandid']=grandid
                    item['parent']=parent
                    # NOTE(review): the insert result is bound but never used.
                    subfodler=items.insert_one(item)
                    if value.get('folder').get('childCount')==0:
                        continue
                    else:
                        # Enqueue the folder's own children listing (depth + 1).
                        url=app_url+'_api/v2.0/me'+value.get('parentReference').get('path')+'/'+value.get('name')+':/children?expand=thumbnails'
                        self.queue.put(dict(url=url,grandid=grandid+1,parent=item['id'],trytime=1))
                else:
                    item['type']='file'
                    item['name']=convert2unicode(value['name'])
                    item['id']=convert2unicode(value['id'])
                    item['size']=humanize.naturalsize(value['size'], gnu=True)
                    item['lastModtime']=humanize.naturaldate(parse(value['lastModifiedDateTime']))
                    item['grandid']=grandid
                    item['parent']=parent
                    items.insert_one(item)
        # Follow server-side pagination at the same depth/parent.
        if data.get('@odata.nextLink'):
            self.queue.put(dict(url=data.get('@odata.nextLink'),grandid=grandid,parent=parent,trytime=1))
    except Exception as e:
        # Best-effort retry: re-queue the same request up to 3 attempts.
        trytime+=1
        print(u'error to opreate GetItem("{}","{}","{}"),try times :{}, reason: {}'.format(url,grandid,parent,trytime,e))
        if trytime<=3:
            self.queue.put(dict(url=url,grandid=grandid,parent=parent,trytime=trytime))
def table_to_natural_text(df: pd.DataFrame, schema: Schema) -> List[str]:
    """Verbalize a table into a list of natural-language sentences.

    With a header, row labels, and exactly one DATE column, sentences are
    anchored on the humanized date ("Today, the X of Y is Z."). With a
    header and row labels but not exactly one date column, plain
    "The X of Y is Z." sentences are produced. Otherwise each row's
    values are joined into one line.
    """
    text = []
    if schema.has_header and schema.has_row_labels:
        time_columns = [
            col for col, typ in schema.data_types.items()
            if typ == DataType.DATE
        ]
        if len(time_columns) == 1:
            time_column = time_columns[0]
            header = None
            for i, row in df.iterrows():
                if i == 0:
                    # First row carries the column labels.
                    header = row
                    continue
                if i < schema.n_header_rows:
                    # Skip any additional header rows.
                    continue
                for col in schema.columns[1:]:
                    if col != time_column:
                        value = value_to_natural_text(row[col])
                        date, _ = infer_type(row[time_column])
                        natural_time = humanize.naturaldate(date)
                        # Pure words ("today") get capitalized; concrete
                        # dates get an "On " prefix instead.
                        if natural_time.isalpha():
                            natural_time = natural_time[0].upper(
                            ) + natural_time[1:]
                        else:
                            natural_time = 'On ' + natural_time
                        text.append('{}, the {} of {} is {}.'.format(
                            natural_time, label_to_natural_text(header[col]),
                            row[schema.columns[0]], value))
        else:
            header = None
            for i, row in df.iterrows():
                if i == 0:
                    header = row
                    continue
                for col in schema.columns[1:]:
                    value = value_to_natural_text(row[col])
                    text.append('The {} of {} is {}.'.format(
                        label_to_natural_text(header[col]),
                        row[schema.columns[0]], value))
    else:
        # No header/labels: emit each row's raw values on one line.
        for _, row in df.iterrows():
            values = []
            for col in schema.columns:
                values.append(value_to_natural_text(row[col]))
            text.append(' '.join(values))
    return text
def view(nick, reverse):
    """Print a user's feed: a header line, then each tweet in two columns
    (natural date, text)."""
    source = twtxt.view(nick, reverse=reverse)
    header = "@{0} - {1}".format(colored.black(source.nick, bold=True),
                                 source.url)
    puts(header)
    for tweet in source.get_tweets():
        when = colored.magenta(humanize.naturaldate(tweet.timestamp))
        puts(columns([when, 10], [tweet.text, 69]))
def timeline(reverse):
    """Print the aggregated timeline: nick, natural date, and text columns."""
    for tweet in twtxt.timeline(reverse=reverse):
        tweet.process_text()
        nick_col = [colored.black(tweet.source.nick, bold=True), 10]
        date_col = [colored.magenta(humanize.naturaldate(tweet.timestamp)), 10]
        puts(columns(nick_col, date_col, [tweet.text, 59]))
async def memberinfo(self, ctx: commands.Context, member: discord.Member = Author):
    """ Get info on a guild member """
    info = {
        "id": member.id,
        "top role": member.top_role.name,
        "joined guild": humanize.naturaldate(member.joined_at),
        "joined discord": humanize.naturaldate(member.created_at),
    }
    paginator = PrologPaginator()
    paginator.recursively_add_dictonary({member.name: info})
    pages = NormalPageSource(paginator.pages)
    await DCMenuPages(pages).start(ctx)
def view(nick, reverse):
    """Show one user's feed with humanized timestamps."""
    feed = twtxt.view(nick, reverse=reverse)
    puts("@{0} - {1}".format(colored.black(feed.nick, bold=True), feed.url))
    for tweet in feed.get_tweets():
        stamp = humanize.naturaldate(tweet.timestamp)
        puts(columns([colored.magenta(stamp), 10], [tweet.text, 69]))
def updateCardDueDate(self, action):
    """Build a markdown chat notification for a Trello card due-date change.

    *action* is a Trello webhook action payload; its data.card entry
    supplies the card short link, name, and new due date (ISO string,
    humanized here via naturaldate).
    """
    data = action["data"]
    context = {
        "card_link": data["card"]["shortLink"],
        "card_name": data["card"]["name"],
        "card_due": naturaldate(parser.parse(data["card"]["due"])),
    }
    payload = u""":incoming_envelope: Card updated: "[{card_name}](https://trello.com/c/{card_link})" **Due Date**: Due {card_due}"""
    return payload.format(**context)
def adj_dates(angle=0, pretty=False):
    """Adjust dates in the horizontal label of a matplotlib plot.

    angle: rotation for the tick labels.
    pretty: False leaves labels alone; True titles-cases humanize's
        naturaldate; a callable is applied to each tick's date.
    """
    xs, _ = plt.xticks()
    labels = None
    if pretty:
        pretty = (lambda x: humanize.naturaldate(x).title()) if pretty is True else pretty
        # BUG FIX: xticks() returns float positions; date.fromordinal
        # requires an int ordinal, so cast before converting.
        labels = [pretty(date.fromordinal(int(x))) for x in xs]
    plt.xticks(xs, labels, rotation=angle)
def updateCardDueDate(self, action):
    """Format a markdown notification for a Trello card whose due date changed.

    Reads the card's short link, name, and ISO due date from the webhook
    *action* payload; the due date is humanized with naturaldate.
    """
    data = action['data']
    context = {
        'card_link': data['card']['shortLink'],
        'card_name': data['card']['name'],
        'card_due': naturaldate(parser.parse(data['card']['due'])),
    }
    payload = u''':incoming_envelope: Card updated: "[{card_name}](https://trello.com/c/{card_link})" **Due Date**: Due {card_due}'''
    return payload.format(**context)
def timeline(reverse):
    """Render the combined timeline, one tweet per line in three columns."""
    entries = twtxt.timeline(reverse=reverse)
    for tweet in entries:
        tweet.process_text()
        stamp = humanize.naturaldate(tweet.timestamp)
        puts(
            columns(
                [colored.black(tweet.source.nick, bold=True), 10],
                [colored.magenta(stamp), 10],
                [tweet.text, 59]))
def AddResource(data):
    """Insert a OneDrive file record into `items`, creating any missing
    ancestor folder records first.

    *data* is a OneDrive item payload; its parentReference path (relative
    to /drive/root:) is walked segment by segment, inserting folder docs
    that are not yet in the collection.
    """
    # Check whether the parent folder chain exists in the database;
    # fetch and insert any ancestors that are missing.
    grand_path=data.get('parentReference').get('path').replace('/drive/root:','')
    if grand_path=='':
        # File sits directly under the drive root.
        parent_id=''
        grandid=0
    else:
        g=GetItemThread(Queue())
        parent_id=data.get('parentReference').get('id')
        # Depth = number of path segments below the root.
        grandid=len(data.get('parentReference').get('path').replace('/drive/root:','').split('/'))-1
        grand_path=grand_path[1:]
        parent_path=''
        pid=''
        for idx,p in enumerate(grand_path.split('/')):
            parent=items.find_one({'name':p,'grandid':idx,'parent':pid})
            if parent is not None:
                pid=parent['id']
                parent_path='/'.join([parent_path,parent['name']])
            else:
                # Unknown ancestor: fetch it by path and insert a folder doc.
                parent_path='/'.join([parent_path,p])
                fdata=g.GetItemByPath(parent_path)
                item={}
                item['type']='folder'
                item['name']=fdata.get('name')
                item['id']=fdata.get('id')
                item['size']=humanize.naturalsize(fdata.get('size'), gnu=True)
                item['lastModtime']=humanize.naturaldate(parse(fdata.get('lastModifiedDateTime')))
                item['grandid']=idx
                item['parent']=pid
                items.insert_one(item)
                pid=fdata.get('id')
    # Insert the file record itself.
    item={}
    item['type']='file'
    item['name']=data.get('name')
    item['id']=data.get('id')
    item['size']=humanize.naturalsize(data.get('size'), gnu=True)
    item['lastModtime']=humanize.naturaldate(parse(data.get('lastModifiedDateTime')))
    item['grandid']=grandid
    item['parent']=parent_id
    items.insert_one(item)
def events_all(self):
    """Return every event, newest first, with humanized time fields added."""
    datemask = "%Y-%m-%d"
    annotated = []
    for event in self.Events.find().sort("date_added", pymongo.DESCENDING):
        occurred = datetime.datetime.strptime(event["date"], datemask)
        event["TimeStamp"] = humanize.naturaltime(
            datetime.datetime.now() - occurred)
        event["ndate"] = humanize.naturaldate(occurred)
        annotated.append(event)
    return annotated
def format_datetime(self, dt):
    """Humanize *dt* relative to ``self.now``.

    Falsy input renders as "". datetimes become a relative phrase, with
    humanize's "X from now" rewritten as "in X"; plain dates become a
    natural date. Other truthy values fall back to str(dt).
    """
    if not dt:
        return ""
    if isinstance(dt, datetime.datetime):
        rv = humanize.naturaltime(self.now - dt)
        if " from now" in rv:
            # len(" from now") == 9; strip it and prefix "in".
            rv = "in {}".format(rv[:-9])
    elif isinstance(dt, datetime.date):
        # Note: checked after datetime, since datetime subclasses date.
        rv = humanize.naturaldate(dt)
    else:
        # BUG FIX: previously any other truthy type raised
        # UnboundLocalError on the return below.
        rv = str(dt)
    return rv
def get_humanized_datetime(datetime_string: str, dest_time_zone_str: Optional[str] = None) -> str:
    """Humanize an ISO-8601 'Z' timestamp, optionally converted to a zone.

    Timestamps on another day become a natural date; today's timestamps
    become a relative time phrase.
    """
    moment = datetime.strptime(datetime_string, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=UTC)
    zone = timezone(dest_time_zone_str) if dest_time_zone_str else UTC
    if dest_time_zone_str:
        moment = moment.astimezone(zone)
    as_date = humanize.naturaldate(moment)
    if as_date == 'today':
        return humanize.naturaltime(datetime.now(zone) - moment)
    return as_date
def GetItem(url, grandid=0, parent=''):
    """Recursively walk a OneDrive children listing, inserting each entry
    into the `items` collection.

    url: Graph children-listing URL; grandid: folder depth;
    parent: id of the enclosing folder ('' at the root).
    Follows @odata.nextLink pagination at the same depth.
    """
    token = GetToken()
    header = {'Authorization': 'Bearer {}'.format(token)}
    r = requests.get(url, headers=header)
    data = json.loads(r.content)
    values = data.get('value')
    if len(values) > 0:
        for value in values:
            # Fields common to files and folders.
            item = {
                'name': convert2unicode(value['name']),
                'id': convert2unicode(value['id']),
                'size': humanize.naturalsize(value['size'], gnu=True),
                'lastModtime': humanize.naturaldate(
                    parse(value['lastModifiedDateTime'])),
                'grandid': grandid,
                'parent': parent,
            }
            if value.get('folder'):
                item['type'] = 'folder'
                # Insert result was previously bound to an unused variable.
                items.insert_one(item)
                if value.get('folder').get('childCount') != 0:
                    # Recurse into the folder's own children listing.
                    url = app_url + '_api/v2.0/me' + value.get(
                        'parentReference').get('path') + '/' + value.get(
                            'name') + ':/children?expand=thumbnails'
                    GetItem(url, grandid + 1, item['id'])
            else:
                item['type'] = 'file'
                items.insert_one(item)
    if data.get('@odata.nextLink'):
        GetItem(data.get('@odata.nextLink'), grandid, parent)
async def stats(ctx, user: discord.Member):
    """Send an embed with the member's name, join date, and time since joining."""
    now = dt.utcnow()
    # naturaltime accepts a number of seconds as well as a timedelta.
    duration = humanize.naturaltime((now - user.joined_at).total_seconds())
    embed = discord.Embed(title=user.name, description='This is you', color=0xFFFF00)
    embed.add_field(name='Name', value=user.name, inline=False)
    embed.add_field(name='Date Joined', value=humanize.naturaldate(user.joined_at), inline=False)
    embed.add_field(name='Joined', value='{}'.format(duration), inline=False)
    await bot.say(embed=embed)
def naturalized_datetime_str(dt: Optional[datetime]) -> str:
    """ Naturalise a datetime object.

    None renders as "never"; datetimes within the last 24 hours become a
    relative phrase, older ones a natural date — both computed in the
    machine's local timezone.
    """
    if dt is None:
        return "never"
    # humanize uses the local now internally, so let's make dt local
    local_timezone = tzlocal.get_localzone()
    local_dt = (
        dt.replace(tzinfo=pytz.utc).astimezone(local_timezone).replace(tzinfo=None)
    )
    # NOTE(review): the comparison assumes the incoming dt is naive UTC —
    # confirm with callers.
    if dt >= datetime.utcnow() - timedelta(hours=24):
        return naturaltime(local_dt)
    else:
        return naturaldate(local_dt)
def adj_dates(angle=0, pretty=False, ax=None):
    """Prettify the date tick labels on *ax* (current axes by default).

    pretty may be True (title-cased naturaldate) or a callable applied
    to each tick's date; angle rotates the labels.
    """
    if ax is None:
        ax = plt.gca()
    ticks = ax.get_xticks()
    labels = None
    if pretty:
        if pretty is True:
            fmt = lambda d: humanize.naturaldate(d).title()
        else:
            fmt = pretty
        labels = [fmt(datetime.date.fromordinal(int(t))) for t in ticks]
    ax.set_xticks(ticks, labels)
    ax.tick_params("x", labelrotation=angle)
def naturaldate_filter(date):
    """Try to use Humanize to show natural date.

    Falls back to dtp_filter when humanize is not installed. The natural
    date is suffixed with the time-of-day via timeonly_filter.
    """
    try:
        import humanize
        # Activate humanize's catalog for the session language
        # (English needs no catalog).
        if session.get('language') != "en":  # pragma: no cover
            humanize.i18n.activate(session.get('language'))
        else:
            humanize.i18n.deactivate()
        date_str = humanize.naturaldate(date)
        # NOTE(review): the parameter shadows datetime.date; `date.today()`
        # resolves to the classmethod on the instance, giving today's
        # midnight — confirm the parameter is always a datetime.
        today = datetime.combine(date.today(), time(0))
        seconds = int((date-today).total_seconds())
        return date_str + ' ' + timeonly_filter(seconds, enable_seconds=False, cycle=True)
    except ImportError:  # pragma: no cover
        return dtp_filter(date)
def value_to_natural_text(value: str) -> str:
    """Render a raw cell value as human-friendly text per its inferred type."""
    text, data_type = infer_type(value)
    if data_type == DataType.INT:
        return str(humanize.intword(text))
    if data_type == DataType.FLOAT:
        return '%.2f' % text
    if data_type == DataType.BOOL:
        return 'true' if text else 'false'
    if data_type == DataType.DATE:
        return str(humanize.naturaldate(text))
    if data_type == DataType.NULL:
        return 'NA'
    return str(text)
def upcoming_events(self):
    """Return events dated today or later, annotated with their user record
    and humanized date fields."""
    datemask = "%Y-%m-%d"
    upcoming = []
    for event in self.Events.find().sort("date_added", pymongo.DESCENDING):
        event["user"] = self.Users.find_one(
            {"username": event["username"]})
        occurs = datetime.datetime.strptime(event["date"], datemask)
        event["TimeStamp"] = humanize.naturaltime(
            datetime.datetime.now() - occurs)
        event["ndate"] = humanize.naturaldate(occurs)
        if occurs >= datetime.datetime.today():
            upcoming.append(event)
    return upcoming
def format_page(self, menu, page: Rule34Post):
    """Build the embed for a single search-result page."""
    embed = BetterEmbed(title=f'Results for : {self.query}', url=page.file_url)
    # Timestamps arrive like "Mon Apr 05 12:00:00 +0000 2021".
    created_at = datetime.strptime(
        page.created_at, "%a %b %d %H:%M:%S %z %Y")  # there must be a format for that
    fields = (
        ('Size', f'{page.width}x{page.height}'),
        ('Creator id', page.creator_ID),
        ('Created at', naturaldate(created_at)),
    )
    embed.set_image(url=page.file_url)
    return embed.add_fields(fields)
def local():
    """Lists synced datasets, stored in the specified path. """
    table = Table(["name", "images", "sync_date", "size"],
                  [Table.L, Table.R, Table.R, Table.R])
    client = _load_client(offline=True)
    for dataset_path in client.list_local_datasets():
        table.add_row({
            "name": dataset_path.name,
            # Count of files found under the dataset directory.
            "images": sum(1 for _ in find_files([dataset_path])),
            # Directory mtime stands in for the last sync time.
            "sync_date": humanize.naturaldate(
                datetime.datetime.fromtimestamp(dataset_path.stat().st_mtime)),
            "size": humanize.naturalsize(
                sum(p.stat().st_size for p in find_files([dataset_path]))),
        })
    print(table)
def user_stats():
    """Return JSON stats: total scrobbles, first/last scrobble, and the
    ten most recent scrobbles."""
    last_ten_scrobbles = Scrobbles.query.order_by(
        Scrobbles.scrobble_timestamp.desc()
    ).limit(10)
    # Single aggregate query for count / min / max over all scrobbles.
    scrobble_count, first_scrobble, last_scrobble = db.session.query(
        sqlalchemy.func.count(Scrobbles.id),
        sqlalchemy.func.min(Scrobbles.scrobble_timestamp),
        sqlalchemy.func.max(Scrobbles.scrobble_timestamp)
    ).one()
    # NOTE(review): first_scrobble is humanized but last_scrobble is
    # returned raw — confirm the asymmetry is intentional.
    return jsonify({
        'stats': {
            'scrobble_count': scrobble_count,
            'first_scrobble': naturaldate(first_scrobble),
            'last_scrobble': last_scrobble,
            'last_ten_scrobbles': [
                scrobble.json() for scrobble in last_ten_scrobbles
            ]
        }
    })
async def get_guild_premium(self, ctx):
    """Shows the premium status of the current guild/server"""
    endsat = await functions.get_prem_endsat(self.bot, ctx.guild.id)
    if endsat is None:
        # Typo fixed in user-facing text ("Somone" -> "Someone").
        description = ("It looks like this server doesn't have "
                       "premium yet. Someone with premium credits "
                       "must use the `redeem` command here to give "
                       "it premium!")
    else:
        # Only compute the humanized values when there is an expiry;
        # previously naturaldate was called even on None.
        now = datetime.datetime.now()
        natural_endsin = humanize.naturaldelta(endsat - now)
        natural_endsat = humanize.naturaldate(endsat)
        description = (
            f"This server has premium until {natural_endsat}, "
            f"which is {natural_endsin} from now.")
    embed = discord.Embed(title="Server Premium Status",
                          description=description,
                          color=bot_config.COLOR)
    await ctx.send(embed=embed)
async def serverinfo(self, ctx, guild=None):
    """ Shows you the guild's informations.

    With no argument, describes the invoking guild; with a guild id,
    describes that guild if the bot can see it.
    """
    guild1 = guild
    if guild1 is None:
        guild1 = ctx.message.guild
    else:
        # get_guild returns None when the bot is not in that guild.
        if self.bot.get_guild(guild) is None:
            return await ctx.send(
                "Bot do not have permission to view that guild")
        else:
            guild1 = self.bot.get_guild(guild)
    # Collect the stats rendered in the embed below.
    categories = len(guild1.categories)
    channels = len(guild1.channels)
    created_at = humanize.naturaldate(guild1.created_at)
    default_role = guild1.default_role.name
    # Feature flags arrive as UPPER_SNAKE strings; render as Title Case.
    features = "\n".join(
        list(
            map(lambda f: f.title(),
                [feature.replace("_", " ") for feature in guild1.features])))
    description = guild1.description
    emoji_limit = guild1.emoji_limit
    emojis_count = len(guild1.emojis)
    guild_id = guild1.id
    guild_name = guild1.name
    guild_owner = guild1.owner
    guild_owner_id = guild1.owner_id
    member_count = guild1.member_count
    embed = discord.Embed(color=self.bot.color)
    embed.set_thumbnail(url=str(guild1.icon_url_as(static_format="png")))
    embed.set_author(name=guild_name)
    embed.add_field(
        name="Infos",
        value=
        f"**Categories Count**: {categories}\n**Channels Count**: {channels}\n**Created_at**: {created_at}\n**Default Role**: {default_role}\n**Emoji Count:** {emojis_count}\n**Features:** \n{features}\n**Description**: {description}\n**Emoji Limit**: {emoji_limit}\n**Guild Id**: {guild_id}\n**Guild Owner**: {guild_owner}\n**Guild Owner UserId**: {guild_owner_id}\n**Member Count**: {member_count}"
    )
    await ctx.reply(embed=embed)
def ta_sha1sum(app, args):
    """Show transactions associated with a sha1sum.

    args.object may be a literal 40-char sha1 or a path to a file whose
    mad record carries the checksum.
    """
    db_t, db_s2t = get_mongo_transact_db(app)
    if len(args.object) == 40 and not os.path.exists(args.object):
        # Treat the argument as a checksum (and not a same-named file).
        sha1sum = args.object
    else:
        madfile = get_mad_file(app, args.object)
        sha1sum = madfile['sha1sum']
    for s2t in db_s2t.find(dict(sha1sum=sha1sum)):
        tra = db_t.find_one(dict(_id=s2t['transaction_id']))
        natime = humanize.naturaldate(tra['time'])
        for io in tra['io']:
            if io['sha1sum'] == sha1sum:
                # Re-join the shell-split command line, truncated to 50 chars.
                ncl = " ".join(shlex.split(tra.get('cl', 'n.a.')))
                if len(ncl) > 50:
                    ncl = ncl[:47] + '...'
                cprint(tra['_id'], color='cyan', end=' (')
                cprint(io['category'], color='yellow', end=') ')
                cprint(natime, color='green', end=": ")
                cprint(ncl)
def index(self):
    """Render the app list page from redis, newest first.

    App ids come from the 'zset.date.app.id' sorted set (reverse date
    order); each app's fields live in an 'app:<id>' hash.
    """
    app_ids = list(map(
        bytes.decode,
        r.zrevrange('zset.date.app.id', 0, -1)
    ))
    # NOTE(review): one hget per field means several round-trips per app;
    # hmget/hgetall would batch them — left as-is here.
    apps = [
        dict(
            name=r.hget('app:'+app_id, 'name').decode(),
            variant=r.hget('app:'+app_id, 'variant').decode(),
            page=r.hget('app:'+app_id, 'page').decode(),
            # Stored as a Unix timestamp string.
            date=humanize.naturaldate(datetime.datetime.fromtimestamp(
                int(r.hget('app:'+app_id, 'date').decode()))),
            size=humanize.naturalsize(
                int(r.hget('app:'+app_id, 'size').decode()), binary=True),
            filename=r.hget('app:'+app_id, 'filename').decode()
        )
        for app_id in app_ids
    ]
    last_updated = humanize.naturaltime(datetime.datetime.fromtimestamp(
        float(r.get('last-updated'))))
    return render_template(
        'applist.html',
        apps=apps,
        last_updated=last_updated
    )
def list(self):
    """List images, sorted by the column named in --sort.

    Builds one row per image with humanized created/size fields; the
    repoDigests column is dropped unless --digests was given.
    """
    images = sorted(
        self.client.images.list(),
        key=operator.attrgetter(self._args.sort))
    if not images:
        return

    rows = list()
    for image in images:
        fields = dict(image)
        fields.update({
            'created': humanize.naturaldate(podman.datetime_parse(image.created)),
            'size': humanize.naturalsize(int(image.size)),
            'repoDigests': ' '.join(image.repoDigests),
        })
        # NOTE(review): each repoTag overwrites name/tag, so only the last
        # tag is shown for the row — confirm that is intended.
        for r in image.repoTags:
            name, tag = r.split(':', 1)
            fields.update({
                'name': name,
                'tag': tag,
            })
        rows.append(fields)

    if not self._args.digests:
        del self.columns['repoDigests']

    with Report(self.columns, heading=self._args.heading) as report:
        report.layout(
            rows, self.columns.keys(), truncate=self._args.truncate)
        for row in rows:
            report.row(**row)
def snap_details(snap_name):
    """
    A view to display the snap details page for specific snaps.

    This queries the snapcraft API (api.snapcraft.io) and passes
    some of the data through to the snap-details.html template,
    with appropriate sanitation.
    """
    error_info = {}
    status_code = 200

    try:
        details = api.get_snap_details(snap_name)
    except ApiTimeoutError as api_timeout_error:
        flask.abort(504, str(api_timeout_error))
    except ApiResponseDecodeError as api_response_decode_error:
        flask.abort(502, str(api_response_decode_error))
    except ApiResponseErrorList as api_response_error_list:
        if api_response_error_list.status_code == 404:
            flask.abort(404, "No snap named {}".format(snap_name))
        else:
            # NOTE(review): `.key()` (not `.keys()`) looks suspicious —
            # verify against the errors container's API.
            error_messages = ", ".join(
                api_response_error_list.errors.key()
            )
            flask.abort(502, error_messages)
    except ApiResponseError as api_response_error:
        flask.abort(502, str(api_response_error))
    except ApiError as api_error:
        flask.abort(502, str(api_error))

    # When removing all the channel maps of an existing snap, the API still
    # responds that the snap exists, with data. Return a 404 when there are
    # no channel maps, to avoid an error.
    # For example: mir-kiosk-browser
    if not details.get("channel-map"):
        flask.abort(404, "No snap named {}".format(snap_name))

    formatted_paragraphs = logic.split_description_into_paragraphs(
        details["snap"]["description"]
    )
    channel_maps_list = logic.convert_channel_maps(
        details.get("channel-map")
    )
    latest_channel = logic.get_last_updated_version(
        details.get("channel-map")
    )
    last_updated = latest_channel["created-at"]
    last_version = latest_channel["version"]
    binary_filesize = latest_channel["download"]["size"]

    country_metric_name = "weekly_installed_base_by_country_percent"
    os_metric_name = "weekly_installed_base_by_operating_system_normalized"

    webapp_config = flask.current_app.config.get("WEBAPP_CONFIG")

    if "STORE_QUERY" not in webapp_config:
        # Query both metrics for the latest processed day only.
        end = metrics_helper.get_last_metrics_processed_date()

        metrics_query_json = [
            metrics_helper.get_filter(
                metric_name=country_metric_name,
                snap_id=details["snap-id"],
                start=end,
                end=end,
            ),
            metrics_helper.get_filter(
                metric_name=os_metric_name,
                snap_id=details["snap-id"],
                start=end,
                end=end,
            ),
        ]

        try:
            metrics_response = api.get_public_metrics(
                snap_name, metrics_query_json
            )
        except ApiError as api_error:
            # Metrics failures degrade gracefully: record the error info
            # and render the page without metrics.
            status_code, error_info = _handle_errors(api_error)
            metrics_response = None

        os_metrics = None
        country_devices = None
        if metrics_response:
            oses = metrics_helper.find_metric(
                metrics_response, os_metric_name
            )
            os_metrics = metrics.OsMetric(
                name=oses["metric_name"],
                series=oses["series"],
                buckets=oses["buckets"],
                status=oses["status"],
            )

            territories = metrics_helper.find_metric(
                metrics_response, country_metric_name
            )
            country_devices = metrics.CountryDevices(
                name=territories["metric_name"],
                series=territories["series"],
                buckets=territories["buckets"],
                status=territories["status"],
                private=False,
            )
    else:
        os_metrics = None
        country_devices = None

    # filter out banner and banner-icon images from screenshots
    screenshots = [
        m["url"]
        for m in details["snap"]["media"]
        if m["type"] == "screenshot" and "banner" not in m["url"]
    ]
    icons = [
        m["url"] for m in details["snap"]["media"] if m["type"] == "icon"
    ]

    # until default tracks are supported by the API we special case node
    # to use 10, rather than latest
    default_track = "10" if details["name"] == "node" else "latest"

    lowest_risk_available = logic.get_lowest_available_risk(
        channel_maps_list, default_track
    )
    confinement = logic.get_confinement(
        channel_maps_list, default_track, lowest_risk_available
    )

    context = {
        # Data direct from details API
        "snap_title": details["snap"]["title"],
        "package_name": details["name"],
        "icon_url": icons[0] if icons else None,
        "version": last_version,
        "license": details["snap"]["license"],
        "publisher": details["snap"]["publisher"]["display-name"],
        "username": details["snap"]["publisher"]["username"],
        "screenshots": screenshots,
        "prices": details["snap"]["prices"],
        "contact": details["snap"].get("contact"),
        "website": details["snap"].get("website"),
        "summary": details["snap"]["summary"],
        "description_paragraphs": formatted_paragraphs,
        "channel_map": channel_maps_list,
        "has_stable": logic.has_stable(channel_maps_list),
        "developer_validation": details["snap"]["publisher"]["validation"],
        "default_track": default_track,
        "lowest_risk_available": lowest_risk_available,
        "confinement": confinement,
        # Transformed API data
        "filesize": humanize.naturalsize(binary_filesize),
        "last_updated": (humanize.naturaldate(parser.parse(last_updated))),
        "last_updated_raw": last_updated,
        # Data from metrics API
        "countries": (
            country_devices.country_data if country_devices else None
        ),
        "normalized_os": os_metrics.os if os_metrics else None,
        # Context info
        "is_linux": (
            "Linux" in flask.request.headers.get("User-Agent", "")
            and "Android" not in flask.request.headers.get("User-Agent", "")
        ),
        "error_info": error_info,
    }

    return (
        flask.render_template("store/snap-details.html", **context),
        status_code,
    )
def format_date(date):
    """Format a Gerrit timestamp as 'natural date (natural delta)'."""
    as_date = humanize.naturaldate(parse_gerrit_time(date))
    as_delta = humanize.naturaldelta(parse_gerrit_time(date))
    return "%s (%s)" % (as_date, as_delta)
def slang_date(self):
    """Return a human "slang" rendering of this date (e.g. "today")."""
    local = self.datetime(naive=True, to_timezone=self.local_timezone)
    return humanize.naturaldate(local)
def human_date(self):
    """Return the creation date in human-friendly form."""
    created = self.created_date
    return humanize.naturaldate(created)
def humandate(value):
    """Template filter: render *value* as a natural date."""
    natural = humanize.naturaldate(value)
    return natural