def friendly_time(jinja_ctx, context, **kw):
    """Format a timestamp as a human-readable relative time ("x minutes ago").

    * ``context`` must be a datetime object.
    * Optional keyword ``source_timezone`` is a timezone name (string); the
      source datetime is assumed to be in that timezone. Defaults to UTC.
    """
    value = context
    if not value:
        return ""
    tz_name = kw.get("source_timezone", None)
    tz = timezone(tz_name) if tz_name else datetime.timezone.utc
    # Humanize the delta between the value and the current UTC moment.
    localized = value.astimezone(tz)
    reference = Arrow.fromdatetime(datetime.datetime.utcnow())
    return Arrow.fromdatetime(localized).humanize(reference)
def __init__(self, source, n, str_type, count_type,
             start_date=Arrow(1970, 1, 1).datetime,
             stop_date=None):
    """Build a Vector over comments from *source*.

    :param source: collection name in the Comment database
    :param n: forwarded to the parent Vector constructor
    :param str_type: a StringLike subclass used to interpret bodies
    :param count_type: counting strategy, stored on the instance
    :param start_date: inclusive lower bound (datetime); defaults to the epoch
    :param stop_date: inclusive upper bound (datetime); defaults to "now".
        Bug fix: the original default ``utcnow().datetime`` was evaluated once
        at import time, freezing "now" for the life of the process; it is now
        resolved per call via a ``None`` sentinel (backward-compatible).
    :raises ValueError: unknown source collection, or invalid str_type
    :raises TypeError: start/stop bounds that are not datetime objects
    """
    if stop_date is None:
        stop_date = utcnow().datetime
    super(Vector, self).__init__(n=n, str_type=str_type)
    if source not in c['Comment'].collection_names():
        raise ValueError(
            "{} is not a collection in the Comment database".format(
                source))
    if str_type not in StringLike.__subclasses__():
        raise ValueError(
            "{} is not a valid string type class".format(str_type))
    for date in [start_date, stop_date]:
        if not isinstance(date, datetime):
            raise TypeError(
                "{} is not a datetime.datetime object".format(date))
    self.count_type = count_type
    self.start_date = Arrow.fromdatetime(start_date).datetime
    self.stop_date = Arrow.fromdatetime(stop_date).datetime
    self.body = c['Body'][source]
    self.cache = c['BodyCache'][source]
    self.comment = c['Comment'][source]
    self.__fromdb__()
def test_window_period_weeks(self):
    """Window of -1 week / +6 weeks around a timepoint datetime."""
    period = WindowPeriod(rlower=relativedelta(weeks=1), rupper=relativedelta(weeks=6))
    timepoint = Arrow.fromdatetime(datetime(2001, 12, 8), tzinfo="utc").datetime
    expected_lower = Arrow.fromdatetime(datetime(2001, 12, 1), tzinfo="utc").datetime
    expected_upper = Arrow.fromdatetime(
        datetime(2002, 1, 19, 23, 59), tzinfo="utc"
    ).datetime
    self.assertEqual(period.get_window(timepoint).lower, expected_lower)
    self.assertEqual(period.get_window(timepoint).upper, expected_upper)
def test_window_period_days(self):
    """Window of +0/+6 days: lower equals the timepoint, checked by index and attribute."""
    period = WindowPeriod(rlower=relativedelta(days=0), rupper=relativedelta(days=6))
    timepoint = Arrow.fromdatetime(datetime(2001, 12, 1), tzinfo="utc").datetime
    expected_upper = Arrow.fromdatetime(
        datetime(2001, 12, 7, 23, 59), tzinfo="utc"
    ).datetime
    # Lower bound, via tuple indexing and via the named attribute.
    self.assertEqual(period.get_window(timepoint)[0], timepoint)
    self.assertEqual(period.get_window(timepoint).lower, timepoint)
    # Upper bound, via tuple indexing and via the named attribute.
    self.assertEqual(period.get_window(timepoint)[1], expected_upper)
    self.assertEqual(period.get_window(timepoint).upper, expected_upper)
def nightscout_to_dexcom(self, sgvs):
    """Convert nightscout SGV records into Dexcom-share style dicts.

    Each record's timestamp becomes an epoch-milliseconds '/Date(...)/' token;
    DT/ST include the UTC offset ('XSSSZ'), WT does not ('XSSS').

    Performance: the original re-ran ``Arrow.fromdatetime(sgv.date)`` (and the
    'XSSSZ' formatting) for every date field of every record; each is now
    computed once per record.
    """
    def _entry(sgv):
        moment = Arrow.fromdatetime(sgv.date)
        with_offset = '/Date(' + moment.format('XSSSZ') + ')/'
        without_offset = '/Date(' + moment.format('XSSS') + ')/'
        return {
            'DT': with_offset,
            'ST': with_offset,
            'WT': without_offset,
            'Value': int(sgv.sgv),
            'Trend': DexcomBridge.convert_slope(sgv.direction)
        }
    return [_entry(sgv) for sgv in sgvs]
def parse_user(response_content: str) -> User:
    """Parse a JSON response string into a User object."""
    data = json.loads(response_content)['data']['user']
    legacy = data['legacy']
    return User(
        created_at=Arrow.fromdatetime(parser.parse(legacy['created_at'])),
        id_str=data['id'],
        rest_id_str=data['rest_id'],
        default_profile=legacy['default_profile'],
        default_profile_image=legacy['default_profile_image'],
        description=legacy['description'],
        favourites_count=legacy['favourites_count'],
        followers_count=legacy['followers_count'],
        friends_count=legacy['friends_count'],
        has_custom_timelines=legacy['has_custom_timelines'],
        listed_count=legacy['listed_count'],
        location=legacy['location'],
        media_count=legacy['media_count'],
        name=legacy['name'],
        pinned_tweet_ids_str=legacy['pinned_tweet_ids_str'],
        # Banner is optional in the payload; fall back to an empty string.
        profile_banner_url=legacy.get('profile_banner_url', ''),
        profile_image_url_https=legacy['profile_image_url_https'],
        protected=legacy['protected'],
        screen_name=legacy['screen_name'],
        statuses_count=legacy['statuses_count'],
        verified=legacy['verified'])
def _get_queryset(self):
    """Build the paginated dependency queryset for this view.

    Annotates each dependency with its usage count, prefetches the last
    year of snapshots, and applies the optional ``q`` / ``type`` GET filters.
    """
    params = self.request.GET
    self.per_page = params.get("per_page", self.per_page)
    # Only chart snapshots from the last year.
    time_axis_origin = Arrow.fromdatetime(timezone.now()).shift(years=-1).datetime
    snapshot_qs = models.DependencySnapshot.objects.filter(
        timestamp__gte=time_axis_origin
    ).order_by("timestamp")
    queryset = (
        models.Dependency.objects.annotate(dep_count=Count("depusage"))
        .prefetch_related(Prefetch("snapshots", queryset=snapshot_qs))
        .prefetch_related("depusage")
        .all()
        .order_by("-dep_count")
    )
    if "q" in params:
        # Match either the dependency name or a consuming service's name.
        queryset = queryset.filter(
            Q(name__icontains=params["q"])
            | Q(depusage__repo__services__name__icontains=params["q"])
        )
    if "type" in params:
        queryset = queryset.filter(type=params["type"])
    paginator = Paginator(queryset, self.per_page)
    return paginator.get_page(params.get("page", 1))
def _tweet_dict_to_tweet_object(tweet) -> Tweet:
    """Map a raw tweet dict from the API response onto a Tweet object."""
    user_data = tweet['user_data']
    entities = tweet['entities']
    get_default = BaseTweetParser._get_default_string_value_from_dict
    return Tweet(
        created_at=Arrow.fromdatetime(parser.parse(tweet['created_at'])),
        id_str=tweet['id_str'],
        conversation_id_str=tweet['conversation_id_str'],
        # there was the problem with pandas exporting because \r is old version of \n
        full_text=tweet['full_text'].replace('\r', '\n'),
        lang=tweet['lang'],
        favorited=tweet['favorited'],
        retweeted=tweet['retweeted'],
        retweet_count=tweet['retweet_count'],
        favorite_count=tweet['favorite_count'],
        reply_count=tweet['reply_count'],
        quote_count=tweet['quote_count'],
        quoted_status_id_str=get_default(tweet, 'quoted_status_id_str'),
        quoted_status_short_url=get_default(tweet, 'quoted_status_short_url'),
        quoted_status_expand_url=get_default(tweet, 'quoted_status_expand_url'),
        user_id_str=user_data['id_str'],
        user_name=user_data['screen_name'],
        user_full_name=user_data['name'],
        user_verified=user_data['verified'],
        in_reply_to_status_id_str=_default_string_value(tweet['in_reply_to_status_id_str'], ''),
        in_reply_to_user_id_str=_default_string_value(tweet['in_reply_to_user_id_str'], ''),
        media=BaseTweetParser._get_media_list_from_tweet_object(tweet),
        hashtags=['#' + tag['text'] for tag in entities['hashtags']],
        mentions=[mention['screen_name'] for mention in entities['user_mentions']],
        urls=[link['url'] for link in entities['urls']]
    )
async def update_status(self):
    """Set the bot presence to a humanized countdown until the next feed run."""
    next_run = self.loop.next_iteration
    if not next_run:
        # Loop is not scheduled; leave the presence untouched.
        return
    when = Arrow.fromdatetime(self.loop.next_iteration)
    activity = discord.Game(f'next feed {when.humanize()}')
    await self.bot.change_presence(activity=activity)
def format_dt_tz(now: t.Optional[date_type] = None, **kw):
    """Format datetime in a certain timezone.

    Keyword args: ``timezone`` (source tz name, default UTC),
    ``target_timezone``, ``locale``, ``format`` (Arrow format string),
    ``show_timezone`` (append the tz to the output).
    """
    if not now:
        return ""
    source_tz = kw.get("timezone", None)
    source_tz = timezone(source_tz) if source_tz else datetime.timezone.utc
    locale = kw.get("locale", "en_US")
    moment = Arrow.fromdatetime(now, tzinfo=source_tz)
    # Convert to the target timezone when one is requested.
    target_tz = kw.get("target_timezone")
    if target_tz:
        moment = moment.to(target_tz)
    else:
        target_tz = moment.tzinfo
    format_ = kw.get("format", "YYYY-MM-DD HH:mm")
    text = moment.format(format_, locale=locale)
    if kw.get("show_timezone"):
        text = text + " ({})".format(target_tz)
    return text
def filter_datetime(jinja_ctx, context, **kw):
    """Format datetime in a certain timezone.

    Keyword args: ``timezone`` (source tz name, default UTC),
    ``target_timezone``, ``locale``, ``format`` (Arrow format string),
    ``show_timezone`` (append the tz to the output).
    """
    now = context
    if not now:
        return ""
    tz = kw.get("timezone", None)
    if tz:
        tz = timezone(tz)
    else:
        tz = datetime.timezone.utc
    locale = kw.get("locale", "en_US")
    arrow = Arrow.fromdatetime(now, tzinfo=tz)
    # Convert to target timezone
    tz = kw.get("target_timezone")
    if tz:
        arrow = arrow.to(tz)
    else:
        tz = arrow.tzinfo
    # Renamed from ``format`` to avoid shadowing the builtin; matches the
    # ``format_`` naming already used by the sibling format_dt_tz helper.
    format_ = kw.get("format", "YYYY-MM-DD HH:mm")
    text = arrow.format(format_, locale=locale)
    if kw.get("show_timezone"):
        text = text + " ({})".format(tz)
    return text
def __init__(self, source, n, str_type, count_type,
             start_date=Arrow(1970, 1, 1).datetime,
             stop_date=None):
    """Build a Vector over comments from *source*.

    :param source: collection name in the Comment database
    :param n: forwarded to the parent Vector constructor
    :param str_type: a StringLike subclass used to interpret bodies
    :param count_type: counting strategy, stored on the instance
    :param start_date: inclusive lower bound (datetime); defaults to the epoch
    :param stop_date: inclusive upper bound (datetime); defaults to "now".
        Bug fix: the original default ``utcnow().datetime`` was evaluated once
        at import time, freezing "now" for the life of the process; it is now
        resolved per call via a ``None`` sentinel (backward-compatible).
    :raises ValueError: unknown source collection, or invalid str_type
    :raises TypeError: start/stop bounds that are not datetime objects
    """
    if stop_date is None:
        stop_date = utcnow().datetime
    super(Vector, self).__init__(n=n, str_type=str_type)
    if source not in c['Comment'].collection_names():
        raise ValueError("{} is not a collection in the Comment database".format(source))
    if str_type not in StringLike.__subclasses__():
        raise ValueError("{} is not a valid string type class".format(str_type))
    for date in [start_date, stop_date]:
        if not isinstance(date, datetime):
            raise TypeError("{} is not a datetime.datetime object".format(date))
    self.count_type = count_type
    self.start_date = Arrow.fromdatetime(start_date).datetime
    self.stop_date = Arrow.fromdatetime(stop_date).datetime
    self.body = c['Body'][source]
    self.cache = c['BodyCache'][source]
    self.comment = c['Comment'][source]
    self.__fromdb__()
def arrow(date=None, tz=None):
    """Coerce *date*/*tz* into an Arrow instance.

    * No arguments: current UTC time (or current time in ``tz``).
    * ``date`` only: if it parses as a timezone expression, return "now" in
      that timezone; otherwise treat it as a datetime value.
    * Both given: return ``date`` localized to ``tz``.
    """
    if date is None:
        return utcnow() if tz is None else now(tz)
    else:
        if tz is None:
            try:
                tz = parser.TzinfoParser.parse(date)
                return now(tz)
            except Exception:
                # Narrowed from a bare ``except:`` (which also swallowed
                # KeyboardInterrupt/SystemExit); any parse failure means
                # ``date`` is a datetime-like value, not a tz expression.
                return Arrow.fromdatetime(date)
        else:
            tz = parser.TzinfoParser.parse(tz)
            return Arrow.fromdatetime(date, tz)
def test_perform_block_create(app):
    """Creating a block stamps ``last_modified`` and the content is readable back."""
    content = 'foo'
    # Freeze time so the recorded modification timestamp is predictable.
    with freeze_time('2012-01-01') as frozen_datetime:
        eff = app.perform_block_create(EBlockCreate(content))
        block_id = perform_sequence([], eff)
        assert app.last_modified == Arrow.fromdatetime(frozen_datetime())
    # The block can be read back via the returned id.
    eff = app.perform_block_read(EBlockRead(block_id))
    block = perform_sequence([], eff)
    assert block['content'] == content
def test_visit_lower_upper(self):
    """Visit window: lower equals the timepoint, upper is +6 days at 23:59."""
    visit = Visit(
        code="1000",
        rbase=relativedelta(days=0),
        rlower=relativedelta(days=0),
        rupper=relativedelta(days=6),
        timepoint=1,
    )
    visit.timepoint_datetime = Arrow.fromdatetime(
        datetime(2001, 12, 1), tzinfo="utc"
    ).datetime
    expected_lower = Arrow.fromdatetime(datetime(2001, 12, 1), tzinfo="utc").datetime
    expected_upper = Arrow.fromdatetime(
        datetime(2001, 12, 7, 23, 59), tzinfo="utc"
    ).datetime
    self.assertEqual(visit.dates.lower, expected_lower)
    self.assertEqual(visit.dates.upper, expected_upper)
def test_perform_user_vlob_delete(app):
    """Deleting a user vlob stamps ``last_modified``, evicts the cache, and 404s afterwards."""
    eff = app.perform_user_vlob_update(EUserVlobUpdate(1, 'foo'))
    perform_sequence([], eff)
    # Delete from new user vlobs
    with freeze_time('2012-01-01') as frozen_datetime:
        eff = app.perform_user_vlob_delete(EUserVlobDelete())
        perform_sequence([], eff)
        assert app.last_modified == Arrow.fromdatetime(frozen_datetime())
    # Delete in cache
    app.user_vlob_cache[2] = {'foo': 'bar'}
    assert app.user_vlob_cache.currsize == 1
    with freeze_time('2012-01-01') as frozen_datetime:
        eff = app.perform_user_vlob_delete(EUserVlobDelete(2))
        perform_sequence([], eff)
        assert app.last_modified == Arrow.fromdatetime(frozen_datetime())
    assert app.user_vlob_cache.currsize == 0
    # Not found
    with pytest.raises(UserVlobNotFound):
        eff = app.perform_user_vlob_delete(EUserVlobDelete(2))
        perform_sequence([], eff)
async def list_events(
    status: Optional[EventPublicationStatus] = None,
    frm: Optional[datetime] = None,
    to: Optional[datetime] = None,
):
    """Fetch and display events, optionally filtered by status and date range."""
    frm = Arrow.fromdatetime(frm) if frm else None
    to = Arrow.fromdatetime(to) if to else None
    if status is None:
        events = await get_all_events(from_date=frm, to_date=to)
    elif status == EventPublicationStatus.WAITING:
        events = await list_unpublished_events(frm=frm, to=to)
    else:
        events = await events_with_status([status], from_date=frm, to_date=to)
    events = list(events)
    if not events:
        if status is None:
            click.echo("No event found")
        else:
            click.echo(f"No event found with status: {status.name}")
        return
    show_events(events)
def test_perform_block_delete(app):
    """Deleting a block stamps ``last_modified``, evicts the cache, and 404s afterwards."""
    content = 'foo'
    eff = app.perform_block_create(EBlockCreate(content))
    block_id = perform_sequence([], eff)
    # Delete from new blocks
    with freeze_time('2012-01-01') as frozen_datetime:
        eff = app.perform_block_delete(EBlockDelete(block_id))
        perform_sequence([], eff)
        assert app.last_modified == Arrow.fromdatetime(frozen_datetime())
    # Delete in cache
    app.block_cache[block_id] = {'foo': 'bar'}
    assert app.block_cache.currsize == 1
    with freeze_time('2012-01-01') as frozen_datetime:
        eff = app.perform_block_delete(EBlockDelete(block_id))
        perform_sequence([], eff)
        assert app.last_modified == Arrow.fromdatetime(frozen_datetime())
    assert app.block_cache.currsize == 0
    # Not found
    with pytest.raises(BlockNotFound):
        eff = app.perform_block_delete(EBlockDelete(block_id))
        perform_sequence([], eff)
async def notify(channel: discord.TextChannel, last_notification: t.Optional[Arrow]) -> t.Optional[Arrow]:
    """
    Send a message in `channel` notifying about a lack of available help channels.

    If a notification was sent, return the time at which the message was sent.
    Otherwise, return None.

    Configuration:

    * `HelpChannels.notify` - toggle notifications
    * `HelpChannels.notify_minutes` - minimum interval between notifications
    * `HelpChannels.notify_roles` - roles mentioned in notifications
    """
    if not constants.HelpChannels.notify:
        return

    log.trace("Notifying about lack of channels.")

    if last_notification:
        # Bug fix: ``timedelta.seconds`` only carries the sub-day remainder
        # (0-86399), so after more than 24h of silence the interval check
        # could wrongly suppress the notification; ``total_seconds()`` gives
        # the full elapsed interval.
        elapsed = (arrow.utcnow() - last_notification).total_seconds()
        minimum_interval = constants.HelpChannels.notify_minutes * 60
        should_send = elapsed >= minimum_interval
    else:
        should_send = True

    if not should_send:
        log.trace(
            "Notification not sent because it's too recent since the previous one."
        )
        return

    try:
        log.trace("Sending notification message.")

        mentions = " ".join(f"<@&{role}>" for role in constants.HelpChannels.notify_roles)
        allowed_roles = [
            discord.Object(id_) for id_ in constants.HelpChannels.notify_roles
        ]

        message = await channel.send(
            f"{mentions} A new available help channel is needed but there "
            f"are no more dormant ones. Consider freeing up some in-use channels manually by "
            f"using the `{constants.Bot.prefix}dormant` command within the channels.",
            allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles))

        return Arrow.fromdatetime(message.created_at)
    except Exception:
        # Handle it here cause this feature isn't critical for the functionality of the system.
        log.exception(
            "Failed to send notification about lack of dormant channels!")
def get_dates(_):
    """Return the current week's (date, locations) pairs.

    NOTE(review): the result is a one-element list whose single item is a
    *generator* of (Arrow, locations) tuples -- preserved as-is, since the
    comment below suggests callers expect a "list of weeks" shape.
    """
    days = parse_days(get_content())
    # we used to be able to support previous weeks; each item is a week
    week = (
        (Arrow.fromdatetime(parse(day)), parse_locations(locations))
        for day, locations in days
    )
    return [week]
async def list_publications(
    status: PublicationStatus = None,
    frm: Optional[datetime] = None,
    to: Optional[datetime] = None,
):
    """Fetch and display publications, optionally filtered by status and date range."""
    frm = Arrow.fromdatetime(frm) if frm else None
    to = Arrow.fromdatetime(to) if to else None
    if status is None:
        publications = await get_all_publications(from_date=frm, to_date=to)
    else:
        publications = await publications_with_status(status, from_date=frm, to_date=to)
    if not publications:
        if status is None:
            click.echo("No publication found")
        else:
            click.echo(f"No publication found with status: {status.name}")
        return
    show_publications(publications)
def friendly_time(now: datetime.datetime, tz: t.Union[str, None]) -> str:
    """Format timestamp in human readable format.

    :param now: source datetime, assumed to be in timezone ``tz``
    :param tz: timezone name as a string, or None for UTC
    :return: relative-time string, or "" for a falsy timestamp
    """
    if not now:
        return ""
    source_tz = timezone(tz) if tz else datetime.timezone.utc
    # Humanize the distance between the timestamp and the current UTC moment.
    localized = now.astimezone(source_tz)
    reference = Arrow.fromdatetime(datetime.datetime.utcnow())
    return Arrow.fromdatetime(localized).humanize(reference)
def test_perform_vlob_create(app):
    """Creating a vlob stamps ``last_modified`` and returns id plus trust seeds."""
    blob = 'foo'
    # Freeze time so the recorded modification timestamp is predictable.
    with freeze_time('2012-01-01') as frozen_datetime:
        eff = app.perform_vlob_create(EVlobCreate(blob))
        vlob = perform_sequence([], eff)
        assert app.last_modified == Arrow.fromdatetime(frozen_datetime())
    vlob_id = vlob['id']
    read_trust_seed = vlob['read_trust_seed']
    assert sorted(list(
        vlob.keys())) == ['id', 'read_trust_seed', 'write_trust_seed']
    # The blob can be read back with the returned id and read trust seed.
    eff = app.perform_vlob_read(EVlobRead(vlob_id, read_trust_seed))
    vlob = perform_sequence([], eff)
    assert vlob['blob'] == blob
def test_visit_datetime(self):
    """Assigning a naive datetime stores it as a UTC timepoint."""
    visit = Visit(
        code="1000",
        rbase=relativedelta(days=0),
        rlower=relativedelta(days=0),
        rupper=relativedelta(days=6),
        timepoint=1,
    )
    visit.timepoint_datetime = datetime(2001, 12, 1)
    expected = Arrow.fromdatetime(datetime(2001, 12, 1), tzinfo="utc")
    self.assertEqual(visit.timepoint_datetime, expected)
def test_perform_user_vlob_update(app):
    """Updating a user vlob stamps ``last_modified`` and stores the latest blob."""
    # Freeze time so the recorded modification timestamp is predictable.
    with freeze_time('2012-01-01') as frozen_datetime:
        eff = app.perform_user_vlob_update(EUserVlobUpdate(1, 'foo'))
        perform_sequence([], eff)
        assert app.last_modified == Arrow.fromdatetime(frozen_datetime())
    blob = 'bar'
    eff = app.perform_user_vlob_update(EUserVlobUpdate(1, blob))
    perform_sequence([], eff)
    # Read back: the latest blob wins and the version stays at 1.
    eff = app.perform_user_vlob_read(EUserVlobRead())
    user_vlob = perform_sequence([], eff)
    assert sorted(list(user_vlob.keys())) == ['blob', 'version']
    assert user_vlob['blob'] == blob
    assert user_vlob['version'] == 1
def test_(self):
    """Smoke test: AppointmentCreator resolves an appt_datetime for an always-open facility."""
    visit = Visit(code='1000', timepoint=0)
    facility = Facility(name='pharmacy', days=[MO, TU, WE, TH, FR, SA, SU])
    creator = AppointmentCreator(
        subject_identifier='12345',
        suggested_datetime=Arrow.fromdatetime(datetime(2017, 1, 1)),
        visit=visit,
        facility=facility,
        visit_schedule_name='pharmacy',
        schedule_name='pharmacy',
    )
    print(creator.appointment.appt_datetime)
def arrow(date=None, tz=None):
    """Coerce *date*/*tz* into an Arrow instance.

    * No arguments: current UTC time (or current time in ``tz``).
    * ``date`` only: a timezone expression yields "now" in that zone; a
      numeric value is treated as a UTC timestamp; anything else as a datetime.
    * Both given: return ``date`` localized to ``tz``.
    """
    if date is None:
        return utcnow() if tz is None else now(tz)
    else:
        if tz is None:
            try:
                tz = parser.TzinfoParser.parse(date)
                return now(tz)
            except Exception:
                # Narrowed from a bare ``except:`` (which also swallowed
                # KeyboardInterrupt/SystemExit); a parse failure just means
                # ``date`` is not a timezone expression.
                pass
            if isinstance(date, (float, int)):
                return Arrow.utcfromtimestamp(date)
            return Arrow.fromdatetime(date)
        else:
            tz = parser.TzinfoParser.parse(tz)
            return Arrow.fromdatetime(date, tz)
def civil_twilight(date, lon, lat):
    """Return the evening civil twilight time as a `datetime.datetime` in UTC.

    :param date: the `datetime.date` to calculate for
    :param lon: longitude of the location
    :param lat: latitude of the location

    Evening civil twilight is defined as ending when the geometric centre of
    the sun is 6 degrees below the horizon.
    """
    observer = ephem.Observer()
    observer.date = date.strftime("%Y/%m/%d")
    observer.lon = force_str(lon)
    observer.lat = force_str(lat)
    observer.horizon = force_str("-6")
    setting = observer.next_setting(ephem.Sun(), use_center=True)
    return Arrow.fromdatetime(setting.datetime()).datetime
def test_json(self, mocked_now):
    """The stopwatch endpoint reports elapsed time relative to a patched 'now'."""
    # Patch "now" inside the Stopwatch object.
    sao_paulo = gettz('America/Sao_Paulo')
    fixed_now = datetime(2016, 4, 29, hour=15, minute=38, second=8, tzinfo=sao_paulo)
    mocked_now.return_value = Arrow.fromdatetime(fixed_now)
    # Issue the request and decode the JSON payload.
    resp = self.app.get('/api/stopwatch/')
    payload = loads(resp.data.decode('utf-8'))
    # Every expected component must be present with the expected value.
    expected = {'days': 11, 'hours': 16, 'minutes': 1, 'seconds': 8}
    with self.subTest():
        for key, value in expected.items():
            self.assertIn(key, payload)
            self.assertEqual(payload[key], value)
async def update_message_caches(message: discord.Message) -> None:
    """Checks the source of new content in a help channel and updates the appropriate cache."""
    channel = message.channel
    log.trace(f"Checking if #{channel} ({channel.id}) has had a reply.")
    claimant_id = await _caches.claimants.get(channel.id)
    if not claimant_id:
        # The mapping for this channel doesn't exist, we can't do anything.
        return
    # datetime.timestamp() would assume it's local, despite d.py giving a (naïve) UTC time.
    timestamp = Arrow.fromdatetime(message.created_at).timestamp()
    # Route the timestamp into whichever cache matches the message author.
    if message.author.id == claimant_id:
        target_cache = _caches.claimant_last_message_times
    else:
        target_cache = _caches.non_claimant_last_message_times
    await target_cache.set(channel.id, timestamp)
def is_icc_registered_site(report_datetime=None, report_date=None, site=None):
    """Returns True if site is an open intervention site or raises.

    Raises if NOT an intervention site or if no ICC registration form was
    submitted by this report_datetime / report_date.

    NOTE(review): if neither report_datetime nor report_date is given,
    ``report_date_utc`` is never bound and the lookup below raises
    NameError -- confirm callers always pass one of the two.
    """
    if report_datetime:
        report_date_utc = Arrow.fromdatetime(report_datetime).date()
    elif report_date:
        report_date_utc = Arrow.fromdate(report_date).date()
    site = site or Site.objects.get_current()
    if is_intervention_site_or_raise(site=site):
        model_cls = django_apps.get_model("inte_prn.integratedcareclinicregistration")
        try:
            model_cls.objects.get(site=site, date_opened__lte=report_date_utc)
        except ObjectDoesNotExist:
            raise InterventionSiteNotRegistered(
                f"Site's ICC registration not found. Got `{site.name}`."
            )
    return True
def sql2py(self, val):
    """Convert a SQL-side value into an Arrow instance.

    Strings are parsed directly; any other value is converted by the parent
    class first and then wrapped.
    """
    if not isinstance(val, str):
        return Arrow.fromdatetime(super().sql2py(val))
    return arrow.get(val)
def icc_registered(report_datetime=None):
    """Return True if the current site has an ICC registration whose
    ``date_opened`` is on/after the report date."""
    as_arrow = Arrow.fromdatetime(report_datetime)
    current_site = Site.objects.get_current()
    return IntegratedCareClinicRegistration.objects.filter(
        site=current_site,
        date_opened__gte=as_arrow.date(),
    ).exists()
def index(self, request, extra_context=None):
    """ Add some extra index context such as a dataset to graph. """
    # Default chart window: the last 12 months (11 months back through today).
    today = Arrow.fromdatetime(now())
    last_year = today.replace(months=-11)
    site_dataset = OrderedDict()
    sites = []
    # Non-superusers see a restricted subset of sites.
    if request.user.is_superuser:
        sites = Site.objects.annotate(video_count=Count('camera__video'))
    else:
        sites = Site.objects.annotate(
            video_count=Count('camera__video')).filter(id__lte=2).exclude(
                pk=17)
    # A valid GraphForm overrides the default date window.
    gf = GraphForm(request.GET)
    startd = last_year
    endd = today
    if gf.is_valid():
        startd = Arrow.fromdate(gf.cleaned_data['start_date'])
        endd = Arrow.fromdate(gf.cleaned_data['end_date'])
    # Pick the bucket size from the window length: <=31 days -> daily,
    # <~6 months -> weekly, otherwise monthly.
    difference = (endd - startd).days
    period = 'month'
    if difference < 31:
        period = 'day'
    elif difference < (30 * 6):
        period = 'week'
    else:
        period = 'month'
    # For every site, aggregate the average number of returning penguins
    # across the entire month. These calculations are the average over
    # the median number of penguins observed each day.
    site_dataset['Total Penguins'] = []
    for site in sites:
        site_dataset[site.name] = []
        for start, end in Arrow.span_range(period, startd, endd):
            average = site.penguincount_set.filter(
                date__gte=start.date(),
                date__lte=end.date()).aggregate(
                    penguins=Avg('total_penguins'))
            site_dataset[site.name].append({
                'date': start.date(),
                'value': "%0.2f" % average['penguins'] if (average['penguins'] > 0) else 0.0
            })
            # Accumulate this bucket's average into the running
            # 'Total Penguins' series: update in place if the bucket already
            # exists, otherwise (for/else) append a new entry.
            for item in site_dataset['Total Penguins']:
                if item['date'] == start.date():
                    item['value'] = "%0.2f" % (
                        float(item['value']) + (average['penguins'] if (average['penguins'] > 0) else 0.0))
                    break
            else:
                site_dataset['Total Penguins'].append({
                    'date': start.date(),
                    'value': "%0.2f" % average['penguins'] if (average['penguins'] > 0) else 0.0
                })
    # NOTE(review): ``average['penguins']`` may be None when no counts exist
    # in a bucket; the ``> 0`` comparison would then raise on Python 3 --
    # confirm against real data.
    context = {
        'sites': sites,
        'site_dataset': site_dataset,
        'title': _("Penguin island sites"),
        'gform': gf,
    }
    context.update(extra_context or {})
    return super(PenguinSite, self).index(request, context)
def get(*args, **kwargs):
    '''Returns an :class:`Arrow <arrow.Arrow>` object based on flexible inputs.

    Usage::

        >>> import arrow

    **No inputs** to get current UTC time::

        >>> arrow.get()
        <Arrow [2013-05-08T05:51:43.316458+00:00]>

    **One str**, **float**, or **int**, convertible to a floating-point
    timestamp, to get that timestamp in UTC::

        >>> arrow.get(1367992474.293378)
        <Arrow [2013-05-08T05:54:34.293378+00:00]>
        >>> arrow.get('1367992474')
        <Arrow [2013-05-08T05:54:34+00:00]>

    **One str**, convertible to a timezone, or **tzinfo**, to get the current
    time in that timezone::

        >>> arrow.get('US/Pacific')
        <Arrow [2013-05-07T22:57:15.609802-07:00]>
        >>> arrow.get(tz.tzlocal())
        <Arrow [2013-05-07T22:57:28.484717-07:00]>

    **One** naive or aware **datetime**, to get that datetime (naive assumed
    UTC)::

        >>> arrow.get(datetime(2013, 5, 5))
        <Arrow [2013-05-05T00:00:00+00:00]>

    **Two** arguments, a **datetime** and a timezone expression::

        >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific')
        <Arrow [2013-05-05T00:00:00-07:00]>

    **Two** arguments, both **str**, to parse the first according to the
    format of the second::

        >>> arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss')
        <Arrow [2013-05-05T12:30:45+00:00]>

    **Three or more** arguments, as for the constructor of a **datetime**::

        >>> arrow.get(2013, 5, 5, 12, 30, 45)
        <Arrow [2013-05-05T12:30:45+00:00]>
    '''
    arg_count = len(args)

    if arg_count == 0:
        return Arrow.utcnow()

    if arg_count == 1:
        arg = args[0]
        timestamp = None

        try:
            timestamp = float(arg)
        except (TypeError, ValueError):
            # Narrowed from a bare ``except:``; float() only raises these two
            # for non-numeric input, and a bare except also swallowed
            # KeyboardInterrupt/SystemExit.
            pass

        # (int), (float), (str(int)) or (str(float)) -> from timestamp.
        if timestamp is not None:
            return Arrow.utcfromtimestamp(timestamp)

        # (datetime) -> from datetime.
        elif isinstance(arg, datetime):
            return Arrow.fromdatetime(arg)

        # (tzinfo) -> now, @ tzinfo.
        elif isinstance(arg, tzinfo):
            return Arrow.now(arg)

        # (str) -> now, @ tzinfo.
        elif isinstance(arg, str):
            _tzinfo = parser.TzinfoParser.parse(arg)
            return Arrow.now(_tzinfo)

        else:
            raise TypeError('Can\'t parse single argument type of \'{0}\''.format(type(arg)))

    elif arg_count == 2:
        arg_1, arg_2 = args[0], args[1]

        if isinstance(arg_1, datetime):
            # (datetime, tzinfo) -> fromdatetime @ tzinfo.
            if isinstance(arg_2, tzinfo):
                return Arrow.fromdatetime(arg_1, arg_2)

            # (datetime, str) -> fromdatetime @ tzinfo.
            elif isinstance(arg_2, str):
                _tzinfo = parser.TzinfoParser.parse(arg_2)
                return Arrow.fromdatetime(arg_1, _tzinfo)

            else:
                raise TypeError('Can\'t parse two arguments of types \'datetime\', \'{0}\''.format(
                    type(arg_2)))

        # (str, format) -> parsed.
        elif isinstance(arg_1, str) and isinstance(arg_2, str):
            dt = parser.DateTimeParser.parse(args[0], args[1])
            return Arrow.fromdatetime(dt)

        else:
            raise TypeError('Can\'t parse two arguments of types \'{0}\', \'{1}\''.format(
                type(arg_1), type(arg_2)))

    # 3+ args.
    else:
        return Arrow(*args, **kwargs)
def friendly_time(d: datetime.datetime) -> str:
    """Return "minutes ago" style date"""
    reference = Arrow.fromdatetime(datetime.datetime.utcnow())
    return Arrow.fromdatetime(d).humanize(reference)