def test_current_worktime_daily(ya_metrika_logs_to_csv_notebook):
    """With a daily schedule, current_worktime is yesterday at start_time,
    whether ``from_date`` is unset or lies in the past."""
    from flowmaster.operators.etl.policy import ETLNotebook
    from flowmaster.operators.base.work import Work

    zone = "Europe/Moscow"
    five_days_ago = pendulum.today("UTC") - dt.timedelta(5)

    for from_date in (None, five_days_ago):
        ya_metrika_logs_to_csv_notebook.work.triggers.schedule = (
            ETLNotebook.WorkPolicy.TriggersPolicy.SchedulePolicy(
                timezone=zone,
                start_time="01:00:00",
                from_date=from_date,
                interval="daily",
            )
        )
        work = Work(ya_metrika_logs_to_csv_notebook)
        assert work.current_worktime == pendulum.yesterday(zone).replace(hour=1)
def _get_date_parameters(current_date: str) -> Tuple[str, str]:
    """Return a ``(start, end)`` date-string pair covering at most one month.

    ``start`` is *current_date*; ``end`` is one month later, clamped to
    yesterday so the window never includes today's (possibly incomplete) data.

    Args:
        current_date: an ISO-8601 date string parseable by pendulum.

    Returns:
        Tuple of ``YYYY-MM-DD`` strings ``(start, end)``.
    """
    # Parse once and derive the end from the same value (the original parsed
    # the identical string twice).
    start_date = pendulum.parse(current_date)
    end_date = min(start_date.add(months=1), pendulum.yesterday())
    return start_date.to_date_string(), end_date.to_date_string()
def test_worktime():
    """A daily schedule resolves current worktime to yesterday 01:00 local time."""
    zone = "Europe/Moscow"

    CONFIG.work.schedule = ETLFlowConfig.WorkPolicy.SchedulePolicy(
        timezone=zone, start_time="01:00:00", from_date=None, interval="daily"
    )
    assert Work(CONFIG).current_worktime == pendulum.yesterday(zone).replace(hour=1)

    CONFIG.work.schedule = ETLFlowConfig.WorkPolicy.SchedulePolicy(
        timezone="Europe/Moscow",
        start_time="01:00:00",
        from_date=dt.date.today() - dt.timedelta(5),
        interval="daily",
    )
    assert Work(CONFIG).current_worktime == pendulum.yesterday(zone).replace(hour=1)
def test_next_page_token(self, requests_mock, stream_cls, expected):
    """next_page_token should advance the request window by one step.

    NOTE(review): the mocked URL covers [2 days ago, yesterday], while
    ``expected`` is updated to [yesterday, now] — presumably because
    next_page_token shifts the window forward by one interval; confirm
    against the stream implementation.
    """
    days_ago = pendulum.now().subtract(days=2)
    stream = stream_cls(days_ago.isoformat())
    start = days_ago.strftime(stream.date_template)
    end = pendulum.yesterday().strftime(stream.date_template)
    url = f"{stream.url_base}{stream.path()}?start={start}&end={end}"
    # update expected with test values.
    expected.update(
        **{
            "start": pendulum.yesterday().strftime(stream.date_template),
            "end": pendulum.now().strftime(stream.date_template)
        })
    # Register the URL so the real GET below is intercepted.
    requests_mock.get(url)
    response = requests.get(url)
    assert stream.next_page_token(response) == expected
def steps(client: WithingsApi) -> t.Optional[int]:
    """Return yesterday's step count for the authenticated user, or None
    when no activity entry for yesterday exists."""
    result = client.measure_get_activity(
        data_fields=[GetActivityField.STEPS],
        startdateymd=pendulum.yesterday(),
        enddateymd=pendulum.today(),
    )
    target_day = pendulum.yesterday().day
    # First activity whose day-of-month matches yesterday wins.
    for activity in result.activities:
        if activity.date.day == target_day:
            return activity.steps
    return None
def generate(self):
    """Returns a list of dicts containing Azure VM Metrics

    Returns:
        list: A list of dicts containing metrics for an Azure VM
    """
    # Generated once and memoised in self.__METRICS; later calls return the cache.
    if not self.__METRICS:
        metric_dict = {}
        yesterday = pendulum.yesterday()
        now = pendulum.now()
        # __METRIC_LIST maps a unit key ('percent', 'bytes', ...) to metric names.
        for key, val in self.__METRIC_LIST.items():
            for metric in val:
                time_stamp = yesterday
                timestamp_list = []
                # One random sample per hour from yesterday up to roughly now.
                while time_stamp <= now:
                    total_value = None
                    if key == 'percent':
                        total_value = round(float(self.random.randint(0, 100)),2)
                    elif key == 'bytes':
                        total_value = round(float(self.random.randint(50, 10000)),2)
                    elif key == 'count_per_second':
                        total_value = round(float(self.random.randint(50, 1000)), 2)
                    elif key == 'count':
                        total_value = round(float(self.random.uniform(0.0, 1000)), 2)
                    else:
                        pass
                    # NOTE(review): the timestamp advances *before* appending, so the
                    # first sample is yesterday+1h and the last may exceed `now` —
                    # confirm this off-by-one is intended for the mock data.
                    time_stamp = time_stamp.add(hours=1)
                    timestamp_list.append({
                        'time_stamp': time_stamp.to_iso8601_string(),
                        'total': total_value,
                        'unit': key
                    })
                metric_dict[metric] = timestamp_list
        self.__METRICS = metric_dict
    return self.__METRICS
def _parse_optinput(self, ctx, optional_input):
    """Parse optional command input into ``(date, append_team, timezone, args)``.

    Handles an optional ``--tz`` argument (short-hand names resolved through
    ``self.short_tzs``) and an ``extra_text`` token that may be "yesterday",
    "tomorrow", a parseable date string, or — failing that — a team name.
    Returns None implicitly when the timezone is invalid.
    """
    date = pendulum.now()
    append_team = ""
    timezone = ""
    args_dev = self._parseargs(optional_input)
    if args_dev.get('--tz'):
        # grab the user-defined timezone
        timezone = args_dev.get('--tz')
        # see if it's a short-hand timezone first
        timezone = self.short_tzs.get(timezone.lower()) or timezone
        # now check if it's valid
        try:
            _ = pendulum.timezone(timezone)
        except Exception:
            # Invalid timezone: abort the whole parse (caller gets None).
            return
    if args_dev.get('extra_text', '').lower() == "yesterday":
        date = pendulum.yesterday().in_tz(timezone or self.default_other_tz)
    elif args_dev.get('extra_text', '').lower() == "tomorrow":
        date = pendulum.tomorrow().in_tz(timezone or self.default_other_tz)
    else:
        try:
            date = pendulum.parse(args_dev.get('extra_text'), strict=False)
        except Exception as err:
            LOGGER.error(f"[6] {err}")
            # Not a date: treat the remaining text as a team name instead.
            append_team = args_dev.get('extra_text').lower()
    return date, append_team, timezone, args_dev
def get_news_yandex(self):
    """Scrape the Yandex news front page into a list of story dicts.

    Each dict carries 'link', 'name', 'source' and 'date' (DD.MM.YYYY,
    Moscow time). Depends on the current yandex.ru/news markup.
    """
    news = []
    main_link = 'https://yandex.ru'
    response = requests.get(main_link+'/news', headers=self.header)
    dom = html.fromstring(response.text)
    links = dom.xpath("//td[@class='stories-set__item']")
    for link in links:
        new = {}
        new['link'] = main_link + link.xpath(".//h2/a/@href")[0]
        new['name'] = link.xpath(".//h2/a/text()")[0]
        text = link.xpath(".//div[@class='story__date']/text()")[0]
        source = ''
        lst = text.split()
        # The date text is "<source words...> <time or 'вчера'>"; collect the
        # source words, stopping at 'вчера' ("yesterday") or a HH:MM token.
        for i in lst:
            if i == 'вчера' or ':' in i:
                break
            else:
                source += i + " "
        source = source.strip()
        new['source'] = source
        # 'вчера' means the story was published yesterday (Moscow time).
        if 'вчера' in text:
            new['date'] = pendulum.yesterday('Europe/Moscow').format('DD.MM.YYYY')
        else:
            new['date'] = pendulum.today('Europe/Moscow').format('DD.MM.YYYY')
        news.append(new)
    return news
def make_weekly_movie(cam: Cam, executor):
    """Assemble a weekly time-lapse movie from the past week's daytime images.

    Gathers frames captured between 06:00 and 18:00 over the last week,
    drops gray (bad) frames, overlays a text clip, and writes the movie to
    ``<root>/regular/weekly/ww<week>.mp4``. Returns a Movie record whose
    poster is the middle frame of the sequence.
    """
    root = Path(conf.root_dir) / 'data' / cam.name
    path = root / 'regular' / 'imgs'
    start = pendulum.yesterday()
    logger.info(f'Running make weekly movie for ww{start.week_of_year}')
    week_ago = start.subtract(weeks=1).date()
    sequence = []
    # Only daylight frames are kept.
    morning = pendulum.Time(6)
    evening = pendulum.Time(18)
    # Day directories are named DD_MM_YYYY; iterate them chronologically.
    for day in sorted(list(path.iterdir()), key=lambda x: pendulum.from_format(x.name, 'DD_MM_YYYY')):
        if pendulum.from_format(day.name, 'DD_MM_YYYY').date() > week_ago:
            for img in sorted(day.iterdir()):
                # Image files are named DD_MM_YYYY_HH-mm-ss.<ext>.
                t_img = img.name.split('.')[0]
                t_img = pendulum.from_format(t_img, 'DD_MM_YYYY_HH-mm-ss').time()
                if morning < t_img < evening:
                    sequence.append(str(img))
    sequence = check_sequence_for_gray_images(sequence, executor)
    txt_clip = make_txt_movie(sequence, 100, executor)
    logger.info(f'Composing clip for weekly movie ww{start.week_of_year}')
    image_clip = ImageSequenceClip(sequence, fps=100)
    clip = CompositeVideoClip([image_clip, txt_clip.set_position(('right', 'top'))], use_bgclip=True)
    movie_path = root / 'regular' / 'weekly' / f'ww{start.week_of_year}.mp4'
    movie_path.parent.mkdir(parents=True, exist_ok=True)
    clip.write_videofile(str(movie_path), audio=False)
    logger.info(f'Finished with clip for weekly movie ww{start.week_of_year}')
    return Movie(clip.h, clip.w, movie_path, sequence[seq_middle(sequence)])
def log_query_relative_date(_string, _location, tokens):
    """Generate an SQL where clause for a date expressed via keyword.

    Recognized keywords are "today" and "yesterday".

    The SQL describes a range rather than a fixed day to account
    for timezone differences between the query and the source data.
    For example, "today" in local time is more like "today and a bit
    of tomorrow" in UTC.

    For performance, the query is structured to take advantage of an
    expression-based index. This only works when the query expression
    matches the expression used in the create index statement.
    """
    # NOTE(review): if tokens[1] is anything other than "today"/"yesterday",
    # reference_date stays unbound and the format() below raises NameError —
    # presumably the parser grammar only admits these two keywords; confirm.
    if tokens[1] == "today":
        reference_date = pendulum.today()
    elif tokens[1] == "yesterday":
        reference_date = pendulum.yesterday()

    # Local-day boundaries converted to UTC, matching the indexed expression.
    return "datestamp BETWEEN '{}' AND '{}'".format(
        reference_date.start_of('day').in_timezone('utc').format(
            'YYYY-MM-DD-HH'
        ),
        reference_date.end_of('day').in_timezone('utc').format(
            'YYYY-MM-DD-HH'
        )
    )
def test_request_params(mocker, class_, cursor_field, date_only, additional_fields, retargeting, currency):
    """request_params must mirror the configured window plus optional fields."""
    timezone = "UTC"

    def fake_init(self):
        self.api_token = "secret"
        self.timezone = pendulum.timezone("UTC")
        self.start_date = pendulum.yesterday(timezone)
        self.end_date = pendulum.today(timezone)

    mocker.patch.object(class_, "__init__", fake_init)
    mocker.patch.object(class_, "cursor_field", cursor_field)
    stream = class_()

    start = pendulum.yesterday(timezone)
    end = pendulum.today(timezone)
    inputs = {
        "stream_slice": {cursor_field: start, cursor_field + "_end": end},
        "next_page_token": None,
        "stream_state": None,
    }

    # Date-only streams send bare dates; otherwise full datetimes.
    expected_params = {
        "api_token": "secret",
        "timezone": timezone,
        "maximum_rows": 1_000_000,
        "from": start.to_date_string() if date_only else start.to_datetime_string(),
        "to": end.to_date_string() if date_only else end.to_datetime_string(),
    }
    if additional_fields:
        expected_params["additional_fields"] = ",".join(additional_fields)
    if retargeting:
        expected_params["reattr"] = retargeting
    if currency:
        expected_params["currency"] = currency

    assert stream.request_params(**inputs) == expected_params
def test_default_specific_field_on_save(helpers):
    """A value written to 'Specific Date & Time' survives a save round-trip."""
    specific_value = pendulum.yesterday()
    record = pytest.app.records.create(
        **{"Required Date & Time": pendulum.now()})
    record["Specific Date & Time"] = specific_value
    record.save()
    assert record["Specific Date & Time"] == specific_value
class AuthorizeViewsTest(object):
    """Shared checks for the per-model secure-share authorize views."""

    def _make_item_url(self, factory):
        # Create a fresh item and resolve its model-specific authorize URL.
        view_name = "secure_share:{}Authorize".format(factory._meta.model.__name__)
        item = factory()
        return item, resolve_url(view_name, item.pk)

    def test_anonymous(self, client, factory):
        """An anonymous GET renders the authorize form (HTTP 200)."""
        _, url = self._make_item_url(factory)
        assert client.get(url).status_code == 200

    def test_bad_password(self, client, factory):
        """A wrong password re-renders the form with a field error."""
        _, url = self._make_item_url(factory)
        response = client.post(url, data={'password': '******'})
        assert response.status_code == 200
        form = response.context_data.get('form')
        assert form.errors['password'] == ['Invalid password']

    @pytest.mark.parametrize(
        "created, status",
        [(pendulum.yesterday(), 410),
         (pendulum.now().subtract(hours=23, minutes=59), 200)])
    def test_gone(self, client, factory, created, status):
        """Items older than 24h answer 410 Gone; younger ones are still served."""
        item, url = self._make_item_url(factory)
        # The model field has auto_now set, so we override a `created` value directly in the db
        factory._meta.model.objects.filter(pk=item.pk).update(created=created)
        assert client.get(url).status_code == status
def update_completed_statistics(self):
    """Recompute the completion summary over ``self.completed_logs`` and
    store it in ``self.completed_statistics``."""
    logs_num = len(self.completed_logs)
    statistics = {
        'today': 0, 'yesterday': 0, 'last_7': 0,
        'all': logs_num, 'days': 0, 'average_duration': 0,
    }
    if logs_num == 0:
        self.completed_statistics = statistics
        return

    today = pendulum.today()
    yesterday = pendulum.yesterday()
    week_ago = pendulum.now().subtract(days=7)

    statistics['today'] = sum(
        1 for log in self.completed_logs if log.complete_time >= today)
    statistics['yesterday'] = sum(
        1 for log in self.completed_logs
        if yesterday <= log.complete_time < today)

    recent = [log for log in self.completed_logs if log.complete_time >= week_ago]
    statistics['last_7'] = len(recent)

    # Days elapsed since the very first log was started (inclusive).
    first_time = min(log.start_time for log in self.completed_logs)
    statistics['days'] = (pendulum.now() - first_time).days + 1

    if recent:
        average_duration = sum(log.total_time for log in recent) / len(recent)
        statistics['average_duration'] = time_format(average_duration)
    else:
        statistics['average_duration'] = 'Nan'
    self.completed_statistics = statistics
def passes(self, attribute, key, dictionary):
    """Validate that ``attribute`` parses to a moment no earlier than yesterday."""
    import pendulum

    try:
        parsed = pendulum.parse(attribute, tz=self.tz)
    except pendulum.parsing.exceptions.ParserError:
        # Unparseable input fails validation outright.
        return False
    return parsed >= pendulum.yesterday()
def test_last_updated_field(helpers):
    """'Last Updated' is server-controlled; a supplied value is ignored on create."""
    supplied = pendulum.yesterday()
    record = pytest.app.records.create(
        **{
            "Required Date & Time": pendulum.now(),
            "Last Updated": supplied,
        })
    assert record["Last Updated"] != supplied
def get_date_params(stream_slice: Mapping[str, Any], cursor_field: str, end_date: pendulum.datetime = None):
    """Compute the ``(start, end)`` date-string window for the next slice.

    Args:
        stream_slice: slice state; ``cursor_field`` holds the last synced date.
        cursor_field: key into *stream_slice* for the cursor value.
        end_date: optional hard upper bound; defaults to yesterday.

    Returns:
        Tuple of ``YYYY-MM-DD`` strings: the day after the cursor, and an end
        capped at one month past the cursor. A future cursor yields a
        degenerate one-day window starting at the cursor itself.
    """
    end_date = end_date or pendulum.yesterday()
    start_date = pendulum.parse(stream_slice.get(cursor_field))
    if start_date > pendulum.now():
        return start_date.to_date_string(), start_date.add(days=1).to_date_string()
    # Cap the window at one month past the cursor; reuse the already-parsed
    # value instead of re-parsing the same slice entry.
    end_date = min(end_date, start_date.add(months=1))
    return start_date.add(days=1).to_date_string(), end_date.to_date_string()
def test_default_current_field(helpers):
    """An explicit value for 'Default current Date & Time' overrides the default."""
    explicit = pendulum.yesterday()
    record = pytest.app.records.create(
        **{
            "Required Date & Time": pendulum.now(),
            "Default current Date & Time": explicit,
        })
    assert record["Default current Date & Time"] == explicit
def test_get_end_date(self, stream_cls, expected):
    """_get_end_date(yesterday) should resolve to the current time.

    NOTE(review): the ``expected`` fixture parameter is immediately
    overwritten with `now` below — presumably the fixture value is unused
    by design; confirm and consider dropping the parameter.
    """
    now = pendulum.now()
    yesterday = pendulum.yesterday()
    stream = stream_cls(yesterday.isoformat())
    # update expected with test values.
    expected = now.strftime(stream.date_template)
    assert stream._get_end_date(yesterday).strftime(
        stream.date_template) == expected
def test_last_updated_field_on_save(helpers):
    """Saving refreshes 'Last Updated'; a manually assigned value does not stick."""
    stale_value = pendulum.yesterday()
    record = pytest.app.records.create(
        **{"Required Date & Time": pendulum.now()})
    record["Last Updated"] = stale_value
    record.save()
    assert record["First Created"] < record["Last Updated"]
    assert record["Last Updated"] != stale_value
def artcom(self, info: base.InfoType) -> base.ArtcomType:
    """Fetch artist commentaries for a post.

    Fresh posts (created after yesterday) without commentary-related meta
    tags are skipped to avoid pointless API calls."""
    meta_tags = f" {info['tag_string_meta']} "
    tagged = " commentary " in meta_tags or " commentary_request " in meta_tags
    if not tagged and pend.parse(info["created_at"]) > pend.yesterday():
        return []
    return self._api("artist_commentaries.json", **{"search[post_id]": info["id"]})
def test_ordering(self):
    """Statuses come back oldest-first."""
    older = Status(user='******', _value=Status.ABERTA,
                   _datetime=pendulum.yesterday()).save()
    Status(user='******', _value=Status.FECHADA,
           _datetime=pendulum.now()).save()
    self.assertAlmostEqual(Status.objects.count(), 2)
    self.assertEqual(Status.objects.first(), older)
def test_parse_now(self):
    """Parsing 'now' yields the current local time and honours pendulum.test()."""
    result = pendulum.parse('now')
    assert result.timezone_name == 'America/Toronto'

    frozen = pendulum.yesterday()
    with pendulum.test(frozen):
        assert pendulum.parse('now') == frozen
def last_update(self, feed) -> pendulum.DateTime:
    """ Get the last updated date for the given feed """
    try:
        return FeedState.get(feed).last_run
    except DoesNotExist:
        # No state yet: pretend the feed was last run a day ago.
        return pendulum.yesterday('UTC')
def test_get_last_by_subject(self):
    """get_last_by_subject returns the most recently dated message."""
    UpdateMessage(self.subject,
                  [Session('test', [Amount(1000000000)])],
                  pendulum.yesterday()).save()
    newest = UpdateMessage(self.subject,
                           [Session('test', [Amount(1100000000)])],
                           pendulum.today()).save()
    self.assertEqual(newest, UpdateMessage.get_last_by_subject(self.subject))
def test_ordering(self):
    """Command calls come back oldest-first."""
    older = CommandCall(user='******', _value=CommandCall.COACH,
                        _datetime=pendulum.yesterday()).save()
    CommandCall(user='******', _value=CommandCall.COACH,
                _datetime=pendulum.now()).save()
    self.assertAlmostEqual(CommandCall.objects.count(), 2)
    self.assertEqual(CommandCall.objects.first(), older)
def get_recommendation(self):
    """Return this user's Recommendation, creating one on first access."""
    recommendation = Recommendation.objects(owner=self).first()
    if not recommendation:
        # Seed last_recommended_at in the past so a brand-new record is
        # immediately eligible for recommendation.
        recommendation = Recommendation(
            owner=self,
            user_ids=[],
            last_recommended_at=pendulum.yesterday().int_timestamp,
        )
        recommendation.save()
        recommendation.reload()
    return recommendation
def test_clima_ordering(self):
    """last_clima() returns the newest CLIMA call."""
    CommandCall(user='******', _value=CommandCall.CLIMA,
                _datetime=pendulum.yesterday()).save()
    newest = CommandCall(user='******', _value=CommandCall.CLIMA,
                         _datetime=pendulum.now()).save()
    self.assertAlmostEqual(CommandCall.objects.count(), 2)
    self.assertEqual(CommandCall.last_clima(), newest)
def get_hash(user_id: str):
    """Combine a hash of yesterday's date with a per-user hash into one int.

    Both components are SHA-1 digests truncated to 8 decimal digits.
    """
    # NOTE: this is *yesterday's* date string (the original local was
    # misleadingly named `today`).
    date_str = str(pendulum.yesterday().date())
    date_hash = int(
        hashlib.sha1(date_str.encode("utf-8")).hexdigest(), 16) % 10 ** 8
    user_hash = int(
        hashlib.sha1(user_id.encode("utf-8")).hexdigest(), 16) % 10 ** 8
    return date_hash + user_hash
def test_parse_now():
    """Parsing "now" yields the current local time and honours pendulum.test()."""
    result = pendulum.parse("now")
    assert result.timezone_name == "America/Toronto"

    frozen = pendulum.yesterday()
    with pendulum.test(frozen):
        assert pendulum.parse("now") == frozen
def test_database_notification_unread_state(self):
    """A notification is unread exactly when read_at is unset."""
    payload = {
        **notif_data,
        "read_at": pendulum.yesterday().to_datetime_string(),
    }
    notification = DatabaseNotification.create(payload)
    self.assertFalse(notification.is_unread)

    notification.read_at = None
    self.assertTrue(notification.is_unread)
def last_update(self, feed) -> pendulum.DateTime:
    """ Get the last updated date for the given feed """
    datafile = os.path.join(self.basepath, f'last.{feed}.dat')
    try:
        with open(datafile, 'r') as f:
            stamp = f.read().strip()
    except IOError as e:
        log.debug(f"Error reading data file: {e}")
        # Missing/unreadable state file: assume the feed ran a day ago.
        return pendulum.yesterday('UTC')
    return pendulum.parse(stamp)
def process_closed_briefs():
    """Send closing emails for briefs that closed yesterday.

    This task is designed to run after midnight (Canberra time)."""
    tz = 'Australia/Canberra'
    yesterday = pendulum.yesterday(tz=tz)
    today = pendulum.today(tz=tz)
    closed_briefs = (
        db.session.query(Brief)
        .join(Framework)
        .filter(
            Brief.closed_at >= yesterday,
            Brief.closed_at < today,
            Brief.withdrawn_at.is_(None),
            Framework.slug == 'digital-marketplace',
        )
        .all()
    )
    for closed_brief in closed_briefs:
        send_brief_closed_email(closed_brief)
def get_active_date(log_records, query):
    """Figure out which date the query pertains to.

    This value is used by the calendar widget in the UI.
    """
    # If we already have records, the active date is simply the first one's.
    if log_records:
        return log_records[0]["unix_timestamp"]

    timezone = cherrypy.engine.publish(
        "registry:local_timezone"
    ).pop()

    date_string = None

    # Month query: "date YYYY-MM" → first day of that month.
    matches = re.match(
        r"date\s+(\d{4}-\d{2})",
        query
    )

    if matches:
        date_string = matches.group(1) + "-01"

    # Full-date query: "date YYYY-MM-DD". Note re.match is unanchored at the
    # end, so a full date also matches the month pattern above; this second
    # match deliberately overrides it with the exact day.
    matches = re.match(
        r"date\s+(\d{4}-\d{2}-\d{2})",
        query
    )

    if matches:
        date_string = matches.group(1)

    if date_string:
        active_date = pendulum.parse(
            date_string,
            tz=timezone
        )
    elif re.match(r"date\s+yesterday", query):
        active_date = pendulum.yesterday(tz=timezone)
    else:
        active_date = pendulum.today(tz=timezone)

    return active_date.start_of('day')
def test_yesterday():
    """pendulum.yesterday() is a DateTime exactly one day before today's start."""
    start_of_today = pendulum.now().start_of("day")
    result = pendulum.yesterday()
    assert isinstance(result, DateTime)
    assert start_of_today.diff(result, False).in_days() == -1