def test_invalid_for_educational_mismatch(self) -> None:
    self.group.ip_use.educational = True
    self.dbsession.add(self.group)
    self.dbsession.commit()

    schema = TaskScheduleItemSchema().bind(request=self.req)
    appstruct = {
        ViewParam.SCHEDULE_ID: self.schedule.id,
        ViewParam.TABLE_NAME: "mfi20",
        ViewParam.CLINICIAN_CONFIRMATION: True,
        ViewParam.DUE_FROM: Duration(days=0),
        ViewParam.DUE_WITHIN: Duration(days=10),
    }
    cstruct = schema.serialize(appstruct)

    # No real world example prohibits educational use
    mock_task_class = mock.Mock(prohibits_educational=True)
    with mock.patch.object(
        schema, "_get_task_class", return_value=mock_task_class
    ):
        with self.assertRaises(Invalid) as cm:
            schema.deserialize(cstruct)

    self.assertIn("prohibits educational", cm.exception.messages()[0])
def pause_until(target, refresh: Duration = Duration(seconds=1)):
    """Sleep until ``target``, waking every ``refresh`` to re-check the clock."""
    begin = now()
    while target.is_future():
        if begin.is_future():
            # The start time can only become "in the future" if the system
            # clock was set backwards while we were waiting.
            raise SysTimeModified()
        else:
            sleep(refresh.in_seconds())
def test_iso_duration_field(self) -> None:
    id_colname = "id"
    duration_colname = "duration_iso"
    id_col = Column(id_colname, Integer, primary_key=True)
    duration_col = Column(duration_colname, PendulumDurationAsIsoTextColType)
    table = Table("testtable", self.meta, id_col, duration_col)
    table.create()

    d1 = Duration(years=1, months=3, seconds=3, microseconds=4)
    d2 = Duration(seconds=987.654321)
    d3 = Duration(days=-5)

    table.insert().values([
        {id_colname: 1, duration_colname: d1},
        {id_colname: 2, duration_colname: d2},
        {id_colname: 3, duration_colname: d3},
    ]).execute()

    select_fields = [id_col, duration_col]
    rows = list(
        select(select_fields)
        .select_from(table)
        .order_by(id_col)
        .execute()
    )
    self._assert_duration_equal(rows[0][duration_col], d1)
    self._assert_duration_equal(rows[1][duration_col], d2)
    self._assert_duration_equal(rows[2][duration_col], d3)
def test_serialize_deserialize(self) -> None:
    appstruct = {
        ViewParam.SCHEDULE_ID: 1,
        ViewParam.TABLE_NAME: "bmi",
        ViewParam.CLINICIAN_CONFIRMATION: False,
        ViewParam.DUE_FROM: Duration(days=90),
        ViewParam.DUE_WITHIN: Duration(days=100),
    }
    schema = TaskScheduleItemSchema().bind(request=self.req)
    self.serialize_deserialize(schema, appstruct)
def config(self):
    return SimulationConfig(
        duration=Duration(hours=24),
        market_count=4,
        slot_length=Duration(minutes=15),
        tick_length=Duration(seconds=15),
        cloud_coverage=ConstSettings.PVSettings.DEFAULT_POWER_PROFILE,
        market_maker_rate=ConstSettings.GeneralSettings.DEFAULT_MARKET_MAKER_RATE,
        iaa_fee=ConstSettings.IAASettings.FEE_PERCENTAGE,
    )
def config(self):
    configuration = SimulationConfig(
        sim_duration=Duration(hours=24),
        market_count=4,
        slot_length=Duration(minutes=15),
        tick_length=Duration(seconds=15),
        cloud_coverage=ConstSettings.PVSettings.DEFAULT_POWER_PROFILE,
        market_maker_rate=ConstSettings.GeneralSettings.DEFAULT_MARKET_MAKER_RATE,
        external_connection_enabled=False,
    )
    change_global_config(**configuration.__dict__)
    return configuration
def test_valid_for_clinician_task_with_confirmation(self) -> None:
    schema = TaskScheduleItemSchema().bind(request=mock.Mock())
    appstruct = {
        ViewParam.SCHEDULE_ID: 1,
        ViewParam.TABLE_NAME: "elixhauserci",
        ViewParam.CLINICIAN_CONFIRMATION: True,
        ViewParam.DUE_FROM: Duration(days=90),
        ViewParam.DUE_WITHIN: Duration(days=100),
    }
    try:
        schema.serialize(appstruct)
    except Invalid:
        self.fail("Validation failed unexpectedly")
def test_invalid_for_negative_due_from(self) -> None:
    schema = TaskScheduleItemSchema().bind(request=self.req)
    appstruct = {
        ViewParam.SCHEDULE_ID: 1,
        ViewParam.TABLE_NAME: "phq9",
        ViewParam.CLINICIAN_CONFIRMATION: False,
        ViewParam.DUE_FROM: Duration(days=-1),
        ViewParam.DUE_WITHIN: Duration(days=10),
    }
    cstruct = schema.serialize(appstruct)
    with self.assertRaises(Invalid) as cm:
        schema.deserialize(cstruct)
    self.assertIn("must be zero or more days", cm.exception.messages()[0])
def fetch_access_token(self) -> None:
    """Fetch an access token to obtain Plan Finder data."""
    # Construct request
    url: str = TOKEN_URLS[self.env]
    username: str = USERNAMES[self.env]
    key_id, key_secret = API_KEYS[self.env]
    body = {
        "userName": username,
        "scopes": "mpfpe_pde_full",
        "keyId": key_id,
        "keySecret": key_secret,
    }
    params = {}
    if self.env in ACS_PARAMS:
        params["ACS"] = ACS_PARAMS[self.env]

    # Submit HTTP POST request to obtain token
    logger.info(f"Fetching {self.env} access token")
    response: Response = requests.post(url, json=body, params=params)
    if response.status_code != 200:
        logger.error(Loader._request_details(response))
        raise RuntimeError(
            f"Failed to fetch token: HTTP status {response.status_code}"
        )

    # Extract token from response
    response_json: dict = response.json()
    access_token: str = response_json["accessToken"]
    expires: int = response_json["expires"]
    self.access_token = access_token
    self.access_token_expires = DateTime.now() + Duration(seconds=expires)
    logger.info(
        f"Fetched {self.env} access token; expires {self.access_token_expires}"
    )
def test_invalid_for_clinician_task_with_no_confirmation(self) -> None:
    schema = TaskScheduleItemSchema().bind(request=self.req)
    appstruct = {
        ViewParam.SCHEDULE_ID: 1,
        ViewParam.TABLE_NAME: "elixhauserci",
        ViewParam.CLINICIAN_CONFIRMATION: False,
        ViewParam.DUE_FROM: Duration(days=90),
        ViewParam.DUE_WITHIN: Duration(days=100),
    }
    cstruct = schema.serialize(appstruct)
    with self.assertRaises(Invalid) as cm:
        schema.deserialize(cstruct)
    self.assertIn(
        "you must tick 'Allow clinician tasks'", cm.exception.messages()[0]
    )
def fetch_zip_file(self, plan_year: str, date: Optional[Date] = None) -> Path:
    """Download a Plan Finder zip file for a given date (default: today)."""
    # Evaluate the default at call time, not at import time: a default of
    # Date.today() in the signature would be frozen when the module loads.
    if date is None:
        date = Date.today()

    # If we don't have a current access token (or it is about to expire),
    # fetch one
    no_access_token = self.access_token is None
    if no_access_token or DateTime.now() > (
        self.access_token_expires - Duration(minutes=5)
    ):
        self.fetch_access_token()

    # Construct request
    url = DATA_URL
    headers = {
        "X-API-CONSUMER-ID": API_KEYS[self.env][0],
        "Authorization": f"Bearer {self.access_token}",
    }
    params = {"fileName": f"{plan_year}_{date.to_date_string()}"}

    # Submit GET request to download file
    logger.info(
        f"Fetching {self.env} zip file for plan year {plan_year} and date {date}"
    )
    response = requests.get(url, headers=headers, params=params)
    if response.status_code != 200:
        raise RuntimeError(
            "Failed to fetch zip file (this may be expected for dates with "
            f"no data): HTTP status {response.status_code}"
        )

    # Save zip file to disk and return its path
    zip_bytes: bytes = response.content
    zip_file_path = DATA_DIR_PATH / f"{self.env}_{date}.zip"
    with open(zip_file_path, "wb") as zip_file:
        zip_file.write(zip_bytes)
    logger.info(f"Fetched {self.env} zip file: {zip_file_path}")
    return zip_file_path
def test_invalid_for_research_mismatch(self) -> None:
    self.group.ip_use.research = True
    self.dbsession.add(self.group)
    self.dbsession.commit()

    schema = TaskScheduleItemSchema().bind(request=self.req)
    appstruct = {
        ViewParam.SCHEDULE_ID: self.schedule.id,
        ViewParam.TABLE_NAME: "moca",
        ViewParam.CLINICIAN_CONFIRMATION: True,
        ViewParam.DUE_FROM: Duration(days=0),
        ViewParam.DUE_WITHIN: Duration(days=10),
    }
    cstruct = schema.serialize(appstruct)
    with self.assertRaises(Invalid) as cm:
        schema.deserialize(cstruct)
    self.assertIn("prohibits research", cm.exception.messages()[0])
def pendulum_duration_from_isodate_duration(dur: IsodateDuration) -> Duration:
    """
    Converts a :class:`isodate.isoduration.Duration` into a
    :class:`pendulum.Duration`.

    Both :class:`isodate.isoduration.Duration` and :class:`pendulum.Duration`
    incorporate an internal representation of a :class:`datetime.timedelta`
    (weeks, days, hours, minutes, seconds, milliseconds, microseconds) and
    separate representations of years and months.

    The :class:`isodate.isoduration.Duration` year/month elements are both of
    type :class:`decimal.Decimal` -- although its ``str()`` representation
    converts these silently to integer, which is quite nasty.

    A Pendulum Duration normalizes within its timedelta parts, but not across
    years and months -- necessarily, since neither years nor months are of
    exactly fixed duration.

    Raises:
        :exc:`ValueError` if the year or month component is not an integer

    .. code-block:: python

        from cardinal_pythonlib.datetimefunc import pendulum_duration_from_isodate_duration
        from isodate.isoduration import Duration as IsodateDuration
        from pendulum import Duration as PendulumDuration

        td1 = IsodateDuration(days=5, hours=3, minutes=2, microseconds=5)
        d1 = pendulum_duration_from_isodate_duration(td1)

        td2 = IsodateDuration(microseconds=5010293989234)
        d2 = pendulum_duration_from_isodate_duration(td2)

        td3 = IsodateDuration(days=5000)
        d3 = pendulum_duration_from_isodate_duration(td3)

        td4 = IsodateDuration(days=5000, years=5, months=2)
        d4 = pendulum_duration_from_isodate_duration(td4)
        # ... doesn't normalize across years/months; see explanation above

        td5 = IsodateDuration(days=5000, years=5.1, months=2.2)
        d5 = pendulum_duration_from_isodate_duration(td5)  # will raise
    """  # noqa
    y = dur.years
    if y.to_integral_value() != y:
        raise ValueError(f"Can't handle non-integer years {y!r}")
    m = dur.months
    if m.to_integral_value() != m:
        raise ValueError(f"Can't handle non-integer months {m!r}")
    return Duration(seconds=dur.tdelta.total_seconds(),
                    years=int(y), months=int(m))
def test_serialize_valid_duration(self) -> None:
    duration = Duration(days=47)
    duration_type = DurationType()

    cstruct = duration_type.serialize(None, duration)

    # For type checker
    assert cstruct not in (null,)
    cstruct: Dict[Any, Any]

    self.assertEqual(cstruct["days"], 3)
    self.assertEqual(cstruct["months"], 1)
    self.assertEqual(cstruct["weeks"], 2)
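# The expected values in the test above look arbitrary until you see the
# decomposition rule. Assuming DurationType.serialize() splits the total day
# count into 30-day months and 7-day weeks (an assumption about that
# serializer, which is not shown here), the arithmetic works out as follows:
months, rest = divmod(47, 30)  # 1 month, 17 days remaining
weeks, days = divmod(rest, 7)  # 2 weeks, 3 days
assert (months, weeks, days) == (1, 2, 3)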
def test_invalid_for_missing_ip_use(self) -> None:
    self.group.ip_use = None
    self.dbsession.add(self.group)
    self.dbsession.commit()

    schema = TaskScheduleItemSchema().bind(request=self.req)
    appstruct = {
        ViewParam.SCHEDULE_ID: self.schedule.id,
        ViewParam.TABLE_NAME: "moca",
        ViewParam.CLINICIAN_CONFIRMATION: True,
        ViewParam.DUE_FROM: Duration(days=0),
        ViewParam.DUE_WITHIN: Duration(days=10),
    }
    cstruct = schema.serialize(appstruct)
    with self.assertRaises(Invalid) as cm:
        schema.deserialize(cstruct)
    self.assertIn(
        f"The group '{self.group.name}' has no intellectual property "
        f"settings",
        cm.exception.messages()[0],
    )
def _create_and_activate_strategy_area(self, strategy):
    self.config = MagicMock()
    self.config.max_panel_power_W = 160
    GlobalConfig.end_date = GlobalConfig.start_date + Duration(days=1)
    self.area = Area(name="test_area", config=self.config, strategy=strategy)
    parent = Area(name="parent_area", children=[self.area])
    parent.activate()
    strategy.connected = True
    market = MagicMock()
    market.time_slot = GlobalConfig.start_date
    parent.get_future_market_from_id = lambda _: market
    self.area.get_future_market_from_id = lambda _: market
def pendulum_duration_from_timedelta(td: datetime.timedelta) -> Duration:
    """
    Converts a :class:`datetime.timedelta` into a :class:`pendulum.Duration`.

    .. code-block:: python

        from cardinal_pythonlib.datetimefunc import pendulum_duration_from_timedelta
        from datetime import timedelta
        from pendulum import Duration

        td1 = timedelta(days=5, hours=3, minutes=2, microseconds=5)
        d1 = pendulum_duration_from_timedelta(td1)

        td2 = timedelta(microseconds=5010293989234)
        d2 = pendulum_duration_from_timedelta(td2)

        td3 = timedelta(days=5000)
        d3 = pendulum_duration_from_timedelta(td3)
    """  # noqa
    return Duration(seconds=td.total_seconds())
from celery.schedules import crontab
from pendulum import Duration

# Scheduled (periodic) task configuration
task_schedule = {
    "apps.sspanel.tasks.auto_reset_free_user_traffic_task": crontab(
        day_of_month=1, hour=0, minute=0
    ),
    "apps.sspanel.tasks.reset_node_traffic_task": crontab(
        day_of_month=1, hour=0, minute=0
    ),
    "apps.sspanel.tasks.check_user_state_task": Duration(minutes=1),
    "apps.sspanel.tasks.make_up_lost_order_task": Duration(seconds=15),
    "apps.sspanel.tasks.clean_traffic_log_task": Duration(days=1),
    "apps.sspanel.tasks.clean_online_ip_log_task": Duration(days=1),
    "apps.sspanel.tasks.clean_node_online_log_task": Duration(days=1),
}
def is_old_hanging_draft(self, revision: dict) -> bool:
    """Determine whether a draft is at least a day old and unclosed."""
    draft_created_at: DateTime = pendulum.parse(revision["resource"]["created_at"])
    is_old_draft: bool = DateTime.now() - draft_created_at > Duration(days=1)
    is_hanging_draft: bool = revision["resource"]["closed_at"] is None
    return is_old_draft and is_hanging_draft
def pause_for(seconds: float):
    pause_until(now() + Duration(seconds=seconds))
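# A minimal usage sketch for the two helpers above, assuming `now` is
# pendulum.now and SysTimeModified is the project's own exception for a
# backwards clock adjustment:
try:
    pause_for(2.5)  # sleep for roughly 2.5 seconds
    pause_until(now() + Duration(minutes=1), refresh=Duration(seconds=5))
except SysTimeModified:
    pass  # the system clock was set backwards while waiting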
@staticmethod
def _assert_duration_equal(a: Duration, b: Duration) -> None:
    assert a.total_seconds() == b.total_seconds(), f"{a!r} != {b!r}"
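# Note on this comparison: pendulum (2.x) folds years and months into the
# underlying timedelta at 365 and 30 days respectively when building a
# Duration, so total_seconds() covers the year/month components too -- at the
# cost of treating, for example, Duration(years=1) and Duration(days=365) as
# equal. A quick check of that assumption:
from pendulum import Duration

assert Duration(years=1).total_seconds() == Duration(days=365).total_seconds()
assert Duration(months=1).total_seconds() == Duration(days=30).total_seconds()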
from celery.schedules import crontab
from pendulum import Duration

# Scheduled (periodic) task configuration
task_schedule = {
    "apps.sspanel.tasks.auto_reset_free_user_traffic_task": crontab(
        day_of_month=1, hour=0, minute=0
    ),
    "apps.sspanel.tasks.reset_node_traffic_task": crontab(
        day_of_month=1, hour=0, minute=0
    ),
    "apps.sspanel.tasks.check_user_state_task": Duration(minutes=1),
    "apps.sspanel.tasks.make_up_lost_order_task": Duration(seconds=15),
    "apps.sspanel.tasks.clean_traffic_log_task": Duration(minutes=1),
    "apps.sspanel.tasks.clean_online_ip_log_task": Duration(minutes=1),
    "apps.sspanel.tasks.clean_node_online_log_task": Duration(minutes=1),
    "apps.sspanel.tasks.clean_user_sub_log_task": Duration(minutes=1),
    # stats
    "apps.stats.tasks.gen_daily_stats_task": Duration(minutes=10),
}
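# A sketch of how a mapping like the one above can be handed to Celery beat.
# The app name here is an assumption; the key point is that pendulum.Duration
# subclasses datetime.timedelta, so Celery accepts it directly as a
# "run every N" interval alongside crontab entries.
from celery import Celery

app = Celery("sspanel")
app.conf.beat_schedule = {
    name: {"task": name, "schedule": schedule}
    for name, schedule in task_schedule.items()
}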
def duration_to_iso(d: Duration,
                    permit_years_months: bool = True,
                    minus_sign_at_front: bool = True) -> str:
    """
    Converts a :class:`pendulum.Duration` into an ISO-8601 formatted string.

    Args:
        d:
            the duration

        permit_years_months:
            - if ``False``, durations with non-zero year or month components
              will raise a :exc:`ValueError`; otherwise, the ISO format will
              always be ``PT<seconds>S``.
            - if ``True``, year/month components will be accepted, and the
              ISO format will be ``P<years>Y<months>MT<seconds>S``.

        minus_sign_at_front:
            Applies to negative durations, which probably aren't part of the
            ISO standard.

            - if ``True``, the format ``-P<positive_duration>`` is used, i.e.
              with a minus sign at the front and individual components
              positive.
            - if ``False``, the format ``PT-<positive_seconds>S`` (etc.) is
              used, i.e. with a minus sign for each component. This format is
              not re-parsed successfully by ``isodate`` and will therefore
              fail :func:`duration_from_iso`.

    Raises:
        :exc:`ValueError` for bad input

    The maximum length of the resulting string (see test code below) is:

    - 21 if years/months are not permitted;
    - ill-defined if years/months are permitted, but 29 for much more than is
      realistic (negative, 1000 years, 11 months, and the maximum length for
      seconds/microseconds).

    .. code-block:: python

        from pendulum import DateTime, Duration
        from cardinal_pythonlib.datetimefunc import duration_from_iso, duration_to_iso
        from cardinal_pythonlib.logs import main_only_quicksetup_rootlogger
        main_only_quicksetup_rootlogger()

        d1 = duration_from_iso("P5W")
        d2 = duration_from_iso("P3Y1DT3H1M2S")
        d3 = duration_from_iso("P7000D")
        d4 = duration_from_iso("P1Y7000D")
        d5 = duration_from_iso("PT10053.22S")
        print(duration_to_iso(d1))
        print(duration_to_iso(d2))
        print(duration_to_iso(d3))
        print(duration_to_iso(d4))
        print(duration_to_iso(d5))
        assert d1 == duration_from_iso(duration_to_iso(d1))
        assert d2 == duration_from_iso(duration_to_iso(d2))
        assert d3 == duration_from_iso(duration_to_iso(d3))
        assert d4 == duration_from_iso(duration_to_iso(d4))
        assert d5 == duration_from_iso(duration_to_iso(d5))

        strmin = duration_to_iso(Duration.min)  # '-P0Y0MT86399999913600.0S'
        strmax = duration_to_iso(Duration.max)  # 'P0Y0MT86400000000000.0S'
        duration_from_iso(strmin)  # raises ISO8601Error from isodate package (bug?)
        duration_from_iso(strmax)  # raises OverflowError from isodate package
        print(strmin)  # -P0Y0MT86399999913600.0S
        print(strmax)  # P0Y0MT86400000000000.0S

        d6 = duration_from_iso("P100Y999MT86400000000000.0S")  # OverflowError
        d7 = duration_from_iso("P0Y1MT86400000000000.0S")  # OverflowError
        d8 = duration_from_iso("P0Y1111111111111111MT76400000000000.0S")  # accepted!
        # ... length e.g. 38; see len(duration_to_iso(d8))
        # So the maximum string length may be ill-defined if years/months are
        # permitted (since Python 3 integers are unbounded; try 99 ** 10000).

        # But otherwise:
        d9longest = duration_from_iso("-P0Y0MT10000000000000.000009S")
        d10toolong = duration_from_iso("-P0Y0MT100000000000000.000009S")  # fails, too many days
        assert d9longest == duration_from_iso(duration_to_iso(d9longest))

        d11longest_with_us = duration_from_iso("-P0Y0MT1000000000.000009S")  # microseconds correct
        d12toolong_rounds_us = duration_from_iso("-P0Y0MT10000000000.000009S")  # error in microseconds
        d13toolong_drops_us = duration_from_iso("-P0Y0MT10000000000000.000009S")  # drops microseconds (within datetime.timedelta)
        d14toolong_parse_fails = duration_from_iso("-P0Y0MT100000000000000.000009S")  # fails, too many days
        assert d11longest_with_us == duration_from_iso(duration_to_iso(d11longest_with_us))
        assert d12toolong_rounds_us == duration_from_iso(duration_to_iso(d12toolong_rounds_us))
        assert d13toolong_drops_us == duration_from_iso(duration_to_iso(d13toolong_drops_us))

        longest_without_ym = duration_to_iso(d11longest_with_us,
                                             permit_years_months=False)
        print(longest_without_ym)  # -PT1000000000.000009S
        print(len(longest_without_ym))  # 21

        d15longest_realistic_with_ym_us = duration_from_iso("-P1000Y11MT1000000000.000009S")  # microseconds correct
        longest_realistic_with_ym = duration_to_iso(d15longest_realistic_with_ym_us)
        print(longest_realistic_with_ym)  # -P1000Y11MT1000000000.000009S
        print(len(longest_realistic_with_ym))  # 29

        # Now, double-check how the Pendulum classes handle year/month
        # calculations:
        basedate1 = DateTime(year=2000, month=1, day=1)  # 2000-01-01
        print(basedate1 + Duration(years=1))  # 2001-01-01; OK
        print(basedate1 + Duration(months=1))  # 2000-02-01; OK
        basedate2 = DateTime(year=2004, month=2, day=1)  # 2004-02-01; leap year
        print(basedate2 + Duration(years=1))  # 2005-02-01; OK
        print(basedate2 + Duration(months=1))  # 2004-03-01; OK
        print(basedate2 + Duration(months=1, days=1))  # 2004-03-02; OK
    """  # noqa
    prefix = ""
    negative = d < Duration()
    if negative and minus_sign_at_front:
        prefix = "-"
        d = -d
    if permit_years_months:
        return prefix + "P{years}Y{months}MT{seconds}S".format(
            years=d.years,
            months=d.months,
            seconds=d.total_seconds(),  # float
        )
    else:
        if d.years != 0:
            raise ValueError(f"Duration has non-zero years: {d.years!r}")
        if d.months != 0:
            raise ValueError(f"Duration has non-zero months: {d.months!r}")
        return prefix + f"PT{d.total_seconds()}S"
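# A small demonstration of the two negative-sign styles, derived from the
# function body above (the printed strings assume pendulum's usual
# total_seconds() behaviour for an hour-long duration):
from pendulum import Duration

d = Duration(hours=-1)
print(duration_to_iso(d))                             # '-P0Y0MT3600.0S'
print(duration_to_iso(d, minus_sign_at_front=False))  # 'P0Y0MT-3600.0S'
print(duration_to_iso(d, permit_years_months=False))  # '-PT3600.0S'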
def test_returns_task_schedules(self) -> None:
    from pendulum import DateTime as Pendulum, Duration, local, parse
    from camcops_server.cc_modules.cc_taskindex import (
        PatientIdNumIndexEntry,
        TaskIndexEntry,
    )
    from camcops_server.cc_modules.cc_taskschedule import (
        PatientTaskSchedule,
        TaskSchedule,
        TaskScheduleItem,
    )
    from camcops_server.tasks.bmi import Bmi

    schedule1 = TaskSchedule()
    schedule1.group_id = self.group.id
    schedule1.name = "Test 1"
    self.dbsession.add(schedule1)

    schedule2 = TaskSchedule()
    schedule2.group_id = self.group.id
    self.dbsession.add(schedule2)
    self.dbsession.commit()

    item1 = TaskScheduleItem()
    item1.schedule_id = schedule1.id
    item1.task_table_name = "phq9"
    item1.due_from = Duration(days=0)
    item1.due_by = Duration(days=7)
    self.dbsession.add(item1)

    item2 = TaskScheduleItem()
    item2.schedule_id = schedule1.id
    item2.task_table_name = "bmi"
    item2.due_from = Duration(days=0)
    item2.due_by = Duration(days=8)
    self.dbsession.add(item2)

    item3 = TaskScheduleItem()
    item3.schedule_id = schedule1.id
    item3.task_table_name = "phq9"
    item3.due_from = Duration(days=30)
    item3.due_by = Duration(days=37)
    self.dbsession.add(item3)

    item4 = TaskScheduleItem()
    item4.schedule_id = schedule1.id
    item4.task_table_name = "gmcpq"
    item4.due_from = Duration(days=30)
    item4.due_by = Duration(days=38)
    self.dbsession.add(item4)
    self.dbsession.commit()

    patient = self.create_patient()
    idnum = self.create_patient_idnum(
        patient_id=patient.id,
        which_idnum=self.nhs_iddef.which_idnum,
        idnum_value=TEST_NHS_NUMBER,
    )
    PatientIdNumIndexEntry.index_idnum(idnum, self.dbsession)

    server_patient = self.create_patient(as_server_patient=True)
    _ = self.create_patient_idnum(
        patient_id=server_patient.id,
        which_idnum=self.nhs_iddef.which_idnum,
        idnum_value=TEST_NHS_NUMBER,
        as_server_patient=True,
    )

    schedule_1 = PatientTaskSchedule()
    schedule_1.patient_pk = server_patient.pk
    schedule_1.schedule_id = schedule1.id
    schedule_1.settings = {
        "bmi": {"bmi_key": "bmi_value"},
        "phq9": {"phq9_key": "phq9_value"},
    }
    schedule_1.start_datetime = local(2020, 7, 31)
    self.dbsession.add(schedule_1)

    schedule_2 = PatientTaskSchedule()
    schedule_2.patient_pk = server_patient.pk
    schedule_2.schedule_id = schedule2.id
    self.dbsession.add(schedule_2)

    bmi = Bmi()
    self.apply_standard_task_fields(bmi)
    bmi.id = 1
    bmi.height_m = 1.83
    bmi.mass_kg = 67.57
    bmi.patient_id = patient.id
    bmi.when_created = local(2020, 8, 1)
    self.dbsession.add(bmi)
    self.dbsession.commit()
    self.assertTrue(bmi.is_complete())

    TaskIndexEntry.index_task(
        bmi, self.dbsession, indexed_at_utc=Pendulum.utcnow()
    )
    self.dbsession.commit()

    proquint = server_patient.uuid_as_proquint

    # For type checker
    assert proquint is not None
    assert self.other_device.name is not None

    self.req.fake_request_post_from_dict(
        {
            TabletParam.CAMCOPS_VERSION: MINIMUM_TABLET_VERSION,
            TabletParam.DEVICE: self.other_device.name,
            TabletParam.OPERATION: Operations.GET_TASK_SCHEDULES,
            TabletParam.PATIENT_PROQUINT: proquint,
        }
    )

    response = client_api(self.req)
    reply_dict = get_reply_dict_from_response(response)
    self.assertEqual(
        reply_dict[TabletParam.SUCCESS], SUCCESS_CODE, msg=reply_dict
    )

    task_schedules = json.loads(reply_dict[TabletParam.TASK_SCHEDULES])
    self.assertEqual(len(task_schedules), 2)

    s = task_schedules[0]
    self.assertEqual(s[TabletParam.TASK_SCHEDULE_NAME], "Test 1")
    schedule_items = s[TabletParam.TASK_SCHEDULE_ITEMS]
    self.assertEqual(len(schedule_items), 4)

    phq9_1_sched = schedule_items[0]
    self.assertEqual(phq9_1_sched[TabletParam.TABLE], "phq9")
    self.assertEqual(
        phq9_1_sched[TabletParam.SETTINGS], {"phq9_key": "phq9_value"}
    )
    self.assertEqual(
        parse(phq9_1_sched[TabletParam.DUE_FROM]), local(2020, 7, 31)
    )
    self.assertEqual(
        parse(phq9_1_sched[TabletParam.DUE_BY]), local(2020, 8, 7)
    )
    self.assertFalse(phq9_1_sched[TabletParam.COMPLETE])
    self.assertFalse(phq9_1_sched[TabletParam.ANONYMOUS])

    bmi_sched = schedule_items[1]
    self.assertEqual(bmi_sched[TabletParam.TABLE], "bmi")
    self.assertEqual(
        bmi_sched[TabletParam.SETTINGS], {"bmi_key": "bmi_value"}
    )
    self.assertEqual(
        parse(bmi_sched[TabletParam.DUE_FROM]), local(2020, 7, 31)
    )
    self.assertEqual(
        parse(bmi_sched[TabletParam.DUE_BY]), local(2020, 8, 8)
    )
    self.assertTrue(bmi_sched[TabletParam.COMPLETE])
    self.assertFalse(bmi_sched[TabletParam.ANONYMOUS])

    phq9_2_sched = schedule_items[2]
    self.assertEqual(phq9_2_sched[TabletParam.TABLE], "phq9")
    self.assertEqual(
        phq9_2_sched[TabletParam.SETTINGS], {"phq9_key": "phq9_value"}
    )
    self.assertEqual(
        parse(phq9_2_sched[TabletParam.DUE_FROM]), local(2020, 8, 30)
    )
    self.assertEqual(
        parse(phq9_2_sched[TabletParam.DUE_BY]), local(2020, 9, 6)
    )
    self.assertFalse(phq9_2_sched[TabletParam.COMPLETE])
    self.assertFalse(phq9_2_sched[TabletParam.ANONYMOUS])

    # GMCPQ
    gmcpq_sched = schedule_items[3]
    self.assertTrue(gmcpq_sched[TabletParam.ANONYMOUS])
def test_deleting_deletes_related_objects(self) -> None:
    schedule = TaskSchedule()
    schedule.group_id = self.group.id
    self.dbsession.add(schedule)
    self.dbsession.flush()

    item = TaskScheduleItem()
    item.schedule_id = schedule.id
    item.task_table_name = "ace3"
    item.due_from = Duration(days=30)
    item.due_by = Duration(days=60)
    self.dbsession.add(item)
    self.dbsession.flush()

    patient = self.create_patient()

    pts = PatientTaskSchedule()
    pts.schedule_id = schedule.id
    pts.patient_pk = patient.pk
    self.dbsession.add(pts)
    self.dbsession.flush()

    email = Email()
    self.dbsession.add(email)
    self.dbsession.flush()

    pts_email = PatientTaskScheduleEmail()
    pts_email.email_id = email.id
    pts_email.patient_task_schedule_id = pts.id
    self.dbsession.add(pts_email)
    self.dbsession.commit()

    self.assertIsNotNone(
        self.dbsession.query(TaskScheduleItem)
        .filter(TaskScheduleItem.id == item.id)
        .one_or_none()
    )
    self.assertIsNotNone(
        self.dbsession.query(PatientTaskSchedule)
        .filter(PatientTaskSchedule.id == pts.id)
        .one_or_none()
    )
    self.assertIsNotNone(
        self.dbsession.query(PatientTaskScheduleEmail)
        .filter(PatientTaskScheduleEmail.patient_task_schedule_id == pts.id)
        .one_or_none()
    )
    self.assertIsNotNone(
        self.dbsession.query(Email).filter(Email.id == email.id).one_or_none()
    )

    self.dbsession.delete(schedule)
    self.dbsession.commit()

    self.assertIsNone(
        self.dbsession.query(TaskScheduleItem)
        .filter(TaskScheduleItem.id == item.id)
        .one_or_none()
    )
    self.assertIsNone(
        self.dbsession.query(PatientTaskSchedule)
        .filter(PatientTaskSchedule.id == pts.id)
        .one_or_none()
    )
    self.assertIsNone(
        self.dbsession.query(PatientTaskScheduleEmail)
        .filter(PatientTaskScheduleEmail.patient_task_schedule_id == pts.id)
        .one_or_none()
    )
    self.assertIsNone(
        self.dbsession.query(Email).filter(Email.id == email.id).one_or_none()
    )
def test_due_within_calculated_when_missing_due_from(self) -> None:
    item = TaskScheduleItem()
    item.due_by = Duration(days=30)
    self.assertEqual(item.due_within.in_days(), 30)
def test_due_within_is_none_when_missing_due_by(self) -> None:
    item = TaskScheduleItem()
    item.due_from = Duration(days=30)
    self.assertIsNone(item.due_within)
def test_due_within_calculated_from_due_by_and_due_from(self) -> None:
    item = TaskScheduleItem()
    item.due_from = Duration(days=30)
    item.due_by = Duration(days=50)
    self.assertEqual(item.due_within.in_days(), 20)
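# The three tests above pin down the behaviour of TaskScheduleItem.due_within:
# None without a due_by, otherwise due_by minus due_from, with a missing
# due_from treated as zero. A minimal sketch of a property with that
# behaviour -- an illustration only, not the project's actual implementation:
from typing import Optional
from pendulum import Duration

class _DueWithinSketch:
    due_from: Optional[Duration] = None
    due_by: Optional[Duration] = None

    @property
    def due_within(self) -> Optional[Duration]:
        if self.due_by is None:
            return None
        if self.due_from is None:
            return self.due_by
        return self.due_by - self.due_from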
def test_description_shows_shortname_and_number_of_days(self) -> None:
    item = TaskScheduleItem()
    item.task_table_name = "bmi"
    item.due_from = Duration(days=30)
    self.assertEqual(item.description(self.req), "BMI @ 30 days")