def test_create_view_with_conflicting_unique(self):
    end_time = get_utc_now()
    start_time = get_time_relative_units_ago(end_time, hours=8)

    post_data = {"start_time": start_time, "end_time": end_time}
    response = self.client_1.post(self.url, data=post_data)
    self.assertEqual(response.status_code, 200, response.data)

    data = response.data
    value_name = data["end_time"]
    value_parsed = parse_datetime_string(value_name)
    self.assertEqual(end_time, value_parsed)

    # post again; creating a duplicate of something already made should be rejected
    response = self.client_1.post(self.url, data=post_data)
    self.assertEqual(response.status_code, 400, response.data)

    data = response.data
    """
    error_message should be
    {'non_field_errors': [ErrorDetail(string='The fields user, name must make a unique set.', code='unique')]}
    """
    expected_error_found = "non_field_errors" in data
    self.assertTrue(expected_error_found)
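# The 400 above is the shape DRF's unique-together validation produces. A minimal
# sketch of a model/serializer pair that emits exactly that non_field_errors payload;
# DemoLog and DemoLogSerializer are hypothetical names for illustration, not this
# project's actual classes:
from django.db import models
from rest_framework import serializers


class DemoLog(models.Model):  # hypothetical model, for illustration only
    user = models.ForeignKey("auth.User", on_delete=models.CASCADE)
    name = models.CharField(max_length=64)

    class Meta:
        app_label = "demo"
        unique_together = ("user", "name")


class DemoLogSerializer(serializers.ModelSerializer):
    class Meta:
        model = DemoLog
        fields = ("user", "name")
        # DRF auto-generates a UniqueTogetherValidator from unique_together, which
        # raises {'non_field_errors': ['The fields user, name must make a unique set.']}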
def test_view_response_for_productivity(self):
    """
    dpy test open.core.betterself.tests.views.test_overview_view.OverviewTestView.test_view_response_for_productivity --keepdb
    """
    start_period = get_time_relative_units_ago(self.end_period, days=7)
    start_period_string = start_period.date().strftime(yyyy_mm_dd_format_1)

    # make sure that the start_period is in this test's fixtures
    DailyProductivityLogFactory(user=self.user_1, date=start_period)

    kwargs = {
        "start_date": start_period_string,
        "end_date": self.end_period_date_string,
    }
    url = reverse(BetterSelfResourceConstants.OVERVIEW, kwargs=kwargs)

    response = self.client_1.get(url)
    self.assertEqual(response.status_code, 200)

    data = response.data["productivity"]
    logs = data["logs"]
    self.assertTrue(logs)

    # a week should only have 7 logs; don't include any days it shouldn't contain
    logs_lte_7 = len(logs) <= 7
    self.assertTrue(logs_lte_7, len(logs))

    dates_in_logs = [item["date"] for item in logs]
    self.assertTrue(start_period_string in dates_in_logs)
def test_view_response_for_sleep_data(self):
    """
    dpy test open.core.betterself.tests.views.test_overview_view.OverviewTestView.test_view_response_for_sleep_data --keepdb
    """
    start_period = get_time_relative_units_ago(self.end_period, days=7)
    start_period_string = start_period.date().strftime(yyyy_mm_dd_format_1)
    end_date_string = self.end_period.date().strftime(yyyy_mm_dd_format_1)

    kwargs = {"start_date": start_period_string, "end_date": end_date_string}
    url = reverse(BetterSelfResourceConstants.OVERVIEW, kwargs=kwargs)

    response = self.client_1.get(url)
    self.assertEqual(response.status_code, 200)

    data = response.data["sleep"]
    total_sleep_hours = data["total_duration_hours"]
    total_duration_minutes = data["total_duration_minutes"]

    # fixtures are random, but it's unlikely to sleep more than 10 hours a day
    self.assertTrue(30 < total_sleep_hours < 80)

    expected_sleep_minutes = total_sleep_hours * 60
    self.assertAlmostEqual(total_duration_minutes, expected_sleep_minutes, 0)
def test_view_response_for_productivity_no_data(self):
    start_period = get_time_relative_units_ago(self.end_period, days=7)
    start_period_string = start_period.date().strftime(yyyy_mm_dd_format_1)

    kwargs = {
        "start_date": start_period_string,
        "end_date": self.end_period_date_string,
    }
    url = reverse(BetterSelfResourceConstants.OVERVIEW, kwargs=kwargs)

    data = self.client_1.get(url).data
    self.assertTrue("productivity" in data)
def test_create_view_with_blank_notes(self):
    end_time = get_utc_now()
    start_time = get_time_relative_units_ago(end_time, hours=8)

    # important: notes has to have a space when failing with the API client!
    post_data = {"start_time": start_time, "end_time": end_time, "notes": " "}
    response = self.client_1.post(self.url, data=post_data)
    self.assertEqual(response.status_code, 200, response.data)
def test_create_view(self):
    end_time = get_utc_now()
    start_time = get_time_relative_units_ago(end_time, hours=8)

    post_data = {"start_time": start_time, "end_time": end_time}
    response = self.client_1.post(self.url, data=post_data)
    self.assertEqual(response.status_code, 200, response.data)

    data = response.data
    value_name = data["end_time"]
    value_parsed = parse_datetime_string(value_name)
    self.assertEqual(end_time, value_parsed)
def test_sql_query_count(self):
    # dpy test open.core.betterself.tests.views.test_overview_view.OverviewTestView.test_sql_query_count --keepdb
    start_period = get_time_relative_units_ago(self.end_period, years=1)
    start_period_string = start_period.date().strftime(yyyy_mm_dd_format_1)

    kwargs = {
        "start_date": start_period_string,
        "end_date": self.end_period_date_string,
    }
    url = reverse(BetterSelfResourceConstants.OVERVIEW, kwargs=kwargs)

    with self.assertNumQueriesLessThan(15):
        self.client_1.get(url)
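# assertNumQueriesLessThan ships with django-test-plus; if this TestCase doesn't
# inherit from test_plus, a minimal sketch of the same check built on Django's
# CaptureQueriesContext (an assumption, not this project's actual implementation):
from contextlib import contextmanager

from django.db import connection
from django.test.utils import CaptureQueriesContext


@contextmanager
def assert_num_queries_less_than(test_case, num):
    # capture every query executed inside the with-block
    with CaptureQueriesContext(connection) as context:
        yield
    executed = len(context.captured_queries)
    test_case.assertLess(
        executed, num, f"{executed} queries executed, expected fewer than {num}"
    )
# usage: with assert_num_queries_less_than(self, 15): self.client_1.get(url)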
def test_display_name_on_log_serializer_some_days_ago(self):
    supplement = SupplementFactory(user=self.user_1)

    utc_now = get_utc_now()
    time = get_time_relative_units_ago(utc_now, days=8.5)

    post_data = {
        "supplement_uuid": str(supplement.uuid),
        "time": time.isoformat(),
        "quantity": 5,
    }
    response = self.client_1.post(self.url, data=post_data)
    self.assertEqual(response.status_code, 200, response.data)

    display_name = response.data["display_name"]
    self.assertTrue("8.5 days ago" in display_name, display_name)
@classmethod
def setUpTestData(cls):
    user_1 = UserFactory()
    user_2 = UserFactory()

    cls.end_period = get_utc_now()
    cls.end_period_date_string = cls.end_period.date().strftime(yyyy_mm_dd_format_1)

    cls.start_period = get_time_relative_units_ago(cls.end_period, days=7)
    cls.start_period_date_string = cls.start_period.date().strftime(
        yyyy_mm_dd_format_1
    )

    supplements = SupplementFactory.create_batch(10, user=user_1)

    for index in range(100):
        # simulate some missing data
        if index % 5 == 0 and index != 0:
            continue

        date_to_use = cls.end_period - relativedelta.relativedelta(days=index)
        SleepLogFactory(end_time=date_to_use, user=user_1)

        for supplement in supplements:
            SupplementLogFactory.create_batch(
                2, user=user_1, supplement=supplement, time=date_to_use
            )

    cls.user_1_id = user_1.id
    cls.user_2_id = user_2.id

    for index in range(100):
        # simulate some missing data
        if index % 5 == 0 and index != 0:
            continue

        date_to_use = cls.end_period - relativedelta.relativedelta(days=index)
        DailyProductivityLogFactory(user=user_1, date=date_to_use)

        # add some random data to user_2 as well, to make sure nothing leaks across users
        DailyProductivityLogFactory(user=user_2, date=date_to_use)
def test_display_name_on_log_serializer_some_hours_ago(self):
    """
    dpy test open.core.betterself.tests.views.test_supplement_log_views.TestSupplementLogViews.test_display_name_on_log_serializer_some_hours_ago --keepdb
    """
    supplement = SupplementFactory(user=self.user_1)

    utc_now = get_utc_now()
    # go back 5 hours and then forward 30 minutes, so the log is about 4.5 hours ago
    time = get_time_relative_units_ago(utc_now, hours=5.0)
    time = get_time_relative_units_forward(time, minutes=30)

    post_data = {
        "supplement_uuid": str(supplement.uuid),
        "time": time.isoformat(),
        "quantity": 5,
    }
    response = self.client_1.post(self.url, data=post_data)
    self.assertEqual(response.status_code, 200, response.data)

    display_name = response.data["display_name"]
    self.assertTrue("4.5 hours ago" in display_name, display_name)
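# The "8.5 days ago" / "4.5 hours ago" strings asserted above suggest the serializer
# rounds elapsed time to the nearest half unit. A minimal sketch of that rounding,
# assuming that behavior; relative_time_label is a hypothetical helper name, not the
# project's actual serializer method:
def relative_time_label(time, utc_now):
    elapsed = utc_now - time
    hours_ago = elapsed.total_seconds() / 3600
    if hours_ago >= 24:
        # round to the nearest half day, e.g. 8.5 days ago
        days_ago = round(hours_ago / 24 * 2) / 2
        return f"{days_ago:g} days ago"
    # round to the nearest half hour, e.g. 4.5 hours ago
    return f"{round(hours_ago * 2) / 2:g} hours ago"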
def test_create_serializer_with_no_notes(self):
    """
    This is a REALLY weird problem where requests not originating from tests
    were failing when notes was left blank
    """
    end_time = get_utc_now()
    start_time = get_time_relative_units_ago(end_time, hours=8)

    # an empty string for notes will still fail here if the bug is present -- this
    # uses a RequestFactory to mimic how data is actually sent; the APIClient
    # appears to serialize the payload differently
    post_data = {"start_time": start_time, "end_time": end_time, "notes": ""}

    context = create_api_request_context(self.url, self.user_1, post_data)
    serializer = SleepLogCreateUpdateSerializer(data=post_data, context=context)

    valid = serializer.is_valid()
    self.assertTrue(valid, serializer.errors)
def sleep_start_time(sleep_end_time):
    # roughly 8 hours of sleep, with a little jitter so fixtures aren't identical
    random_minutes = random.randint(1, 30)
    start_time = get_time_relative_units_ago(
        sleep_end_time, hours=8, minutes=random_minutes
    )
    return start_time
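# sleep_start_time reads like a factory_boy helper that derives start_time from
# end_time. A minimal usage sketch, assuming a SleepLogFactory roughly like this
# (the factory body is illustrative, not the project's actual definition):
import factory


class SleepLogFactorySketch(factory.django.DjangoModelFactory):
    class Meta:
        model = SleepLog  # assumed model from this app

    end_time = factory.LazyFunction(get_utc_now)
    # derive start_time lazily from whatever end_time the caller passed in
    start_time = factory.LazyAttribute(lambda o: sleep_start_time(o.end_time))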
def get_overview_productivity_data(
    user, start_period: datetime, end_period: datetime, lookback_periods: int = 14
):
    LOG_MODEL = DailyProductivityLog

    response = {
        "start_period": start_period.date().isoformat(),
        "end_period": end_period.date().isoformat(),
        "logs": [],
        "log_type": PRODUCTIVITY_LOG_TYPE,
    }

    # get more data than the requested window, so we can calculate a running historical average
    start_period_with_lookback = get_time_relative_units_ago(
        start_period, days=lookback_periods
    )

    logs = LOG_MODEL.objects.filter(
        user=user, date__lte=end_period, date__gte=start_period_with_lookback
    ).order_by("date")
    if not logs.exists():
        return response

    df = pd.DataFrame.from_records(
        logs.values(*PRODUCTIVITY_LOG_VALUE_FIELDS), index="date"
    )
    df.index = pd.DatetimeIndex(df.index)

    formatted_dates = [item.strftime(yyyy_mm_dd_format_1) for item in df.index.date]
    df["date"] = formatted_dates

    # use this to truncate the resampled series back to days that actually have logs
    original_index = df.index

    df["uuid"] = df["uuid"].astype(str)
    df = df.resample("D").first()

    for metric in PRODUCTIVITY_METRICS:
        # create a new column on the df with the rolling mean
        metric_mean_label = f"{metric}_mean"
        mean_series = df[metric].rolling(window=lookback_periods, min_periods=1).mean()
        df[metric_mean_label] = mean_series

    # now that the rolling averages are calculated, truncate the missing days?
    # alternatively, maybe don't do this and show the NaN on missing charts, not sure yet
    df = df.loc[original_index]

    # always make sure it's sorted by ascending datetime
    df = df.sort_index()

    # truncate all the earlier records that were only needed for the rolling averages;
    # use date() to remove tz info
    df = df.loc[start_period.date() :]  # noqa

    # round-trip through pandas' serializer to get its beautiful date formatting and rounding
    # pandas is amazing, xoxo forever
    serialized_output = df.to_json(
        orient="records", date_format="iso", double_precision=2
    )
    serialized_output = json.loads(serialized_output)

    response["logs"] = serialized_output
    return response
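# The lookback-then-truncate pattern above in miniature: fetch extra history so the
# first day of the requested window already has a fully-warmed rolling mean, then
# slice back to the window. A self-contained sketch with synthetic data (the values
# and dates are assumed, purely illustrative):
import pandas as pd

lookback = 3
window_start = pd.Timestamp("2020-01-04")

index = pd.date_range("2020-01-01", periods=7, freq="D")
df = pd.DataFrame({"score": [2, 4, 6, 8, 10, 12, 14]}, index=index)

# rolling mean computed over the full history, including the lookback days
df["score_mean"] = df["score"].rolling(window=lookback, min_periods=1).mean()

# slice away the lookback days; the first row kept still has a full window behind it
df = df.loc[window_start:]
print(df)
# 2020-01-04 has score_mean 6.0 == mean(4, 6, 8), thanks to the lookback rows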
def create_demo_fixtures_for_user(user):
    username = user.username
    if "demo" not in username:
        raise ValueError(
            f"Cannot Run Demo Fixtures for Username without DEMO {username}"
        )

    # wipe out all the previous models and start from scratch
    models_to_clean = [
        Activity,
        ActivityLog,
        DailyProductivityLog,
        Ingredient,
        IngredientComposition,
        SleepLog,
        Supplement,
        SupplementLog,
        SupplementStack,
        SupplementStackComposition,
        WellBeingLog,
        FoodLog,
        Food,
    ]
    for model in models_to_clean:
        model.objects.filter(user=user).delete()

    # easier to see any row updates
    daily_logs_to_create = 30
    nested_models_logs_to_create = 10
    supplements_to_create = 15
    sleep_logs_to_create = 90
    activities_to_create = 40

    activities = ActivityFactory.create_batch(activities_to_create, user=user)
    for activity in activities:
        ActivityLogFactory.create_batch(
            nested_models_logs_to_create, activity=activity, user=user
        )

    productivity_logs_to_create = 90
    DailyProductivityLogFactory.create_batch(productivity_logs_to_create, user=user)

    supplements = SupplementFactory.create_batch(supplements_to_create, user=user)
    for supplement in supplements:
        SupplementLogFactory.create_batch(
            nested_models_logs_to_create, user=user, supplement=supplement
        )

    # ingredients = IngredientFactory.create_batch(fixtures_to_create, user=user)
    #
    # for ingredient in ingredients:
    #     ingredient_composition = IngredientCompositionFactory(
    #         ingredient=ingredient, user=user
    #     )
    #     supplement = SupplementFactory.create(
    #         user=user,
    #         name=ingredient.name,
    #         ingredient_compositions=[ingredient_composition],
    #     )
    #     SupplementLogFactory.create_batch(
    #         nested_models_logs_to_create, user=user, supplement=supplement
    #     )

    WellBeingLogFactory.create_batch(daily_logs_to_create, user=user)

    utc_now = get_utc_now()
    # start 2 days ago, that way you can create data faster when self-testing
    start_period = get_time_relative_units_ago(utc_now, days=2)

    sleep_dates = []
    for index in range(sleep_logs_to_create):
        sleep_date = start_period - relativedelta.relativedelta(days=index)
        sleep_dates.append(sleep_date)

    for sleep_date in sleep_dates:
        SleepLogFactory(end_time=sleep_date, user=user)

    foods = FoodFactory.create_batch(daily_logs_to_create, user=user)
    for food in foods:
        FoodLogFactory.create_batch(nested_models_logs_to_create, food=food, user=user)

    logger.info(f"Successfully Created Demo Fixtures for {user.username}")