def call_resource(self, request):
    response = TrainingDiaryResponse()
    try:
        tdm = TrainingDataManager()
        tdm.delete_reading_for_primary_key(request.POST['primary_key'])
        response.set_status(response.SUCCESS)
        response.add_data('primary_key', request.POST['primary_key'])
        response.add_message(
            response.MSG_INFO,
            f"Reading Deleted: {request.POST['primary_key']}")
    except Exception as e:
        response.set_status(response.ERROR)
        response.add_message(response.MSG_ERROR, f"Delete failed: {str(e)}")
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    date = request.POST['date']
    readings = list()
    tdm = TrainingDataManager()
    for reading in json.loads(request.POST['json']):
        tdm.save_reading(date, reading['reading'], reading['value'])
        readings.append(reading['reading'])
    # re-read the saved readings so the response returns them as stored
    readings_saved = list()
    for r in readings:
        readings_saved.append(
            tdm.reading_for_date_and_type(date, r)[0].data_dictionary())
    response = TrainingDiaryResponse()
    response.add_message(response.MSG_INFO,
                         f"{len(readings)} saved: {', '.join(readings)}")
    response.add_data('readings', readings_saved)
    return JsonResponse(data=response.as_dict())
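# A minimal sketch of the POST body the reading-save handler above expects:
# a 'date' field plus a 'json' field holding a list of {reading, value}
# pairs. The reading names and values below are assumptions for illustration
# only and are not taken from the codebase.
_example_readings_payload = {
    'date': '2024-06-01',
    'json': json.dumps([
        {'reading': 'sleep', 'value': 8.0},
        {'reading': 'restingHR', 'value': 42},
    ]),
}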
def call_resource(self, request):
    target_date = request.POST['date']
    response = TrainingDiaryResponse()
    tdm = TrainingDataManager()
    if tdm.day_exists(target_date):
        tdm.update_day(target_date, request.POST['day_type'],
                       request.POST['comments'])
        response.add_message(response.MSG_INFO, f"Day updated for {target_date}")
    else:
        tdm.save_day(target_date, request.POST['day_type'],
                     request.POST['comments'])
        response.add_message(response.MSG_INFO, f"New day added for {target_date}")
    response.add_data('day', tdm.day_for_date(target_date).data_dictionary())
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    response = TrainingDiaryResponse()
    dd, errors = self._process_data(request.POST['json'])
    for e in errors:
        response.add_message(response.MSG_ERROR, e)
    tdm = TrainingDataManager()
    primary_key = tdm.workout_primary_key(dd['date'], dd['workout_number'])
    try:
        if 'primary_key' not in dd or dd['primary_key'] == "":
            # new workout
            lastrowid = tdm.save_workout(
                **self._filtered_dict(dd, self.NEW_FIELDS))
            response.add_message(response.MSG_INFO, "New workout saved")
            response.add_data(
                'workout',
                tdm.workout_for_rowid(lastrowid)[0].data_dictionary())
        elif primary_key == dd['primary_key']:
            # update existing workout in place
            tdm.update_workout(
                **self._filtered_dict(dd, self.UPDATE_FIELDS))
            response.add_message(response.MSG_INFO, "Workout updated")
            response.add_data(
                'workout',
                tdm.workout_for_primary_key(primary_key)[0].data_dictionary())
        else:
            # date changed: delete the old workout and save a new one so the
            # primary keys stay consistent
            old_key = dd['primary_key']
            tdm.delete_workout_for_primary_key(old_key)
            response.add_data('removed_primary_key', old_key)
            lastrowid = tdm.save_workout(
                **self._filtered_dict(dd, self.NEW_FIELDS))
            workout = tdm.workout_for_rowid(lastrowid)[0]
            response.add_message(
                response.MSG_INFO,
                f"Workout date changed so the old workout was deleted and a "
                f"new one added. Removed {old_key} and added {workout.primary_key}")
            response.add_data('workout', workout.data_dictionary())
    except TypeError as e:
        response.set_status(response.ERROR)
        response.add_message(response.MSG_ERROR, str(e))
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    # one calendar-year mean per reading type, with a no-op processor
    period_year = Period(pandas_period=PandasPeriod.Y_DEC,
                         aggregation=Aggregation.MEAN,
                         to_date=False,
                         incl_zeroes=False)
    series_definition = SeriesDefinition(period=period_year,
                                         rolling_definition=NoOpRoller())
    time_series_sets = list()
    for reading in ReadingEnum:
        dd = DataDefinition(measure=reading.value,
                            day_aggregation_method=DayAggregation.MEAN)
        time_series_sets.append(
            TimeSeriesManager.TimeSeriesSet(
                data_definition=dd,
                series_definition=series_definition,
                processor=TimeSeriesProcessor.get_processor("No-op")))

    diary_time_period = TrainingDataManager().diary_time_period()
    # year summaries need the time period to run to the end of the year
    diary_time_period.end = date(diary_time_period.end.year, 12, 31)

    tsl, errors = TimeSeriesManager().time_series_list(
        requested_time_period=diary_time_period,
        time_series_list=time_series_sets)

    if len(tsl) > 0:
        # append a 'Total' row: the mean taken over years with a non-zero value
        total_series = {'date': "Total"}
        for k in tsl[0].keys():
            if k != 'date':
                entries = [d[k] for d in tsl]
                year_count = sum([1 for e in entries if e > 0])
                total_series[k] = 0 if year_count == 0 else sum(entries) / year_count
        tsl.append(total_series)

    for dd in tsl:
        dd['name'] = dd['date']

    response = TrainingDiaryResponse()
    for e in errors:
        response.add_message(response.MSG_ERROR, e)
    response.add_data('time_series', tsl)
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    response = TrainingDiaryResponse()
    year = int(request.POST['year'])
    tp = TimePeriod(date(year, 1, 1), date(year, 12, 31))

    swim_km = DataDefinition(activity='Swim', activity_type='All', equipment='All',
                             measure='km', day_aggregation_method=DayAggregation.SUM)
    bike_km = DataDefinition(activity='Bike', activity_type='All', equipment='All',
                             measure='km', day_aggregation_method=DayAggregation.SUM)
    run_km = DataDefinition(activity='Run', activity_type='All', equipment='All',
                            measure='km', day_aggregation_method=DayAggregation.SUM)
    hours = DataDefinition(activity='All', activity_type='All', equipment='All',
                           measure='hours', day_aggregation_method=DayAggregation.SUM)
    reps = DataDefinition(activity='Gym', activity_type='PressUp', equipment='All',
                          measure='reps', day_aggregation_method=DayAggregation.SUM)

    series_defn = SeriesDefinition(period=Period(PandasPeriod(request.POST['period'])))
    summary = [
        TimeSeriesManager.TimeSeriesSet(data_definition=d, series_definition=series_defn)
        for d in (swim_km, bike_km, run_km, hours, reps)
    ]

    dd, errors = TimeSeriesManager().time_series_list(tp, summary)
    response.add_data('time_series', sorted(dd, key=lambda x: x['date']))
    for error in errors:
        response.add_message(response.MSG_ERROR, error)
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    dd, errors = self._process_data(request.POST['json'])
    # track which keys get consumed so unused data can be reported back
    dd_keys = set(dd.keys())

    data_definition = DataDefinition(
        activity=dd['activity'],
        activity_type=dd['activity_type'],
        equipment=dd['equipment'],
        measure=dd['measure'],
        day_aggregation_method=DayAggregation(dd['day_aggregation']),
        day_type=dd['day_type'],
        day_of_week=dd['day_of_week'],
        month=dd['month'],
        interpolation=dd['interpolation'])
    for k in ('activity', 'activity_type', 'equipment', 'measure',
              'day_aggregation', 'day_type', 'day_of_week', 'month',
              'interpolation'):
        dd_keys.remove(k)

    period = Period(pandas_period=PandasPeriod(dd['period']),
                    aggregation=Aggregation(dd['period_aggregation']),
                    to_date=dd['to_date'] == 'yes',
                    incl_zeroes=dd['period_include_zeroes'] == 'yes')
    for k in ('period', 'period_aggregation', 'to_date', 'period_include_zeroes'):
        dd_keys.remove(k)

    if dd['rolling'] == 'yes':
        rolling_definition = RollingDefinition(
            periods=int(dd['number_of_rolling_periods']),
            aggregation=Aggregation(dd['rolling_aggregation']),
            incl_zeros=dd['rolling_include_zeroes'] == 'yes')
    else:
        rolling_definition = NoOpRoller()
    for k in ('number_of_rolling_periods', 'rolling_aggregation',
              'rolling_include_zeroes', 'rolling'):
        dd_keys.remove(k)

    series_definition = SeriesDefinition(period=period,
                                         rolling_definition=rolling_definition)

    processor = self.get_processor(dd)
    dd_keys.remove('processor_type')

    response = TrainingDiaryResponse()
    for e in errors:
        response.add_message(response.MSG_ERROR, e)

    # default the series to the full diary time period when no explicit
    # start / end dates were supplied
    diary_time_period = TrainingDataManager().diary_time_period()
    data_tp = TimePeriod(
        diary_time_period.start if dd['series_start'] is None else dd['series_start'],
        diary_time_period.end if dd['series_end'] is None else dd['series_end'])
    dd_keys.remove('series_start')
    dd_keys.remove('series_end')

    x_axis_number = 1
    if 'x_axis_number' in dd:
        x_axis_number = dd['x_axis_number']
        dd_keys.remove('x_axis_number')

    tss = TimeSeriesManager.TimeSeriesSet(data_definition,
                                          series_definition=series_definition,
                                          processor=processor,
                                          x_axis_number=x_axis_number)
    ts = TimeSeriesManager().time_series_graph(data_tp, [tss])
    response.add_data('time_series', ts)

    if len(dd_keys) > 0:
        response.add_message(
            response.MSG_WARNING,
            f"The following data was not used: {', '.join(dd_keys)}")
    return JsonResponse(data=response.as_dict())
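# A minimal sketch of the decoded request data the graph handler above
# consumes, listing every key it reads. All values here are illustrative
# assumptions, not taken from the codebase; the accepted choices come from
# the DayAggregation, Aggregation and PandasPeriod enums and the
# TimeSeriesProcessor registry ("No-op" is the only processor name visible
# in these handlers).
_example_graph_request = {
    'activity': 'Bike', 'activity_type': 'All', 'equipment': 'All',
    'measure': 'km', 'day_aggregation': 'Sum',
    'day_type': 'All', 'day_of_week': 'All', 'month': 'All',
    'interpolation': 'none',
    'period': 'M', 'period_aggregation': 'Sum',
    'to_date': 'no', 'period_include_zeroes': 'yes',
    'rolling': 'yes', 'number_of_rolling_periods': '7',
    'rolling_aggregation': 'Sum', 'rolling_include_zeroes': 'yes',
    'processor_type': 'No-op',
    'series_start': None, 'series_end': None,
    'x_axis_number': 1,
}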