def edit_reading(post_dictionary):
    """Apply posted edits to reading rows and return the result as a JsonResponse.

    Only the 'value' field of a reading may be edited; any other field name is
    reported back in 'fieldErrors'. The (last) edited reading is re-read and
    returned under 'data'.
    """
    models_dict = parse_post_dictionary(post_dictionary)
    tdm = TrainingDataManager()
    response_dict = {}
    data = []
    field_errors = []
    primary_key = None
    for pk, fields in models_dict.items():
        primary_key = pk
        for field_name, new_value in fields.items():
            if field_name == 'value':
                tdm.update_reading_for_primary_key(pk, new_value)
            else:
                # 'name' identifies the form field the error belongs to
                field_errors.append({'name': 'value', 'status': f"invalid field name: {field_name}"})
    if primary_key is None:
        response_dict['error'] = "No primary key"
    else:
        reading = tdm.reading_for_primary_key(primary_key)
        data.append(reading.data_dictionary())
    if data:
        response_dict['data'] = data
    if field_errors:
        response_dict['fieldErrors'] = field_errors
    return JsonResponse(data=response_dict)
def __init__(self, activity='All', activity_type='All', equipment='All', measure='km',
             day_aggregation_method=DayAggregation.SUM, day_of_week='All', month='All',
             day_type='All', interpolation='none') -> object:
    """Capture the filters and aggregation settings for one diary data series.

    'All' for any filter argument means no restriction on that dimension.
    """
    self.activity = activity
    self.activity_type = activity_type
    self.equipment = equipment
    self.measure = measure
    self.day_aggregation_method = day_aggregation_method
    self.day_of_week = day_of_week
    # pre-computed SQLite strftime('%w') weekday number ('-1' when unfiltered)
    self.sql_day_of_week = self.DAY_TO_SQL_NUMBER[day_of_week]
    self.month = month
    # pre-computed SQLite strftime('%m') month string ('-1' when unfiltered)
    self.sql_month = self.MONTH_TO_SQL_NUMBER[month]
    self.day_type = day_type
    # converter maps the requested measure onto an underlying stored one;
    # None when the measure is stored directly
    self.converter = measure_converter(measure)
    self.target_measure = measure if self.converter is None else self.converter.underlying_measure()
    self.interpolation = interpolation
    self.tdm = TrainingDataManager()
def sql(self, time_period) -> str:
    """Assemble the SELECT statement for this definition over *time_period*."""
    tdm = TrainingDataManager()
    table = tdm.table_for_measure(self.target_measure)
    where = self.where_clause_reading() if table == 'Reading' else self.where_clause_workout()
    parts = [
        self.select_clause(table),
        f" FROM {table} ",
        self.inner_join(table),   # joins Day when a day-type filter applies
        "WHERE ",
        where,
        f"{table}.date BETWEEN '{time_period.start}' and '{time_period.end}' GROUP BY {table}.date",
    ]
    return "".join(parts)
def call_resource(self, request):
    """Delete the reading identified by POST['primary_key'] and report the outcome."""
    response = TrainingDiaryResponse()
    try:
        pk = request.POST['primary_key']
        TrainingDataManager().delete_reading_for_primary_key(pk)
        response.set_status(response.SUCCESS)
        response.add_data('primary_key', pk)
        response.add_message(response.MSG_INFO, f"Reading Deleted: {pk}")
    except Exception as e:
        # best-effort endpoint: report the failure rather than 500
        response.set_status(response.ERROR)
        response.add_message(response.MSG_ERROR, f"Delete failed: {str(e)}")
    return JsonResponse(data=response.as_dict())
def populate_workouts(self):
    """Load this day's workouts and derive the aggregate attributes.

    Sets workouts / workout_count always; tss, training_duration and the
    per-activity km attributes only when at least one workout exists
    (matching the original behavior of leaving them unset otherwise).
    """
    from workoutentry.training_data import TrainingDataManager
    self.workouts = TrainingDataManager().workouts_on_date(self.date)
    self.workout_count = len(self.workouts)
    if self.workout_count > 0:
        # sum() replaces functools.reduce(operator.add, ...): reduce with no
        # initializer raises on an empty sequence, which forced the previous
        # per-activity guard lists; sum with a 0.0 start handles empty cleanly.
        self.tss = sum(w.tss for w in self.workouts)
        self.training_duration = sum(w.seconds for w in self.workouts)
        self.swim_km = sum((w.km for w in self.workouts if w.activity == 'Swim'), 0.0)
        self.bike_km = sum((w.km for w in self.workouts if w.activity == 'Bike'), 0.0)
        self.run_km = sum((w.km for w in self.workouts if w.activity == 'Run'), 0.0)
def call_resource(self, request):
    """Return the day after the latest diary date as 'next_date'."""
    response = TrainingDiaryResponse()
    latest = parser.parse(TrainingDataManager().latest_date()).date()
    response.add_data('next_date', latest + timedelta(1))
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    """Return the reading types not yet recorded for POST['date']."""
    response = TrainingDiaryResponse()
    unused = TrainingDataManager().reading_types_unused_for_date(request.POST['date'])
    response.add_data('readings', unused)
    return JsonResponse(data=response.as_dict())
def generate_from_date(self, from_date, print_progress=False):
    """Generate warehouse data from *from_date* up to the latest diary date.

    When no from_date is given, resumes from the day after the warehouse's
    current max date, or from the diary's earliest date if the warehouse is
    empty.
    """
    start_date_str = from_date
    if from_date is None or from_date == '':
        from . import DataWarehouse
        warehouse_max = DataWarehouse.instance().max_date()
        if warehouse_max is None:
            # warehouse empty: begin at the first date in the training data
            start_date_str = TrainingDataManager().earliest_date()
        else:
            # resume the day after the warehouse's last entry
            next_day = dateutil.parser.parse(warehouse_max).date() + datetime.timedelta(days=1)
            start_date_str = str(next_day)
    self.generate(from_date=start_date_str,
                  to_date=TrainingDataManager().latest_date(),
                  print_progress=print_progress)
def call_resource(self, request):
    """Return data dictionaries for Day/Reading/Workout rows in the posted date range."""
    response = TrainingDiaryResponse()
    tdm = TrainingDataManager()
    fetchers = {
        'Day': tdm.days_between,
        'Reading': tdm.readings_between,
        'Workout': tdm.workouts_between,
    }
    fetch = fetchers.get(request.POST['data_type'])
    # unknown data_type yields an empty list, as before
    instances = fetch(request.POST['from_date'], request.POST['to_date']) if fetch else []
    response.add_data('instances', [i.data_dictionary() for i in instances])
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    """Save the posted readings for a date, then return them re-read from storage."""
    date = request.POST['date']
    tdm = TrainingDataManager()
    saved_names = []
    for entry in json.loads(request.POST['json']):
        tdm.save_reading(date, entry['reading'], entry['value'])
        saved_names.append(entry['reading'])
    # re-fetch each saved reading so the response reflects what was stored
    readings_saved = [
        tdm.reading_for_date_and_type(date, name)[0].data_dictionary()
        for name in saved_names
    ]
    response = TrainingDiaryResponse()
    response.add_message(response.MSG_INFO,
                         f"{len(saved_names)} saved: {', '.join(saved_names)}")
    response.add_data('readings', readings_saved)
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    """Return per-year activity summaries plus an all-years 'Total' row."""
    response = TrainingDiaryResponse()
    summary = TrainingDataManager().training_annual_summary()
    totals = {"name": 'Total'}
    years = []
    for year, activities in summary.items():
        for activity, measures in activities.items():
            running = totals.setdefault(activity, {'km': 0, 'seconds': 0, 'tss': 0})
            for key in ('km', 'seconds', 'tss'):
                running[key] += measures[key]
        activities['name'] = year
        years.append(activities)
    years.append(totals)
    response.add_data('years', years)
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    """Return per-bike annual km summaries, a cross-bike 'Total' row, and the year keys."""
    response = TrainingDiaryResponse()
    summary = TrainingDataManager().bike_summary()
    bikes = []
    totals = {'name': 'Total'}
    for bike, per_year in summary.items():
        for year, km in per_year.items():
            totals[year] = totals.get(year, 0) + km
        per_year['name'] = bike
        bikes.append(per_year)
    bikes.append(totals)
    response.add_data('bikes', bikes)
    # year headings come from the first bike's dict (mutated above, so this
    # includes the 'name' key — preserved from the original behavior)
    first_per_year = next(iter(summary.values()), None)
    years = [] if first_per_year is None else sorted(first_per_year.keys(), reverse=True)
    response.add_data('years', years)
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    """Build per-year mean series for every reading type plus an all-years 'Total' row."""
    # calendar-year buckets, averaged, full years only, zeroes excluded
    period_year = Period(pandas_period=PandasPeriod.Y_DEC,
                         aggregation=Aggregation.MEAN,
                         to_date=False,
                         incl_zeroes=False)
    series_definition = SeriesDefinition(period=period_year, rolling_definition=NoOpRoller())
    time_series_sets = list()
    # one time-series set per known reading type
    for reading in ReadingEnum:
        dd = DataDefinition(measure=reading.value, day_aggregation_method=DayAggregation.MEAN)
        time_series_sets.append(TimeSeriesManager.TimeSeriesSet(data_definition=dd,
                                                                series_definition=series_definition,
                                                                processor=TimeSeriesProcessor.get_processor("No-op")))
    diary_time_period = TrainingDataManager().diary_time_period()
    # year summaries do need time period to be to year end
    diary_time_period.end = date(diary_time_period.end.year, 12, 31)
    tsl, errors = TimeSeriesManager().time_series_list(requested_time_period=diary_time_period,
                                                       time_series_list=time_series_sets)
    if len(tsl) > 0:
        # synthetic 'Total' row: mean across years, counting only years whose
        # value is positive so empty years don't drag the average down
        total_series = {'date': "Total"}
        for k in tsl[0].keys():
            if k != 'date':
                entries = [d[k] for d in tsl]
                year_count = sum([1 for e in entries if e > 0])
                total_series[k] = 0 if year_count == 0 else sum(entries) / year_count
        tsl.append(total_series)
    # the front end keys rows by 'name'
    for dd in tsl:
        dd['name'] = dd['date']
    response = TrainingDiaryResponse()
    [response.add_message(response.MSG_ERROR, e) for e in errors]
    response.add_data('time_series', tsl)
    return JsonResponse(data=response.as_dict())
def update_days(self, from_date, to_date, print_progress=False):
    """Rebuild warehouse day rows between the two dates.

    Bounds are clamped to the diary's available range, existing rows in the
    range are deleted, then one row per day is inserted into every
    day_<workout-type> table, creating tables on demand as new workout types
    are encountered.
    """
    start = datetime.datetime.now()
    start_date, end_date = self.__correct_bounds(from_date=from_date, to_date=to_date)
    self.__delete_entries_in_range(start_date, end_date)
    current_date = dateutil.parser.parse(start_date).date()
    last_date = dateutil.parser.parse(end_date).date()
    print(f"Populating for days")
    tables = self.__tables_dict()  # existing table name -> workout type
    count = 0
    while current_date <= last_date:
        count += 1
        d = TrainingDataManager().day_for_date(current_date)
        # create new tables as required
        if d is None:
            # no diary entry for this date — skip it
            current_date = current_date + datetime.timedelta(days=1)
            continue
        for t in d.workout_types():
            table_name = f"day_{str(t)}"
            if table_name not in tables:
                try:
                    self.__create_table(table_name, t, d.date)
                    tables[table_name] = t
                except Exception as e:
                    # CREATE TABLE failure is tolerated — the table most
                    # likely already exists
                    if print_progress:
                        print(f'Table probably exists so continuing. {e}')
                    pass
        # add row for this day to all existing tables
        for key, value in tables.items():
            self.__insert_row(key, value, d)
        if print_progress or count % 100 == 0:
            # \r keeps the progress output on a single console line
            print(f'{count} - {datetime.datetime.now() - start} {d.date}', end='\r')
        current_date = current_date + datetime.timedelta(days=1)
def call_resource(self, request):
    """Create or update the Day row for the posted date and return it."""
    target_date = request.POST['date']
    day_type = request.POST['day_type']
    comments = request.POST['comments']
    response = TrainingDiaryResponse()
    tdm = TrainingDataManager()
    if tdm.day_exists(target_date):
        tdm.update_day(target_date, day_type, comments)
        message = f"Day updated for {target_date}"
    else:
        tdm.save_day(target_date, day_type, comments)
        message = f"New day added for {target_date}"
    response.add_message(response.MSG_INFO, message)
    response.add_data('day', tdm.day_for_date(target_date).data_dictionary())
    return JsonResponse(data=response.as_dict())
def call_resource(self, request):
    """Return the selectable choices for the requested configuration field.

    POST params: 'type' selects which list of choices to return;
    'include_all' == 'true' appends an 'All' option. Response data:
    'choices' as a list of {'text': c, 'id': c} sorted by text.
    """
    choice_type = request.POST['type']  # renamed from 'type' to avoid shadowing the builtin
    tdm = TrainingDataManager()
    choices = list()
    if choice_type == 'activity':
        choices = tdm.activities()
    elif choice_type == 'activityType':
        choices = tdm.activity_types()
    elif choice_type == 'equipment':
        choices = tdm.equipment_types()
    elif choice_type == 'tssMethod':
        choices = tdm.tss_methods()
    elif choice_type == 'dayType':
        choices = tdm.day_types()
    elif choice_type == 'measure':
        choices = [m.value for m in WorkoutFloatMeasureEnum] + [m.value for m in ReadingEnum]
    elif choice_type == 'generated_measure':
        choices = [m for m in TimeSeriesProcessor.generated_measures()]
    elif choice_type == 'period':
        choices = [p.value for p in PandasPeriod]
    elif choice_type == 'aggregation':
        choices = [a.value for a in Aggregation]
    elif choice_type == 'day_aggregation':
        choices = [a.value for a in DayAggregation]
    elif choice_type == 'years':
        # BUG FIX: this was a bare range(); range has no append(), so
        # include_all == 'true' raised AttributeError. Materialise to a list.
        choices = list(range(dateutil.parser.parse(tdm.earliest_date()).year,
                             dateutil.parser.parse(tdm.latest_date()).year + 1))
        # NOTE(review): these are ints — appending the str 'All' below makes the
        # sort key mixed-type; confirm clients never post include_all for years
    elif choice_type == 'processor':
        # copy so appending 'All' below cannot mutate the shared class attribute
        choices = list(TimeSeriesProcessor.TYPES)
    elif choice_type == 'interpolation':
        choices = [i.value for i in PandasInterpolation]
    if request.POST['include_all'] == 'true':
        choices.append('All')
    response = TrainingDiaryResponse()
    response.add_data('choices',
                      sorted([{'text': c, 'id': c} for c in choices], key=lambda c: c["text"]))
    return JsonResponse(data=response.as_dict())
class DataDefinition:
    """Defines one diary data series: what to measure, how to filter it, how to
    aggregate it per day, and how to turn it into SQL / a pandas DataFrame."""

    # day name -> SQLite strftime('%w') weekday number; '-1' means no filter
    DAY_TO_SQL_NUMBER = {
        'All': '-1',
        'Sunday': '0',
        'Monday': '1',
        'Tuesday': '2',
        'Wednesday': '3',
        'Thursday': '4',
        'Friday': '5',
        'Saturday': '6',
    }

    # month name -> SQLite strftime('%m') month string; '-1' means no filter
    MONTH_TO_SQL_NUMBER = {
        'All': '-1',
        'January': '01',
        'February': '02',
        'March': '03',
        'April': '04',
        'May': '05',
        'June': '06',
        'July': '07',
        'August': '08',
        'September': '09',
        'October': '10',
        'November': '11',
        'December': '12',
    }

    def __init__(self, activity='All', activity_type='All', equipment='All', measure='km',
                 day_aggregation_method=DayAggregation.SUM, day_of_week='All', month='All',
                 day_type='All', interpolation='none') -> object:
        """Capture the filters and aggregation settings; 'All' means unfiltered."""
        self.activity = activity
        self.activity_type = activity_type
        self.equipment = equipment
        self.measure = measure
        self.day_aggregation_method = day_aggregation_method
        self.day_of_week = day_of_week
        self.sql_day_of_week = self.DAY_TO_SQL_NUMBER[day_of_week]
        self.month = month
        self.sql_month = self.MONTH_TO_SQL_NUMBER[month]
        self.day_type = day_type
        # converter maps the requested measure onto an underlying stored one;
        # None when the measure is stored directly
        self.converter = measure_converter(measure)
        self.target_measure = measure if self.converter is None else self.converter.underlying_measure()
        self.interpolation = interpolation
        self.tdm = TrainingDataManager()

    def title_component(self):
        """Human-readable label: activity, non-'All' filters, then measure."""
        components = [self.activity]
        if self.activity_type != "All":
            components.append(self.activity_type)
        if self.equipment != "All":
            components.append(self.equipment)
        components.append(self.measure)
        return " ".join(components)

    def sql(self, time_period) -> str:
        """Assemble the per-day aggregation SELECT for this definition.

        NOTE(review): values are interpolated directly into the SQL string;
        they come from internal configuration here, but confirm none are
        user-controlled before reusing this elsewhere.
        """
        tdm = TrainingDataManager(); table = tdm.table_for_measure(self.target_measure)
        # SELECT
        sql = self.select_clause(table)
        sql += f" FROM {table} "
        # INNER JOIN TO DATE
        sql += self.inner_join(table)
        sql += f"WHERE "
        if table == 'Reading':
            sql += self.where_clause_reading()
        else:
            sql += self.where_clause_workout()
        sql += f"{table}.date BETWEEN '{time_period.start}' and '{time_period.end}' GROUP BY {table}.date"
        return sql

    def inner_join(self, table) -> str:
        """Join to Day only when a day-type filter is in effect."""
        if self.day_type == "All":
            return ""
        return f"INNER JOIN Day On {table}.date = Day.date "

    def select_clause(self, table) -> str:
        """SELECT date plus the day-aggregated measure, aliased to target_measure."""
        if table == 'Reading':
            return f"SELECT Reading.date, {sql_for_aggregator(self.day_aggregation_method, 'value')} as {self.target_measure}"
        else:
            return f"SELECT Workout.date, {sql_for_aggregator(self.day_aggregation_method, self.target_measure)} as {self.target_measure}"

    def where_clause_reading(self) -> str:
        """WHERE predicates for the Reading table, ending in ' AND ' so the
        caller can append the date-range predicate."""
        wheres = [f"type='{self.target_measure}'"]
        sql = ""
        if self.day_type != 'All':
            wheres.append(f"Day.type='{self.day_type}'")
        if self.day_of_week != 'All':
            wheres.append(f"strftime('%w', Reading.date)='{self.sql_day_of_week}'")
        if self.month != 'All':
            wheres.append(f"strftime('%m', Reading.date)='{self.sql_month}'")
        if len(wheres) > 0:
            sql = f"{' AND '.join(wheres)} AND "
        return sql

    def where_clause_workout(self) -> str:
        """WHERE predicates for the Workout table; empty string when unfiltered,
        otherwise ends in ' AND ' for the caller's date-range predicate."""
        wheres = list()
        sql = ""
        if self.activity != 'All':
            wheres.append(f"Workout.activity='{self.activity}'")
        if self.activity_type != 'All':
            wheres.append(f"Workout.activity_type='{self.activity_type}'")
        if self.equipment != 'All':
            wheres.append(f"Workout.equipment='{self.equipment}'")
        if self.day_type != 'All':
            wheres.append(f"Day.type='{self.day_type}'")
        if self.day_of_week != 'All':
            wheres.append(f"strftime('%w', Workout.date)='{self.sql_day_of_week}'")
        if self.month != 'All':
            wheres.append(f"strftime('%m', Workout.date)='{self.sql_month}'")
        if len(wheres) > 0:
            sql = f"{' AND '.join(wheres)} AND "
        return sql

    def day_data(self, time_period):
        """Return a date-indexed DataFrame of the measure, converted and gap-filled."""
        df = self.tdm.day_data_df(time_period, self)
        if len(df) == 0:
            # NOTE(review): this adds a column named after the start date with
            # value 0 rather than a row — looks intentional as an empty-result
            # placeholder but confirm against day_data_df's empty shape
            df[time_period.start] = 0
        if self.converter is not None:
            # convert the stored measure into the requested one
            df[self.measure] = df[self.target_measure].astype('float')
            df = df.drop(columns=[self.target_measure])
            df[self.measure] = df[self.measure].apply(self.converter.convert_lambda())
        df = df.set_index('date')
        df.index = pd.to_datetime(df.index)
        df = self.__fill_gaps(df)
        return df

    def __fill_gaps(self, df):
        """Reindex onto a continuous daily range and interpolate missing days
        according to self.interpolation ('none' leaves the frame untouched)."""
        if self.interpolation == PandasInterpolation.NONE.value:
            return df
        max_date = df.index.max()
        min_date = df.index.min()
        index = pd.date_range(min_date, max_date)
        if self.interpolation == PandasInterpolation.FILL_ZERO.value:
            df = df.reindex(index, fill_value=0)
        else:
            df = df.reindex(index)
            if self.interpolation in {PandasInterpolation.POLYNOMIAL.value, PandasInterpolation.SPLINE.value}:
                # polynomial/spline methods require an order argument
                df = df.interpolate(method=self.interpolation, axis=0, order=5)
            else:
                df = df.interpolate(method=self.interpolation, axis=0)
        return df
def summary_view(request):
    """Render the summary page: lifetime/period totals plus warehouse summaries.

    Builds rows for lifetime, YTD, rolling-year and MTD ranges (each with a
    last-year comparison), then equipment/activity summaries from the warehouse.
    """
    data = []
    tdm = TrainingDataManager()
    workouts = tdm.workouts()

    def _activity_totals(activity):
        """(km, seconds) totals for one activity; 0 when it has no workouts."""
        km = sum(w.km for w in workouts if w.activity == activity)
        seconds = sum(w.seconds for w in workouts if w.activity == activity)
        return km, seconds

    # sum() replaces functools.reduce(operator.add, ...) throughout: reduce with
    # no initializer raises TypeError on an empty sequence (e.g. an activity
    # with no workouts yet); sum returns 0 instead.
    swim_km, swim_seconds = _activity_totals('Swim')
    bike_km, bike_seconds = _activity_totals('Bike')
    run_km, run_seconds = _activity_totals('Run')
    total_km = sum(w.km for w in workouts)
    total_seconds = sum(w.seconds for w in workouts)
    total_ascent = sum(w.ascent_metres for w in workouts)
    lifetime = [
        'Lifetime', total_seconds, int(total_km), total_ascent,
        swim_seconds, int(swim_km),
        bike_seconds, int(bike_km),
        run_seconds, int(run_km)
    ]
    data.append(lifetime)

    end = datetime.datetime.now().date()
    start = datetime.date(end.year, 1, 1)
    data.append(['YTD'] + values_for_range_list(start, end, workouts))
    # NOTE(review): same-day-last-year arithmetic raises on Feb 29 — preserved
    end = datetime.date(end.year - 1, end.month, end.day)
    start = datetime.date(end.year, 1, 1)
    data.append(['YTD Last Year'] + values_for_range_list(start, end, workouts))
    end = datetime.datetime.now().date()
    start = end - datetime.timedelta(days=365)
    data.append(['R Year'] + values_for_range_list(start, end, workouts))
    end = datetime.date(end.year - 1, end.month, end.day)
    start = end - datetime.timedelta(days=365)
    data.append(['R Year Last Year'] + values_for_range_list(start, end, workouts))
    end = datetime.datetime.now().date()
    start = datetime.date(end.year, end.month, 1)
    data.append(['MTD'] + values_for_range_list(start, end, workouts))
    end = datetime.date(end.year - 1, end.month, end.day)
    start = datetime.date(end.year, end.month, 1)
    data.append(['MTD Last Year'] + values_for_range_list(start, end, workouts))

    # per-equipment annual km from the warehouse; 'All' row provides headings
    equipment_summary = DataWarehouse.instance().equipment_km_annual_summary()
    headings = [i for i in equipment_summary['All'].keys()]
    values = list()
    values.append(['Total'] + [int(equipment_summary['All'][h]) for h in headings])
    for k in equipment_summary:
        if k != 'All':
            values.append([k] + [int(equipment_summary[k][h]) for h in headings])

    # per-activity (hours, km) pairs from the warehouse
    activity_summary = DataWarehouse.instance().activity_summary()
    hours_values = list()
    km_values = list()
    hours_values.append(['Total'] + [int(activity_summary['All'][h][0]) for h in headings])
    km_values.append(['Total'] + [int(activity_summary['All'][h][1]) for h in headings])
    for k in activity_summary:
        if k != 'All':
            hours_values.append([k] + [int(activity_summary[k][h][0]) for h in headings])
            km_values.append([k] + [int(activity_summary[k][h][1]) for h in headings])
    headings = [""] + headings

    return render(
        request, 'workoutentry/summary.html', {
            'headings': SUMMARY_HEADINGS,
            'data': data,
            'summary_headings': headings,
            'equipment_summary': values,
            'hours_summary': hours_values,
            'km_summary': km_values
        })
def call_resource(self, request):
    """Build one fully-configurable time series from posted JSON and return it.

    Every consumed key is removed from dd_keys; any leftover keys produce a
    warning so typos in the posted payload are surfaced rather than ignored.
    """
    dd, errors = self._process_data(request.POST['json'])
    dd_keys = set([d for d in dd.keys()])  # tracks which posted keys get consumed
    data_definition = DataDefinition(activity=dd['activity'],
                                     activity_type=dd['activity_type'],
                                     equipment=dd['equipment'],
                                     measure=dd['measure'],
                                     day_aggregation_method=DayAggregation(dd['day_aggregation']),
                                     day_type=dd['day_type'],
                                     day_of_week=dd['day_of_week'],
                                     month=dd['month'],
                                     interpolation=dd['interpolation'])
    dd_keys.remove('activity')
    dd_keys.remove('activity_type')
    dd_keys.remove('equipment')
    dd_keys.remove('measure')
    dd_keys.remove('day_aggregation')
    dd_keys.remove('day_type')
    dd_keys.remove('day_of_week')
    dd_keys.remove('month')
    dd_keys.remove('interpolation')
    # period / bucketing configuration
    period = Period(pandas_period=PandasPeriod(dd['period']),
                    aggregation=Aggregation(dd['period_aggregation']),
                    to_date=dd['to_date'] == 'yes',
                    incl_zeroes=dd['period_include_zeroes'] == 'yes')
    dd_keys.remove('period')
    dd_keys.remove('period_aggregation')
    dd_keys.remove('to_date')
    dd_keys.remove('period_include_zeroes')
    # rolling-window configuration (NoOpRoller when rolling is off)
    if dd['rolling'] == 'yes':
        rolling_definition = RollingDefinition(periods=int(dd['number_of_rolling_periods']),
                                               aggregation=Aggregation(dd['rolling_aggregation']),
                                               incl_zeros=dd['rolling_include_zeroes'] == 'yes')
    else:
        rolling_definition = NoOpRoller()
    dd_keys.remove('number_of_rolling_periods')
    dd_keys.remove('rolling_aggregation')
    dd_keys.remove('rolling_include_zeroes')
    dd_keys.remove('rolling')
    series_definition = SeriesDefinition(period=period, rolling_definition=rolling_definition)
    processor = self.get_processor(dd)
    dd_keys.remove('processor_type')
    response = TrainingDiaryResponse()
    [response.add_message(response.MSG_ERROR, e) for e in errors]
    diary_time_period = TrainingDataManager().diary_time_period()
    # default the series bounds to the full diary range when not supplied
    data_tp = TimePeriod(diary_time_period.start if dd['series_start'] is None else dd['series_start'],
                         diary_time_period.end if dd['series_end'] is None else dd['series_end'])
    dd_keys.remove('series_start')
    dd_keys.remove('series_end')
    x_axis_number = 1  # default axis when the payload doesn't specify one
    if 'x_axis_number' in dd:
        x_axis_number = dd['x_axis_number']
        dd_keys.remove('x_axis_number')
    tss = TimeSeriesManager.TimeSeriesSet(data_definition,
                                          series_definition=series_definition,
                                          processor=processor,
                                          x_axis_number=x_axis_number)
    ts = TimeSeriesManager().time_series_graph(data_tp, [tss])
    response.add_data('time_series', ts)
    if len(dd_keys) > 0:
        response.add_message(response.MSG_WARNING,
                             f"The following data was not used: {' ,'.join(dd_keys)}")
    return JsonResponse(data=response.as_dict())
def populate_readings(self):
    """Load this day's readings and derive the reading attributes.

    Display attributes (sleep, fatigue, restingHR, ...) are only set when the
    corresponding reading exists — preserved from the original. The dw_*
    warehouse attributes always get a value, defaulting to 0.0, since the
    warehouse wants zero instead of null.
    """
    from workoutentry.training_data import TrainingDataManager
    self.readings = TrainingDataManager().readings_for_date(self.date)
    self.reading_count = len(self.readings)
    by_type = {r.reading_type: r.value for r in self.readings}

    def quality_label(score):
        """Bucket a 0..1 sleep quality score into its display label."""
        for limit, label in ((0.2, "Very Poor"), (0.4, "Poor"),
                             (0.6, "Average"), (0.8, "Good")):
            if score < limit:
                return label
        return "Excellent"

    if 'sleep' in by_type:
        self.sleep = by_type['sleep']
    if 'sleepQualityScore' in by_type:
        self.sleepQualityScore = by_type['sleepQualityScore']
        self.sleep_quality = quality_label(self.sleepQualityScore)
    else:
        self.sleep_quality = "Average"
    if 'fatigue' in by_type:
        self.fatigue = by_type['fatigue']
    if 'motivation' in by_type:
        self.motivation = by_type['motivation']
    if 'restingHR' in by_type:
        self.restingHR = int(by_type['restingHR'])
    if 'SDNN' in by_type:
        self.SDNN = by_type['SDNN']
    if 'rMSSD' in by_type:
        self.rMSSD = by_type['rMSSD']
    if 'kg' in by_type:
        self.kg = by_type['kg']
    if 'fatPercentage' in by_type:
        self.fat_percentage = by_type['fatPercentage']

    # values for data warehouse. Want zero instead of null.
    # dict.get replaces the original's repeated `x if k in d else 0.0` ternaries
    self.dw_sleep = by_type.get('sleep', 0.0)
    self.dw_sleepQualityScore = by_type.get('sleepQualityScore', 0.0)
    self.dw_fatigue = by_type.get('fatigue', 0.0)
    self.dw_motivation = by_type.get('motivation', 0.0)
    # original produced int when present, 0.0 when absent — preserved
    self.dw_restingHR = int(by_type['restingHR']) if 'restingHR' in by_type else 0.0
    self.dw_SDNN = by_type.get('SDNN', 0.0)
    self.dw_rMSSD = by_type.get('rMSSD', 0.0)
    self.dw_kg = by_type.get('kg', 0.0)
    self.dw_fatPercentage = by_type.get('fatPercentage', 0.0)
def call_resource(self, request):
    """Build the time-series sets for one of the fixed diary graphs.

    POST params: 'graph' (tss / duration / km / reading / bike), 'activity'
    (an activity name, a reading measure, or an equipment name depending on
    the graph), 'year' ('Total' for all time, otherwise a year number).
    """
    response = TrainingDiaryResponse()
    tms = TimeSeriesManager()
    graph = request.POST['graph']
    activity = request.POST['activity']
    year_str = self._normalise_year_str(request.POST['year'])
    yr_title = year_str if year_str != 'Total' else "All Time"
    if year_str == 'Total':
        # whole diary range
        tdm = TrainingDataManager()
        tp = tdm.diary_time_period()
    else:
        # one calendar year
        year = int(year_str)
        tp = TimePeriod(date(year,1,1), date(year,12,31))
    tss_list = list()
    if graph == 'tss':
        # single TSS series run through the training-stress-balance processor
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=DataDefinition(activity='All' if activity == "Total" else activity,
                                                                                       activity_type='All',
                                                                                       equipment='All',
                                                                                       measure='tss',
                                                                                       day_aggregation_method=DayAggregation.SUM),
                                                        processor=TSBProcessor(7, 7, 42, 42)))
    elif graph == 'duration':
        # daily hours, 7-day rolling sum, and year-to-date cumulative series
        duration_defn = DataDefinition(activity='All' if activity == "Total" else activity,
                                       activity_type='All',
                                       equipment='All',
                                       measure='hours',
                                       day_aggregation_method=DayAggregation.SUM)
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=duration_defn))
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=duration_defn,
                                                        series_definition=SeriesDefinition(Period(), RollingDefinition(7, Aggregation.SUM))))
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=duration_defn,
                                                        series_definition=SeriesDefinition(Period(PandasPeriod.Y_DEC, to_date=True))))
    elif graph == 'km':
        # daily km, 7-day rolling sum, and year-to-date cumulative series
        km_defn = DataDefinition(activity='All' if activity == "Total" else activity,
                                 activity_type='All',
                                 equipment='All',
                                 measure='km',
                                 day_aggregation_method=DayAggregation.SUM)
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=km_defn))
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=km_defn,
                                                        series_definition=SeriesDefinition(Period(), RollingDefinition(7, Aggregation.SUM))))
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=km_defn,
                                                        series_definition=SeriesDefinition(Period(PandasPeriod.Y_DEC, to_date=True))))
    elif graph == 'reading':
        # 'activity' carries the reading measure here; daily mean with
        # 7-day and 31-day rolling means
        reading_defn = DataDefinition(activity='All',
                                      activity_type='All',
                                      equipment='All',
                                      measure=activity,
                                      day_aggregation_method=DayAggregation.MEAN)
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=reading_defn))
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=reading_defn,
                                                        series_definition=SeriesDefinition(Period(), RollingDefinition(7, Aggregation.MEAN))))
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=reading_defn,
                                                        series_definition=SeriesDefinition(Period(), RollingDefinition(31, Aggregation.MEAN))))
    elif graph == 'bike':
        # 'activity' carries the equipment (bike) name here
        bike_defn = DataDefinition(activity='Bike',
                                   activity_type='All',
                                   equipment='All' if activity == 'Total' else activity,
                                   measure='km',
                                   day_aggregation_method=DayAggregation.SUM)
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=bike_defn))
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=bike_defn,
                                                        series_definition=SeriesDefinition(Period(), RollingDefinition(7, Aggregation.SUM))))
        tss_list.append(TimeSeriesManager.TimeSeriesSet(data_definition=bike_defn,
                                                        series_definition=SeriesDefinition(Period(PandasPeriod.Y_DEC, Aggregation.SUM, to_date=True))))
    if len(tss_list) > 0:
        values = tms.time_series_graph(requested_time_period=tp, time_series_list=tss_list)
    else:
        # unrecognised graph type
        values = {'title': "No Data"}
    response.add_data('chart_title', f"{yr_title} {values['title']}")
    response.add_data('time_series', values)
    return JsonResponse(data=response.as_dict())
def __correct_bounds(self, from_date, to_date):
    """Clamp the requested date range to the diary's available dates.

    Returns a (start, end) tuple no wider than [earliest, latest].
    """
    tdm = TrainingDataManager()
    earliest = tdm.earliest_date()
    latest = tdm.latest_date()
    # ISO date strings order lexicographically, so max/min clamp correctly
    return (max(from_date, earliest), min(to_date, latest))
def call_resource(self, request):
    """Create or update a workout from posted JSON.

    Three paths: no primary key posted -> save a new workout; posted key
    matches the key derived from date + workout_number -> update in place;
    mismatch (the date was changed) -> delete the old workout and save a new
    one so primary keys stay consistent with dates.
    """
    response = TrainingDiaryResponse()
    dd, errors = self._process_data(request.POST['json'])
    [response.add_message(response.MSG_ERROR, e) for e in errors]
    tdm = TrainingDataManager()
    # the key this workout *should* have, given its posted date / number
    primary_key = tdm.workout_primary_key(dd['date'], dd['workout_number'])
    try:
        if 'primary_key' not in dd or dd['primary_key'] == "":
            # new workout
            lastrowid = tdm.save_workout(
                **self._filtered_dict(dd, self.NEW_FIELDS))
            response.add_message(response.MSG_INFO, f"New workout saved")
            response.add_data(
                'workout',
                tdm.workout_for_rowid(lastrowid)[0].data_dictionary())
        elif primary_key == dd['primary_key']:
            # update workout
            tdm.update_workout(
                **self._filtered_dict(dd, self.UPDATE_FIELDS))
            response.add_message(response.MSG_INFO, f"Workout updated")
            response.add_data(
                'workout',
                tdm.workout_for_primary_key(primary_key)
                [0].data_dictionary())
        else:
            # changed the date. Need a new workout to get primary_keys right
            old_key = dd['primary_key']
            tdm.delete_workout_for_primary_key(old_key)
            response.add_data('removed_primary_key', old_key)
            lastrowid = tdm.save_workout(
                **self._filtered_dict(dd, self.NEW_FIELDS))
            workout = tdm.workout_for_rowid(lastrowid)[0]
            response.add_message(
                response.MSG_INFO,
                f"Workout date changed so old workout deleted and new one added. Remove {old_key} and added {workout.primary_key}"
            )
            response.add_data('workout', workout.data_dictionary())
    except TypeError as e:
        # raised when the filtered dict doesn't match the save/update signature
        response.set_status(response.ERROR)
        response.add_message(response.MSG_ERROR, str(e))
    return JsonResponse(data=response.as_dict())