def _handle_get_logbook(handler, path_match, data):
    """Return logbook entries for one local calendar day.

    The optional ``date`` group of the matched path selects the day;
    when absent, today is used. Events are queried from the recorder
    between the start of that local day and the start of the next day,
    then humanified and written as JSON to *handler*.
    """
    date_str = path_match.group("date")

    if date_str:
        start_date = dt_util.date_str_to_date(date_str)

        if start_date is None:
            # Fixed message: we failed to parse a date from the URL
            # path, not a JSON payload.
            handler.write_json_message("Error parsing date", HTTP_BAD_REQUEST)
            return

        start_day = dt_util.start_of_local_day(start_date)
    else:
        start_day = dt_util.start_of_local_day()

    end_day = start_day + timedelta(days=1)

    # The recorder stores timestamps in UTC, so convert the local-day
    # boundaries before querying.
    events = recorder.query_events(
        QUERY_EVENTS_BETWEEN,
        (dt_util.as_utc(start_day), dt_util.as_utc(end_day)))

    handler.write_json(humanify(events))
def _handle_get_logbook(handler, path_match, data):
    """Return logbook entries for one local calendar day.

    Uses the ``date`` group of the matched path when present, otherwise
    defaults to today. Recorder events between the start of that local
    day and the start of the following day are humanified and written
    as JSON to *handler*.

    NOTE(review): this function is defined twice in this file with an
    identical body; this later definition shadows the earlier one.
    Consider removing the duplicate.
    """
    date_str = path_match.group('date')

    if date_str:
        start_date = dt_util.date_str_to_date(date_str)

        if start_date is None:
            # Fixed message: the failure is parsing a date from the
            # URL path, not JSON.
            handler.write_json_message("Error parsing date", HTTP_BAD_REQUEST)
            return

        start_day = dt_util.start_of_local_day(start_date)
    else:
        start_day = dt_util.start_of_local_day()

    end_day = start_day + timedelta(days=1)

    # Recorder timestamps are stored in UTC; convert the local-day
    # bounds before querying.
    events = recorder.query_events(
        QUERY_EVENTS_BETWEEN,
        (dt_util.as_utc(start_day), dt_util.as_utc(end_day)))

    handler.write_json(humanify(events))
def _api_history_period(handler, path_match, data):
    """Return significant state history over a one-day period.

    When the ``date`` group of the matched path is present, the period
    is that local calendar day (converted to UTC); otherwise it is the
    24 hours ending now. An optional ``filter_entity_id`` key in *data*
    restricts the result to a single entity (``None`` means all).
    """
    # timedelta(days=1) == timedelta(seconds=86400); this spelling is
    # the idiomatic one.
    one_day = timedelta(days=1)
    date_str = path_match.group('date')

    if date_str:
        start_date = dt_util.date_str_to_date(date_str)

        if start_date is None:
            # Fixed message: we failed to parse a date from the URL
            # path, not a JSON payload.
            handler.write_json_message("Error parsing date", HTTP_BAD_REQUEST)
            return

        start_time = dt_util.as_utc(dt_util.start_of_local_day(start_date))
    else:
        start_time = dt_util.utcnow() - one_day

    end_time = start_time + one_day

    entity_id = data.get('filter_entity_id')

    handler.write_json(
        get_significant_states(start_time, end_time, entity_id).values())