def fill_class_summaries_from_logs(user_data_coach, students_data, dt_start_utc):
    """Fold one UTC day of student activity into the coach's class summary.

    For each student, asynchronously fetches the problem and video logs that
    fall within [dt_start_utc, dt_start_utc + 1 day), merges and sorts them
    by start time, and records them in the coach's ClassDailyActivitySummary.

    Args:
        user_data_coach: user data entity of the coach whose summary to update.
        students_data: iterable of student user data entities.
        dt_start_utc: UTC datetime marking the start of the day to summarize.
    """
    dt_end_utc = dt_start_utc + datetime.timedelta(days=1)

    # Asynchronously grab all student data at once so the datastore round
    # trips overlap instead of running serially.
    async_queries = []
    for user_data_student in students_data:
        async_queries.append(ProblemLog.get_for_user_data_between_dts(
            user_data_student, dt_start_utc, dt_end_utc))
        async_queries.append(VideoLog.get_for_user_data_between_dts(
            user_data_student, dt_start_utc, dt_end_utc))

    # Wait for all queries to finish
    results = util.async_queries(async_queries, limit=10000)

    for i, user_data_student in enumerate(students_data):
        # Lazy %-args avoid building the message string when INFO is disabled.
        logging.info("working on student %s", user_data_student.user)

        # Queries were appended in (problem, video) pairs, so student i owns
        # results[2i] and results[2i + 1].
        problem_logs = results[i * 2].get_result()
        video_logs = results[i * 2 + 1].get_result()

        problem_and_video_logs = sorted(
            list(problem_logs) + list(video_logs),
            key=lambda log: log.time_started())

        if problem_and_video_logs:
            # 1440 = minutes per day; one summary entry per class per day.
            LogSummary.add_or_update_entry(
                user_data_coach, problem_and_video_logs,
                ClassDailyActivitySummary,
                LogSummaryTypes.CLASS_DAILY_ACTIVITY, 1440)
def fill_class_summaries_from_logs(user_data_coach, students_data, dt_start_utc):
    """Fold one UTC day of student activity into the coach's class summary.

    For each student, asynchronously fetches the problem and video logs that
    fall within [dt_start_utc, dt_start_utc + 1 day), merges and sorts them
    by start time, and records them in the coach's ClassDailyActivitySummary.

    Args:
        user_data_coach: user data entity of the coach whose summary to update.
        students_data: iterable of student user data entities.
        dt_start_utc: UTC datetime marking the start of the day to summarize.
    """
    dt_end_utc = dt_start_utc + datetime.timedelta(days=1)

    # Asynchronously grab all student data at once: two queries per student,
    # appended in (problem, video) order.
    async_queries = []
    for user_data_student in students_data:
        query_problem_logs = ProblemLog.get_for_user_data_between_dts(
            user_data_student, dt_start_utc, dt_end_utc)
        query_video_logs = VideoLog.get_for_user_data_between_dts(
            user_data_student, dt_start_utc, dt_end_utc)
        async_queries.append(query_problem_logs)
        async_queries.append(query_video_logs)

    # Wait for all queries to finish
    results = util.async_queries(async_queries, limit=10000)

    for i, user_data_student in enumerate(students_data):
        # Lazy %-args: no string is built unless INFO logging is enabled.
        logging.info("working on student %s", user_data_student.user)

        # Student i owns the pair results[2i] (problems), results[2i+1] (videos).
        problem_logs = results[i * 2].get_result()
        video_logs = results[i * 2 + 1].get_result()

        # Merge both log types and order by when each activity began.
        problem_and_video_logs = sorted(
            list(problem_logs) + list(video_logs),
            key=lambda log: log.time_started())

        if problem_and_video_logs:
            # 1440 = minutes per day; one summary entry per class per day.
            LogSummary.add_or_update_entry(
                user_data_coach, problem_and_video_logs,
                ClassDailyActivitySummary,
                LogSummaryTypes.CLASS_DAILY_ACTIVITY, 1440)
def get_classtime_table_old(self, students_data, dt_start_utc):
    """Build a ClassTimeTable of per-student activity chunks for one day.

    Fetches each student's problem and video logs for the class-timezone day
    starting at dt_start_utc, then coalesces consecutive activities into
    ClassTimeChunk runs: a new chunk starts whenever an activity begins more
    than self.chunk_delta after the current chunk's end. One table column
    per student.

    Args:
        students_data: iterable of student user data entities.
        dt_start_utc: UTC datetime for the start of the day to chart.

    Returns:
        A balanced ClassTimeTable with one column of chunks per student.
    """
    dt_start_ctz = self.dt_to_ctz(dt_start_utc)
    dt_end_ctz = dt_start_ctz + datetime.timedelta(days=1)

    # Hoist the UTC query bounds out of the loop; identical for every student.
    dt_start_query_utc = self.dt_to_utc(dt_start_ctz)
    dt_end_query_utc = self.dt_to_utc(dt_end_ctz)

    column = 0
    classtime_table = ClassTimeTable(dt_start_ctz, dt_end_ctz)

    # Asynchronously grab all student data at once, two queries per student
    # in (problem, video) order.
    async_queries = []
    for user_data_student in students_data:
        async_queries.append(ProblemLog.get_for_user_data_between_dts(
            user_data_student, dt_start_query_utc, dt_end_query_utc))
        async_queries.append(VideoLog.get_for_user_data_between_dts(
            user_data_student, dt_start_query_utc, dt_end_query_utc))

    # Wait for all queries to finish
    results = util.async_queries(async_queries, limit=10000)

    rows = 0
    chunks = 0

    for i, user_data_student in enumerate(students_data):
        # Student i owns results[2i] (problems) and results[2i+1] (videos).
        problem_logs = results[i * 2].get_result()
        video_logs = results[i * 2 + 1].get_result()

        problem_and_video_logs = sorted(
            list(problem_logs) + list(video_logs),
            key=lambda log: log.time_started())

        rows += len(problem_and_video_logs)

        chunk_current = None
        for activity in problem_and_video_logs:
            activity_start_ctz = self.dt_to_ctz(activity.time_started())
            activity_end_ctz = self.dt_to_ctz(activity.time_ended())

            # Close the open chunk when the next activity starts after the
            # allowed idle gap (chunk_delta) beyond the chunk's end.
            if (chunk_current is not None and
                    activity_start_ctz > chunk_current.end + self.chunk_delta):
                chunks += 1
                classtime_table.drop_into_column_old(chunk_current, column)
                chunk_current.description()
                chunk_current = None

            if chunk_current is None:
                chunk_current = ClassTimeChunk()
                chunk_current.user_data_student = user_data_student
                chunk_current.start = activity_start_ctz
                # NOTE: the original also set chunk_current.end here, but it
                # was dead — unconditionally overwritten by the min() below.

            chunk_current.activities.append(activity)
            # Extend the chunk, clamped so it never spills past the day's end.
            chunk_current.end = min(activity_end_ctz, dt_end_ctz)

        # Flush the final open chunk for this student, if any.
        if chunk_current is not None:
            chunks += 1
            classtime_table.drop_into_column_old(chunk_current, column)
            chunk_current.description()

        column += 1

    # Lazy %-args avoid building the string when INFO logging is disabled.
    logging.info("old rows=%s, old chunks=%s", rows, chunks)

    classtime_table.balance()
    return classtime_table
def get_classtime_table_old(self, students_data, dt_start_utc):
    """Build a ClassTimeTable of per-student activity chunks for one day.

    Fetches each student's problem and video logs for the class-timezone day
    starting at dt_start_utc, then coalesces consecutive activities into
    ClassTimeChunk runs: a new chunk starts whenever an activity begins more
    than self.chunk_delta after the current chunk's end. One table column
    per student.

    Args:
        students_data: iterable of student user data entities.
        dt_start_utc: UTC datetime for the start of the day to chart.

    Returns:
        A balanced ClassTimeTable with one column of chunks per student.
    """
    dt_start_ctz = self.dt_to_ctz(dt_start_utc)
    dt_end_ctz = dt_start_ctz + datetime.timedelta(days=1)

    # Compute the UTC query bounds once instead of per student.
    dt_start_query_utc = self.dt_to_utc(dt_start_ctz)
    dt_end_query_utc = self.dt_to_utc(dt_end_ctz)

    column = 0
    classtime_table = ClassTimeTable(dt_start_ctz, dt_end_ctz)

    # Asynchronously grab all student data at once; queries are appended in
    # (problem, video) pairs per student.
    async_queries = []
    for user_data_student in students_data:
        query_problem_logs = ProblemLog.get_for_user_data_between_dts(
            user_data_student, dt_start_query_utc, dt_end_query_utc)
        query_video_logs = VideoLog.get_for_user_data_between_dts(
            user_data_student, dt_start_query_utc, dt_end_query_utc)
        async_queries.append(query_problem_logs)
        async_queries.append(query_video_logs)

    # Wait for all queries to finish
    results = util.async_queries(async_queries, limit=10000)

    rows = 0
    chunks = 0

    for i, user_data_student in enumerate(students_data):
        problem_logs = results[i * 2].get_result()
        video_logs = results[i * 2 + 1].get_result()

        # Merge both log types and order by when each activity began.
        problem_and_video_logs = sorted(
            list(problem_logs) + list(video_logs),
            key=lambda log: log.time_started())

        rows += len(problem_and_video_logs)

        chunk_current = None
        for activity in problem_and_video_logs:
            start_ctz = self.dt_to_ctz(activity.time_started())
            end_ctz = self.dt_to_ctz(activity.time_ended())

            # An activity starting more than chunk_delta after the open
            # chunk's end closes that chunk and begins a new one.
            if (chunk_current is not None and
                    start_ctz > chunk_current.end + self.chunk_delta):
                chunks += 1
                classtime_table.drop_into_column_old(chunk_current, column)
                chunk_current.description()
                chunk_current = None

            if chunk_current is None:
                chunk_current = ClassTimeChunk()
                chunk_current.user_data_student = user_data_student
                chunk_current.start = start_ctz
                # NOTE: the original set chunk_current.end here too, but that
                # assignment was dead — always overwritten by the min() below.

            chunk_current.activities.append(activity)
            # Grow the chunk, but never past the end of the charted day.
            chunk_current.end = min(end_ctz, dt_end_ctz)

        # Emit the trailing open chunk for this student, if one remains.
        if chunk_current is not None:
            chunks += 1
            classtime_table.drop_into_column_old(chunk_current, column)
            chunk_current.description()

        column += 1

    # Lazy %-args: the message is only formatted if INFO logging is enabled.
    logging.info("old rows=%s, old chunks=%s", rows, chunks)

    classtime_table.balance()
    return classtime_table