async def get_statuses() -> list[models.Statuses]:
    """Fetch every row from the statuses table."""
    logger.debug("Getting statuses")
    stmt = sa.select(models.Statuses)

    async with database.session() as ses:
        result = await ses.execute(stmt)
        return result.all()
async def complete_material(*,
                            material_id: UUID,
                            completion_date: Optional[datetime.date] = None) -> None:
    """Mark an assigned material as completed.

    Args:
        material_id: material to complete.
        completion_date: day of completion; defaults to today.

    Raises:
        ValueError: if the material is not assigned, is already completed,
            or the completion date precedes the start date.
    """
    completion_date = completion_date or database.today().date()
    logger.debug("Completing material_id=%s", material_id)

    get_status_stmt = sa.select(models.Statuses) \
        .where(models.Statuses.c.material_id == str(material_id))

    update_status_stmt = models.Statuses \
        .update().values(completed_at=completion_date) \
        .where(models.Statuses.c.material_id == str(material_id))

    async with database.session() as ses:
        status = (await ses.execute(get_status_stmt)).mappings().first()

        # ValueError does not lazy-format %-style args like logging does,
        # so the messages must be built eagerly
        if status is None:
            raise ValueError(f"Material_id={material_id} not assigned")
        if status.completed_at is not None:
            raise ValueError(f"Material_id={material_id} already completed")
        if status.started_at > completion_date:
            raise ValueError(
                f"Completion date {completion_date} is before the start date "
                f"{status.started_at} for material_id={material_id}")

        await ses.execute(update_status_stmt)

    logger.debug("Material_id=%s completed at %s", material_id, completion_date)
async def get_cards_count() -> int:
    """Count all cards in the database."""
    logger.debug("Getting amount of cards")

    stmt = sa.select(sa.func.count(1)).select_from(models.Cards)

    async with database.session() as ses:
        return await ses.scalar(stmt)
async def does_material_exist(*, material_id: UUID) -> bool:
    """Tell whether a material with the given id exists."""
    logger.debug("Whether material_id=%s exists", material_id)

    stmt = sa.select(models.Materials.c.material_id) \
        .where(models.Materials.c.material_id == str(material_id))

    async with database.session() as ses:
        found = await ses.scalar(stmt)
    return found is not None
async def get_status(*, material_id: UUID) -> Optional[RowMapping]:
    """Get the status row for the material, or None if it is not assigned."""
    logger.debug("Getting status for material_id=%s", material_id)

    stmt = sa.select(models.Statuses) \
        .where(models.Statuses.c.material_id == str(material_id))

    async with database.session() as ses:
        result = await ses.execute(stmt)
        return result.mappings().one_or_none()
async def get_log_records() -> dict[datetime.date, RowMapping]:
    """Map every reading-log date to its record, joined with the material title."""
    logger.debug("Getting all log records")

    stmt = sa.select([models.ReadingLog,
                      models.Materials.c.title.label('material_title')]) \
        .join(models.Materials,
              models.Materials.c.material_id == models.ReadingLog.c.material_id)

    async with database.session() as ses:
        records = {}
        async for row in await ses.stream(stmt):
            records[row.date] = row
        return records
async def data() -> AsyncGenerator[tuple[datetime.date, LogRecord], None]:
    """ Get pairs: (date, info) of all days from start to stop.
    If the day is empty, material_id is supposed as
    the material_id of the last not empty day. """
    logger.debug("Getting data from log")

    # Bail out early when there are no log records at all.
    # NOTE(review): the visible body ends at this guard while the docstring
    # promises a generator of (date, info) pairs — the main yielding loop
    # appears to live outside this chunk; confirm against the full file.
    if not (log_records := await get_log_records()):
        return
async def get_completed_materials() -> list[RowMapping]:
    """Get all materials that have a completion date set.

    Returns:
        Rows joining each completed material with its status.
    """
    logger.debug("Getting completed materials")

    stmt = sa.select([models.Materials, models.Statuses]) \
        .join(models.Statuses,
              models.Materials.c.material_id == models.Statuses.c.material_id) \
        .where(models.Statuses.c.completed_at.isnot(None))  # IS NOT NULL, E711-clean

    async with database.session() as ses:
        return (await ses.execute(stmt)).mappings().all()
def _dump_snapshot(db_snapshot: SNAPSHOT) -> Path:
    """Serialize the DB snapshot to a timestamped JSON file and return its path."""
    logger.debug("DB dumping started")

    dump_path = Path("data") / f"tracker_{_get_now()}.json"
    with dump_path.open('w') as f:
        ujson.dump(db_snapshot, f, ensure_ascii=False, indent=2)

    logger.debug("DB dumped")
    return dump_path
async def get_material_titles() -> dict[UUID, str]:
    """Map every material id to its title."""
    logger.debug("Getting material titles")

    stmt = sa.select([models.Materials.c.material_id,
                      models.Materials.c.title])

    async with database.session() as ses:
        titles = {}
        async for row in await ses.stream(stmt):
            titles[row.material_id] = row.title
        return titles
def _send_dump(file_path: Path) -> None:
    """Upload the dump file to the configured Google Drive folder."""
    logger.debug("Sending file %s", file_path)

    metadata = {
        'name': f"{file_path.name}",
        'parents': [_get_folder_id()]
    }
    media = MediaFileUpload(file_path, mimetype='application/json')

    with drive_client() as client:
        client.files().create(body=metadata, media_body=media).execute()
    logger.debug("File sent")
async def get_free_materials() -> list[RowMapping]:
    """Get materials that have no status record (not assigned yet)."""
    logger.debug("Getting free materials")

    assigned_condition = sa.select(1) \
        .select_from(models.Statuses) \
        .where(models.Statuses.c.material_id == models.Materials.c.material_id)

    stmt = sa.select(models.Materials) \
        .where(~sa.exists(assigned_condition))

    async with database.session() as ses:
        result = await ses.execute(stmt)
        return result.all()
async def is_material_assigned(*, material_id: UUID) -> bool:
    """Tell whether the material has a status with a start date set."""
    logger.debug("Checking material_id=%s", material_id)

    stmt = sa.select(models.Materials.c.material_id) \
        .join(models.Statuses,
              models.Materials.c.material_id == models.Statuses.c.material_id) \
        .where(models.Statuses.c.started_at.isnot(None)) \
        .where(models.Materials.c.material_id == str(material_id))

    async with database.session() as ses:
        return await ses.scalar(stmt) is not None
async def set_log(*, material_id: UUID, count: int, date: datetime.date) -> None:
    """Insert one reading-log record."""
    logger.debug("Setting log for material_id=%s, count=%s, date=%s: ",
                 material_id, count, date)

    stmt = models.ReadingLog.insert().values({
        'material_id': str(material_id),
        'count': count,
        'date': date
    })

    async with database.session() as ses:
        await ses.execute(stmt)
    logger.debug("Log record added")
def _get_last_dump() -> str:
    """Return the Drive file id of the most recently modified dump.

    Raises:
        ValueError: if the Drive folder contains no dump files.
    """
    logger.debug("Getting last dump started")
    folder_id = _get_folder_id()

    query = f"name contains 'tracker_' and mimeType='application/json' and '{folder_id}' in parents"

    with drive_client() as client:
        response = client.files().list(
            q=query, spaces='drive',
            fields='files(id,modifiedTime,name)')\
            .execute()

    files = response['files']
    logger.debug("%s files found", len(files))
    if not files:
        # files[0] below would otherwise raise a bare IndexError
        raise ValueError("No dump files found in the Drive folder")

    # newest dump first
    files.sort(key=lambda resp: resp['modifiedTime'], reverse=True)
    return files[0]['id']
async def get_reading_material_titles() -> dict[UUID, str]:
    """Map material id to title for materials currently being read.

    Only assigned materials without a completion date are included.
    """
    # Log message was copy-pasted from get_material_titles; disambiguate it
    logger.debug("Getting reading material titles")

    stmt = sa.select([models.Materials.c.material_id,
                      models.Materials.c.title]) \
        .join(models.Statuses,
              models.Statuses.c.material_id == models.Materials.c.material_id) \
        .where(models.Statuses.c.completed_at.is_(None))  # IS NULL, E711-clean

    async with database.session() as ses:
        return {
            row.material_id: row.title
            async for row in await ses.stream(stmt)
        }
async def get_completion_dates() -> dict[UUID, datetime.date]:
    """Map material id to completion date for all completed materials."""
    logger.debug("Getting completion dates")

    stmt = sa.select([models.Materials.c.material_id,
                      models.Statuses.c.completed_at]) \
        .join(models.Statuses,
              models.Statuses.c.material_id == models.Materials.c.material_id) \
        .where(models.Statuses.c.completed_at.isnot(None))  # IS NOT NULL, E711-clean

    async with database.session() as ses:
        return {
            row.material_id: row.completed_at
            async for row in await ses.stream(stmt)
        }
async def get_material_statistics(*, material_id: UUID) -> MaterialStatistics:
    """ Calculate statistics for reading or completed material """
    logger.debug("Calculating statistics for material_id=%s", material_id)

    material = await get_material(material_id=material_id)
    status = await get_status(material_id=material_id)

    # NOTE: asserts are stripped under `python -O`; kept to preserve the
    # original failure mode (AssertionError) for callers
    assert status is not None
    assert material is not None

    # Query once — the original awaited this helper twice (two DB round-trips)
    was_reading = await _was_material_being_reading(material_id=material_id)

    if was_reading:
        log_st = await statistics.get_m_log_statistics(material_id=material_id)
        avg, total = log_st.average, log_st.total
        duration, lost_time = log_st.duration, log_st.lost_time
        max_record, min_record = log_st.max_record, log_st.min_record
    else:
        avg = 1
        total = duration = lost_time = 0
        max_record = min_record = None

    if status.completed_at is None:
        remaining_pages = material.pages - total
        if was_reading:
            remaining_days = round(remaining_pages / avg)
        else:
            # No log for this material: estimate with the overall average.
            # Fetched lazily — only this branch needs it.
            avg_total = await statistics.get_avg_read_pages()
            remaining_days = round(remaining_pages / avg_total)
        would_be_completed = database.today() + datetime.timedelta(days=remaining_days)
    else:
        would_be_completed = remaining_days = remaining_pages = None  # type: ignore

    return MaterialStatistics(
        material=material,
        started_at=status.started_at,
        completed_at=status.completed_at,
        duration=duration,
        lost_time=lost_time,
        total=total,
        min_record=min_record,
        max_record=max_record,
        average=avg,
        remaining_pages=remaining_pages,
        remaining_days=remaining_days,
        would_be_completed=would_be_completed
    )
def _download_file(file_id: str) -> Path:
    """Download a Drive file into data/restore.json and return that path."""
    logger.debug("Downloading file id='%s'", file_id)
    target = Path('data') / 'restore.json'

    with drive_client() as client:
        request = client.files().get_media(fileId=file_id)
        buffer = io.BytesIO()
        downloader = MediaIoBaseDownload(buffer, request)

        done = False
        while not done:
            status, done = downloader.next_chunk()
            logger.debug("Download %d%%.", int(status.progress() * 100))

    with target.open('wb') as f:
        f.write(buffer.getvalue())
    return target
async def add_material(*, title: str, authors: str, pages: int, tags: Optional[str]) -> None:
    """Insert a new material."""
    logger.debug("Adding material title=%s", title)

    stmt = models.Materials.insert().values({
        "title": title,
        "authors": authors,
        "pages": pages,
        "tags": tags
    })

    async with database.session() as ses:
        await ses.execute(stmt)
    logger.debug("Material added")
async def start_material(*,
                         material_id: UUID,
                         start_date: Optional[datetime.date] = None) -> None:
    """Assign the material, recording when reading started.

    Raises:
        ValueError: if the start date is in the future.
    """
    start_date = start_date or database.today().date()
    logger.debug("Starting material_id=%s", material_id)

    if start_date > database.today().date():
        raise ValueError("Start date must be less than today")

    stmt = models.Statuses.insert().values({
        "material_id": str(material_id),
        "started_at": start_date
    })

    async with database.session() as ses:
        await ses.execute(stmt)
    logger.debug("Material material_id=%s started", material_id)
async def _restore_db(dump_path: Path) -> None:
    """Load a JSON dump and insert its contents into the database.

    Args:
        dump_path: path to a dump file produced by the dump routine.

    Raises:
        ValueError: if the dump file does not exist.
    """
    if not dump_path.exists():
        raise ValueError("Dump file not found")

    with dump_path.open() as f:
        data = ujson.load(f)

    async with database.session() as ses:
        # order of them matters
        for table in TABLES:
            values = [{
                key: _convert_str_to_date(value)
                for key, value in record.items()
            } for record in data[table.name]]

            if not values:
                # insert().values([]) is invalid SQL; nothing to restore here
                logger.debug("No values for %s, skipping", table.name)
                continue

            logger.debug("Inserting %s values to %s", len(values), table.name)
            stmt = table.insert().values(values)
            await ses.execute(stmt)
            logger.debug("Data into %s inserted", table.name)
async def add_card(*,
                   material_id: UUID,
                   note_id: UUID,
                   question: str,
                   answer: Optional[str] = None) -> None:
    """Insert a new card bound to a material and a note."""
    logger.debug("Adding new card")

    stmt = models.Cards.insert().values({
        "material_id": str(material_id),
        "note_id": str(note_id),
        "question": question,
        "answer": answer,
    })

    async with database.session() as ses:
        await ses.execute(stmt)
    logger.debug("Card added")
async def add_note(*,
                   material_id: UUID,
                   content: str,
                   chapter: int,
                   page: int,
                   date: Optional[datetime.date] = None) -> None:
    """Insert a note for the material and log the created note id.

    Args:
        date: day the note was added; defaults to today.
    """
    # database.today() returns a datetime (siblings call .date() on it);
    # normalize so `added_at` matches the `datetime.date` annotation
    date = date or database.today().date()
    logger.debug("Adding note for material_id=%s at %s", material_id, date)

    values = {
        'material_id': str(material_id),
        'content': content,
        'chapter': chapter,
        'page': page,
        'added_at': date
    }
    stmt = models.Notes. \
        insert().values(values) \
        .returning(models.Notes.c.note_id)

    async with database.session() as ses:
        note_id = (await ses.execute(stmt)).one()[0]
    logger.debug("Note_id=%s added", note_id)
async def get_notes() -> list[RowMapping]:
    """Fetch all notes."""
    logger.debug("Getting notes")

    async with database.session() as ses:
        result = await ses.execute(sa.select(models.Notes))
        return result.mappings().all()
def _remove_file(file_path: Path) -> None:
    """Delete the given file.

    Raises:
        FileNotFoundError: if the file does not exist (same as os.remove).
    """
    logger.debug("Removing '%s'", file_path)
    # Path.unlink matches the pathlib style used throughout this module
    file_path.unlink()
    logger.debug("File removed")