import os
from datetime import datetime

# NOTE: the exact import path for db_helper depends on the project layout;
# the "database" package referenced by the alembic paths below is assumed.
from database import db_helper


def export_sqlite(database_path: str, api_type, datas):
    # Ensure the metadata directory exists before touching the database file.
    metadata_directory = os.path.dirname(database_path)
    os.makedirs(metadata_directory, exist_ok=True)
    database_name = os.path.basename(database_path).replace(".db", "")
    cwd = os.getcwd()
    alembic_location = os.path.join(
        cwd, "database", "databases", database_name.lower()
    )
    # Bring the schema up to date, then open a session against the database.
    db_helper.run_migrations(alembic_location, database_path)
    Session, engine = db_helper.create_database_session(database_path)
    db_collection = db_helper.database_collection()
    database = db_collection.database_picker(database_name)
    if not database:
        return
    database_session = Session()
    api_table = database.table_picker(api_type)
    if not api_table:
        return
    for post in datas:
        post_id = post["post_id"]
        postedAt = post["postedAt"]
        date_object = None
        if postedAt:
            if not isinstance(postedAt, datetime):
                date_object = datetime.strptime(postedAt, "%d-%m-%Y %H:%M:%S")
            else:
                date_object = postedAt
        # Fetch the existing post row, or create a new one.
        result = database_session.query(api_table)
        post_db = result.filter_by(post_id=post_id).first()
        if not post_db:
            post_db = api_table()
        if api_type == "Messages":
            post_db.user_id = post["user_id"]
        post_db.post_id = post_id
        post_db.text = post["text"]
        if post["price"] is None:
            post["price"] = 0
        post_db.price = post["price"]
        post_db.paid = post["paid"]
        post_db.archived = post["archived"]
        if date_object:
            post_db.created_at = date_object
        database_session.add(post_db)
        for media in post["medias"]:
            if media["media_type"] == "Texts":
                continue
            created_at = media["created_at"]
            if not isinstance(created_at, datetime):
                date_object = datetime.strptime(created_at, "%d-%m-%Y %H:%M:%S")
            else:
                date_object = created_at
            media_id = media.get("media_id", None)
            # Look the media row up by id first, then fall back to
            # filename + timestamp before creating a fresh row.
            result = database_session.query(database.media_table)
            media_db = result.filter_by(media_id=media_id).first()
            if not media_db:
                media_db = result.filter_by(
                    filename=media["filename"], created_at=date_object
                ).first()
                if not media_db:
                    media_db = database.media_table()
            media_db.media_id = media_id
            media_db.post_id = post_id
            if "_sa_instance_state" in post:
                # Post came from a dumped SQLAlchemy row; carry over stored values.
                media_db.size = media["size"]
                media_db.downloaded = media["downloaded"]
            media_db.link = media["links"][0]
            media_db.preview = media.get("preview", False)
            media_db.directory = media["directory"]
            media_db.filename = media["filename"]
            media_db.api_type = api_type
            media_db.media_type = media["media_type"]
            media_db.linked = media.get("linked", None)
            if date_object:
                media_db.created_at = date_object
            database_session.add(media_db)
    database_session.commit()
    database_session.close()
    return Session, api_type, database
def export_sqlite2(archive_path, datas, parent_type, legacy_fixer=False):
    metadata_directory = os.path.dirname(archive_path)
    os.makedirs(metadata_directory, exist_ok=True)
    cwd = os.getcwd()
    api_type: str = os.path.basename(archive_path).removesuffix(".db")
    database_path = archive_path
    database_name = parent_type if parent_type else api_type
    database_name = database_name.lower()
    db_collection = db_helper.database_collection()
    database = db_collection.database_picker(database_name)
    if not database:
        return
    alembic_location = os.path.join(cwd, "database", "databases", database_name)
    database_exists = os.path.exists(database_path)
    if database_exists:
        if os.path.getsize(database_path) == 0:
            # An empty file is treated as a missing database.
            os.remove(database_path)
            database_exists = False
    if not legacy_fixer:
        # Upgrade any legacy schema before running migrations.
        legacy_database_fixer(database_path, database, database_name, database_exists)
    db_helper.run_migrations(alembic_location, database_path)
    Session, engine = db_helper.create_database_session(database_path)
    database_session = Session()
    api_table = database.api_table
    media_table = database.media_table
    for post in datas:
        post_id = post["post_id"]
        postedAt = post["postedAt"]
        date_object = None
        if postedAt:
            if not isinstance(postedAt, datetime):
                date_object = datetime.strptime(postedAt, "%d-%m-%Y %H:%M:%S")
            else:
                date_object = postedAt
        # Fetch the existing post row, or create a new one.
        result = database_session.query(api_table)
        post_db = result.filter_by(post_id=post_id).first()
        if not post_db:
            post_db = api_table()
        post_db.post_id = post_id
        post_db.text = post["text"]
        if post["price"] is None:
            post["price"] = 0
        post_db.price = post["price"]
        post_db.paid = post["paid"]
        post_db.archived = post["archived"]
        if date_object:
            post_db.created_at = date_object
        database_session.add(post_db)
        for media in post["medias"]:
            if media["media_type"] == "Texts":
                continue
            media_id = media.get("media_id", None)
            # Look the media row up by id first, then fall back to
            # filename + timestamp before creating a fresh row.
            result = database_session.query(media_table)
            media_db = result.filter_by(media_id=media_id).first()
            if not media_db:
                media_db = result.filter_by(
                    filename=media["filename"], created_at=date_object
                ).first()
                if not media_db:
                    media_db = media_table()
            if legacy_fixer:
                media_db.size = media["size"]
                media_db.downloaded = media["downloaded"]
            media_db.media_id = media_id
            media_db.post_id = post_id
            media_db.link = media["links"][0]
            media_db.preview = media.get("preview", False)
            media_db.directory = media["directory"]
            media_db.filename = media["filename"]
            media_db.api_type = api_type
            media_db.media_type = media["media_type"]
            media_db.linked = media.get("linked", None)
            if date_object:
                media_db.created_at = date_object
            database_session.add(media_db)
    database_session.commit()
    database_session.close()
    return Session, api_type, database
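# Hypothetical invocation sketch (not part of the module): it only illustrates
# the shape of `datas` that both exporters read. The archive path, parent_type
# value, and sample post/media fields are assumptions; running it requires the
# project's db_helper package and its alembic migration tree to be present.
if __name__ == "__main__":
    sample_datas = [
        {
            "post_id": 1,
            "postedAt": "01-01-2021 00:00:00",
            "text": "example post",
            "price": None,
            "paid": False,
            "archived": False,
            "medias": [
                {
                    "media_id": 10,
                    "media_type": "Images",
                    "links": ["https://example.com/a.jpg"],
                    "directory": "downloads",
                    "filename": "a.jpg",
                    "size": 0,
                    "downloaded": False,
                    "created_at": "01-01-2021 00:00:00",
                }
            ],
        }
    ]
    export_sqlite2("metadata/Posts.db", sample_datas, "user_data")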