Example #1
0
def legacy_sqlite_updater(
    legacy_metadata_path: str,
    api_type: str,
    subscription: "create_user",
    delete_metadatas: list,
):
    """Migrate a legacy SQLite metadata file and collect its rows as dicts.

    Runs alembic migrations against the legacy database, then reads every row
    of the table matching ``api_type`` (plus its legacy media rows) into plain
    dicts tagged with the subscription's user id.

    Args:
        legacy_metadata_path: Path to the legacy ``.db`` file. If it does not
            exist, nothing happens and the result list stays empty.
        api_type: Content category (e.g. "Messages"); "Messages" selects the
            legacy variant of the table.
        subscription: Account object whose ``id`` is stamped onto every row.
            (Annotation is a string forward reference so the module stays
            importable even if ``create_user`` is imported lazily.)
        delete_metadatas: Accumulator of processed paths; the path is appended
            only when its table was found, marking the file for deletion.

    Returns:
        Tuple of (list of row dicts with attached ``medias``, the updated
        ``delete_metadatas`` list).
    """
    final_result = []
    if os.path.exists(legacy_metadata_path):
        cwd = os.getcwd()
        alembic_location = os.path.join(cwd, "database", "archived_databases",
                                        api_type.lower())
        db_helper.run_migrations(alembic_location, legacy_metadata_path)
        database_name = "user_data"
        session, engine = db_helper.create_database_session(
            legacy_metadata_path)
        database_session: Session = session()
        db_collection = db_helper.database_collection()
        database = db_collection.database_picker(database_name)
        if database:
            # "Messages" uses the legacy table layout (second arg True).
            if api_type == "Messages":
                api_table_table = database.table_picker(api_type, True)
            else:
                api_table_table = database.table_picker(api_type)
            media_table_table = database.media_table.media_legacy_table
            if api_table_table:
                result = database_session.query(api_table_table).all()
                result2 = database_session.query(media_table_table).all()
                for item in result:
                    item = item.__dict__
                    item["medias"] = []
                    # Attach every media row that belongs to this post.
                    for item2 in result2:
                        if item["post_id"] != item2.post_id:
                            continue
                        item2 = item2.__dict__
                        item2["links"] = [item2["link"]]
                        item["medias"].append(item2)
                    item["user_id"] = subscription.id
                    item["postedAt"] = item["created_at"]
                    final_result.append(item)
                delete_metadatas.append(legacy_metadata_path)
        database_session.close()
    return final_result, delete_metadatas
Example #2
0
def export_sqlite(archive_path,
                  datas,
                  parent_type,
                  legacy_fixer=False,
                  api=None):
    """Write post/media dicts into the SQLite database at *archive_path*.

    Ensures the metadata directory exists, optionally runs the legacy-database
    fixer, runs migrations, then upserts one row per post and one row per
    non-text media item.

    Args:
        archive_path: Path of the target ``.db`` file; its basename (minus
            ``.db``) becomes the api type and, when *parent_type* is falsy,
            also the database name.
        datas: Iterable of post dicts (``post_id``, ``postedAt``, ``text``,
            ``price``, ``paid``, ``medias``, ...).
        parent_type: Overrides the database name derived from the filename.
        legacy_fixer: When False, the legacy fixer pass runs first; when True,
            media ``size``/``downloaded`` fields are also copied over.
        api: Forwarded to ``db_helper.run_migrations``.

    Returns:
        Tuple of (session factory, api_type string, database object).
    """
    metadata_directory = os.path.dirname(archive_path)
    os.makedirs(metadata_directory, exist_ok=True)
    cwd = os.getcwd()
    api_type: str = os.path.basename(archive_path).removesuffix(".db")
    database_path = archive_path
    database_name = (parent_type if parent_type else api_type).lower()
    db_collection = db_helper.database_collection()
    database = db_collection.chooser(database_name)
    alembic_location = os.path.join(cwd, "database", "databases",
                                    database_name)
    # A zero-byte file would confuse the migration step; treat it as absent.
    database_exists = os.path.exists(database_path)
    if database_exists and os.path.getsize(database_path) == 0:
        os.remove(database_path)
        database_exists = False
    if not legacy_fixer:
        legacy_database_fixer(database_path, database, database_name,
                              database_exists)
    db_helper.run_migrations(alembic_location, database_path, api)
    Session, engine = db_helper.create_database_session(database_path)
    database_session = Session()
    api_table = database.api_table
    media_table = database.media_table

    for post in datas:
        post_id = post["post_id"]
        postedAt = post["postedAt"]
        date_object = None
        if postedAt:
            if isinstance(postedAt, datetime):
                date_object = postedAt
            else:
                date_object = datetime.strptime(postedAt, "%d-%m-%Y %H:%M:%S")
        # Upsert the post row keyed on post_id.
        post_db = (database_session.query(api_table)
                   .filter_by(post_id=post_id).first())
        if not post_db:
            post_db = api_table()
        post_db.post_id = post_id
        post_db.text = post["text"]
        if post["price"] is None:
            post["price"] = 0
        post_db.price = post["price"]
        post_db.paid = post["paid"]
        if date_object:
            post_db.created_at = date_object
        database_session.add(post_db)
        for media in post["medias"]:
            if media["media_type"] == "Texts":
                continue
            media_id = media.get("media_id", None)
            result = database_session.query(media_table)
            media_db = result.filter_by(media_id=media_id).first()
            if not media_db:
                # No id match — fall back to filename + timestamp before
                # creating a fresh row.
                media_db = result.filter_by(filename=media["filename"],
                                            created_at=date_object).first()
                if not media_db:
                    media_db = media_table()
            if legacy_fixer:
                media_db.size = media["size"]
                media_db.downloaded = media["downloaded"]
            media_db.media_id = media_id
            media_db.post_id = post_id
            media_db.link = media["links"][0]
            media_db.preview = media.get("preview", False)
            media_db.directory = media["directory"]
            media_db.filename = media["filename"]
            media_db.media_type = media["media_type"]
            media_db.linked = media.get("linked", None)
            if date_object:
                media_db.created_at = date_object
            database_session.add(media_db)

    database_session.commit()
    database_session.close()
    return Session, api_type, database
Example #3
0
def export_sqlite(database_path: str, api_type, datas):
    """Persist post/media dicts for *api_type* into the database at *database_path*.

    Runs migrations, resolves the table for ``api_type``, then upserts one row
    per post plus one row per non-text media entry.

    Args:
        database_path: Target ``.db`` file; its basename picks the database.
        api_type: Content category; "Messages" rows also store ``user_id``.
        datas: Iterable of post dicts.

    Returns:
        Tuple of (session factory, api_type, database object), or ``None``
        when no matching database or table is found.
    """
    metadata_directory = os.path.dirname(database_path)
    os.makedirs(metadata_directory, exist_ok=True)
    database_name = os.path.basename(database_path).replace(".db", "")
    cwd = os.getcwd()
    alembic_location = os.path.join(cwd, "database", "databases",
                                    database_name.lower())
    db_helper.run_migrations(alembic_location, database_path)
    Session, engine = db_helper.create_database_session(database_path)
    db_collection = db_helper.database_collection()
    database = db_collection.database_picker(database_name)
    if not database:
        return
    database_session = Session()
    api_table = database.table_picker(api_type)
    if not api_table:
        return
    for post in datas:
        post_id = post["post_id"]
        postedAt = post["postedAt"]
        date_object = None
        if postedAt:
            if isinstance(postedAt, datetime):
                date_object = postedAt
            else:
                date_object = datetime.strptime(postedAt, "%d-%m-%Y %H:%M:%S")
        # Upsert the post row keyed on post_id.
        post_db = (database_session.query(api_table)
                   .filter_by(post_id=post_id).first())
        if not post_db:
            post_db = api_table()
        if api_type == "Messages":
            post_db.user_id = post["user_id"]
        post_db.post_id = post_id
        post_db.text = post["text"]
        if post["price"] is None:
            post["price"] = 0
        post_db.price = post["price"]
        post_db.paid = post["paid"]
        post_db.archived = post["archived"]
        if date_object:
            post_db.created_at = date_object
        database_session.add(post_db)
        for media in post["medias"]:
            if media["media_type"] == "Texts":
                continue
            created_at = media["created_at"]
            if isinstance(created_at, datetime):
                # BUG FIX: previously assigned ``postedAt`` here, silently
                # replacing the media's own timestamp with the post's.
                date_object = created_at
            else:
                date_object = datetime.strptime(created_at,
                                                "%d-%m-%Y %H:%M:%S")
            media_id = media.get("media_id", None)
            result = database_session.query(database.media_table)
            media_db = result.filter_by(media_id=media_id).first()
            if not media_db:
                # No id match — fall back to filename + timestamp before
                # creating a fresh row.
                media_db = result.filter_by(filename=media["filename"],
                                            created_at=date_object).first()
                if not media_db:
                    media_db = database.media_table()
            media_db.media_id = media_id
            media_db.post_id = post_id
            # "_sa_instance_state" marks a dict exported straight from a
            # SQLAlchemy row (legacy-fixer path); only then are size /
            # downloaded present.
            if "_sa_instance_state" in post:
                media_db.size = media["size"]
                media_db.downloaded = media["downloaded"]
            media_db.link = media["links"][0]
            media_db.preview = media.get("preview", False)
            media_db.directory = media["directory"]
            media_db.filename = media["filename"]
            media_db.api_type = api_type
            media_db.media_type = media["media_type"]
            media_db.linked = media.get("linked", None)
            if date_object:
                media_db.created_at = date_object
            database_session.add(media_db)
    database_session.commit()
    database_session.close()
    return Session, api_type, database
Example #4
0
async def fix_sqlite(
    profile_directory,
    download_directory,
    metadata_directory,
    format_directories,
    authed: "create_auth",
    site_name,
    username,
    metadata_directory_format,
):
    """Fold legacy ``Archived.db`` rows back into the per-type databases.

    For every content type, resolves the final metadata directory; then, for
    each unique directory containing an ``Archived.db``: migrates both
    databases, copies each archived row that is missing from the per-type
    database (flagging it ``archived=True``), and finally deletes
    ``Archived.db``.

    Args:
        profile_directory: Base profile path fed to *format_directories*.
        download_directory: Base download path fed to *format_directories*.
        metadata_directory: Base metadata path fed to *format_directories*.
        format_directories: Async callable resolving the directory layout.
        authed: Authenticated session object (string forward reference keeps
            the module importable if ``create_auth`` is imported lazily).
        site_name: Forwarded to *format_directories*.
        username: Forwarded to *format_directories*.
        metadata_directory_format: Forwarded to *format_directories*.
    """
    items = content_types().__dict__.items()
    final_metadatas = []
    for api_type, value in items:
        mandatory_directories = {
            "profile_directory": profile_directory,
            "download_directory": download_directory,
            "metadata_directory": metadata_directory,
        }
        formatted_directories = await format_directories(
            mandatory_directories,
            authed,
            site_name,
            username,
            metadata_directory_format,
            "",
            api_type,
        )
        final_metadata_directory = formatted_directories["metadata_directory"]
        # Keep each resolved directory only once.
        if final_metadata_directory not in final_metadatas:
            final_metadatas.append(final_metadata_directory)
    for final_metadata in final_metadatas:
        archived_database_path = os.path.join(final_metadata, "Archived.db")
        if not os.path.exists(archived_database_path):
            continue
        Session2, engine = db_helper.create_database_session(
            archived_database_path)
        database_session: Session = Session2()
        cwd = os.getcwd()
        for api_type, value in items:
            database_path = os.path.join(final_metadata, f"{api_type}.db")
            database_name = api_type.lower()
            alembic_location = os.path.join(cwd, "database",
                                            "archived_databases",
                                            database_name)
            if not inspect(engine).has_table(database_name):
                continue
            db_helper.run_migrations(alembic_location,
                                     archived_database_path)
            db_helper.run_migrations(alembic_location, database_path)
            Session3, engine2 = db_helper.create_database_session(
                database_path)
            db_collection = db_helper.database_collection()
            database_session2: Session = Session3()
            database = db_collection.database_picker("user_data")
            if not database:
                # NOTE(review): bails out mid-run without closing the open
                # sessions or removing Archived.db (pre-existing behavior,
                # kept as-is) — confirm this is intended.
                return
            table_name = database.table_picker(api_type, True)
            if not table_name:
                return
            archived_result = database_session.query(table_name).all()
            for item in archived_result:
                result2 = (database_session2.query(table_name).filter(
                    table_name.post_id == item.post_id).first())
                if not result2:
                    # Re-create the row in the target database, dropping the
                    # old primary key and SQLAlchemy bookkeeping first.
                    item2 = item.__dict__
                    item2.pop("id")
                    item2.pop("_sa_instance_state")
                    item = table_name(**item2)
                    item.archived = True
                    database_session2.add(item)
            database_session2.commit()
            database_session2.close()
        database_session.commit()
        database_session.close()
        os.remove(archived_database_path)