Example #1
 async def prepare_downloads(self, subscription: user_types):
     global_settings = subscription.get_api().get_global_settings()
     site_settings = subscription.get_api().get_site_settings()
     if not (global_settings and site_settings):
         return
     subscription_directory_manager = subscription.directory_manager
     directory = subscription_directory_manager.root_download_directory
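     # Each scraped content category (api_type) shares the single
     # user_data.db metadata file built below.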
     for api_type, _ in subscription.scraped.__dict__.items():
         metadata_path = (
             subscription_directory_manager.user.metadata_directory.joinpath(
                 "user_data.db"
             )
         )
         database_session, _engine = await db_helper.import_database(metadata_path)
         db_collection = db_helper.database_collection()
         database = db_collection.database_picker("user_data")
         if database:
             media_table = database.media_table
             overwrite_files = site_settings.overwrite_files
             if overwrite_files:
                 download_list: Any = (
                     database_session.query(media_table)
                     .filter(media_table.api_type == api_type)
                     .all()
                 )
                 media_set_count = len(download_list)
             else:
                 download_list: Any = (
                     database_session.query(media_table)
                     .filter(media_table.downloaded == False)
                     .filter(media_table.api_type == api_type)
                 )
                 media_set_count = db_helper.get_count(download_list)
             location = ""
             string = "Download Processing\n"
             string += f"Name: {subscription.username} | Type: {api_type} | Count: {media_set_count}{location} | Directory: {directory}\n"
             if media_set_count:
                 print(string)
                 await main_helper.async_downloads(
                     download_list, subscription, global_settings
                 )
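             # SQLite raises OperationalError while another connection holds
             # the write lock; roll back and retry the commit until it lands.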
             while True:
                 try:
                     database_session.commit()
                     break
                 except OperationalError:
                     database_session.rollback()
             database_session.close()
Example #2
def legacy_sqlite_updater(
    legacy_metadata_path: str,
    api_type: str,
    subscription: create_user,
    delete_metadatas: list,
):
    final_result = []
    if os.path.exists(legacy_metadata_path):
        cwd = os.getcwd()
        alembic_location = os.path.join(cwd, "database", "archived_databases",
                                        api_type.lower())
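        # Apply the Alembic migrations kept under database/archived_databases
        # so the legacy schema is current before it is read.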
        db_helper.run_migrations(alembic_location, legacy_metadata_path)
        database_name = "user_data"
        session, engine = db_helper.create_database_session(
            legacy_metadata_path)
        database_session: Session = session()
        db_collection = db_helper.database_collection()
        database = db_collection.database_picker(database_name)
        if database:
            if api_type == "Messages":
                api_table_table = database.table_picker(api_type, True)
            else:
                api_table_table = database.table_picker(api_type)
            media_table_table = database.media_table.media_legacy_table
            if api_table_table:
                result = database_session.query(api_table_table).all()
                result2 = database_session.query(media_table_table).all()
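                # Attach each media row to its parent post by matching post_id.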
                for item in result:
                    item = item.__dict__
                    item["medias"] = []
                    for item2 in result2:
                        if item["post_id"] != item2.post_id:
                            continue
                        item2 = item2.__dict__
                        item2["links"] = [item2["link"]]
                        item["medias"].append(item2)
                    item["user_id"] = subscription.id
                    item["postedAt"] = item["created_at"]
                    final_result.append(item)
                delete_metadatas.append(legacy_metadata_path)
        database_session.close()
    return final_result, delete_metadatas
Example #3
 def __init__(self, api=None, subscription=None) -> None:
     if api and subscription:
         username = subscription.username
         download_info = subscription.download_info
         if download_info:
             self.downloaded = True
             metadata_locations = download_info["metadata_locations"]
             directory = download_info["directory"]
             for parent_type, value in metadata_locations.items():
                 for api_type, metadata_path in value.items():
                     Session, engine = db_helper.create_database_session(
                         metadata_path)
                     database_session = Session()
                     database_name = api_type.lower()
                     db_collection = db_helper.database_collection()
                     database = db_collection.chooser(database_name)
                     api_table = database.api_table
                     media_table = database.media_table
                     result = database_session.query(media_table).all()
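                     # Group each media row into the matching media_types()
                     # bucket via its media_type attribute.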
                     media_type_list = media_types()
                     for r in result:
                         item = getattr(media_type_list, r.media_type)
                         item.append(r)
                     media_type_list = media_type_list.__dict__
                     for location, v in media_type_list.items():
                         if location == "Texts":
                             continue
                         media_set = v
                         string = "Download Processing\n"
                         string += f"Name: {username} | Type: {api_type} | Count: {len(media_set)} {location} | Directory: {directory}\n"
                         print(string)
                         # `multiprocessing` here is a project helper that
                         # returns a worker pool (the stdlib module itself is
                         # not callable); fan the downloads out across it.
                         pool = multiprocessing()
                         pool.starmap(self.download, product(media_set, [api]))
                     database_session.commit()
         else:
             self.downloaded = False
Example #4
def export_sqlite(archive_path,
                  datas,
                  parent_type,
                  legacy_fixer=False,
                  api=None):
    metadata_directory = os.path.dirname(archive_path)
    os.makedirs(metadata_directory, exist_ok=True)
    cwd = os.getcwd()
    api_type: str = os.path.basename(archive_path).removesuffix(".db")
    database_path = archive_path
    database_name = parent_type if parent_type else api_type
    database_name = database_name.lower()
    db_collection = db_helper.database_collection()
    database = db_collection.chooser(database_name)
    alembic_location = os.path.join(cwd, "database", "databases",
                                    database_name)
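    # Migration scripts for this database live under
    # database/databases/<database_name>.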
    database_exists = os.path.exists(database_path)
    if database_exists:
        if os.path.getsize(database_path) == 0:
            os.remove(database_path)
            database_exists = False
    if not legacy_fixer:
        legacy_database_fixer(database_path, database, database_name,
                              database_exists)
    db_helper.run_migrations(alembic_location, database_path, api)
    Session, engine = db_helper.create_database_session(database_path)
    database_session = Session()
    api_table = database.api_table
    media_table = database.media_table

    for post in datas:
        post_id = post["post_id"]
        postedAt = post["postedAt"]
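        # postedAt arrives either as a datetime or as a
        # "DD-MM-YYYY HH:MM:SS" string; normalize it to a datetime.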
        date_object = None
        if postedAt:
            if not isinstance(postedAt, datetime):
                date_object = datetime.strptime(postedAt, "%d-%m-%Y %H:%M:%S")
            else:
                date_object = postedAt
        result = database_session.query(api_table)
        post_db = result.filter_by(post_id=post_id).first()
        if not post_db:
            post_db = api_table()
        post_db.post_id = post_id
        post_db.text = post["text"]
        if post["price"] == None:
            post["price"] = 0
        post_db.price = post["price"]
        post_db.paid = post["paid"]
        if date_object:
            post_db.created_at = date_object
        database_session.add(post_db)
        for media in post["medias"]:
            if media["media_type"] == "Texts":
                continue
            media_id = media.get("media_id", None)
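            # Look the media row up by media_id first, then fall back to
            # filename plus timestamp before creating a fresh row.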
            result = database_session.query(media_table)
            media_db = result.filter_by(media_id=media_id).first()
            if not media_db:
                media_db = result.filter_by(filename=media["filename"],
                                            created_at=date_object).first()
                if not media_db:
                    media_db = media_table()
            if legacy_fixer:
                media_db.size = media["size"]
                media_db.downloaded = media["downloaded"]
            media_db.media_id = media_id
            media_db.post_id = post_id
            media_db.link = media["links"][0]
            media_db.preview = media.get("preview", False)
            media_db.directory = media["directory"]
            media_db.filename = media["filename"]
            media_db.media_type = media["media_type"]
            media_db.linked = media.get("linked", None)
            if date_object:
                media_db.created_at = date_object
            database_session.add(media_db)

    database_session.commit()
    database_session.close()
    return Session, api_type, database
Example #5
try:
    if __name__ == "__main__":
        cwd = os.getcwd()
        cwd2 = os.path.dirname(__file__)
        if cwd == cwd2:
            x = os.path.realpath("../../../")
        else:
            x = os.path.realpath("")
        sys.path.insert(0, x)
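        # Put the project root on sys.path whether the script is launched
        # from the repository root or from this file's own directory.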
        while True:
            from helpers.db_helper import database_collection, run_revisions

            db_collection = database_collection()
            key_list = list(db_collection.__dict__.items())
            string = ""
            for count, (key, item) in enumerate(key_list):
                string += f"{count} = {key} | "
            print(string)
            x = input()
            # x = 0
            x = int(x)
            database_path = None
            module = key_list[x][1]
            if module:
Example #6
def export_sqlite(database_path: str, api_type, datas):
    metadata_directory = os.path.dirname(database_path)
    os.makedirs(metadata_directory, exist_ok=True)
    database_name = os.path.basename(database_path).replace(".db", "")
    cwd = os.getcwd()
    alembic_location = os.path.join(cwd, "database", "databases",
                                    database_name.lower())
    db_helper.run_migrations(alembic_location, database_path)
    Session, engine = db_helper.create_database_session(database_path)
    db_collection = db_helper.database_collection()
    database = db_collection.database_picker(database_name)
    if not database:
        return
    database_session = Session()
    api_table = database.table_picker(api_type)
    if not api_table:
        return
    for post in datas:
        post_id = post["post_id"]
        postedAt = post["postedAt"]
        date_object = None
        if postedAt:
            if not isinstance(postedAt, datetime):
                date_object = datetime.strptime(postedAt, "%d-%m-%Y %H:%M:%S")
            else:
                date_object = postedAt
        result = database_session.query(api_table)
        post_db = result.filter_by(post_id=post_id).first()
        if not post_db:
            post_db = api_table()
        if api_type == "Messages":
            post_db.user_id = post["user_id"]
        post_db.post_id = post_id
        post_db.text = post["text"]
        if post["price"] is None:
            post["price"] = 0
        post_db.price = post["price"]
        post_db.paid = post["paid"]
        post_db.archived = post["archived"]
        if date_object:
            post_db.created_at = date_object
        database_session.add(post_db)
        for media in post["medias"]:
            if media["media_type"] == "Texts":
                continue
            created_at = media["created_at"]
            if not isinstance(created_at, datetime):
                date_object = datetime.strptime(created_at,
                                                "%d-%m-%Y %H:%M:%S")
            else:
                date_object = created_at
            media_id = media.get("media_id", None)
            result = database_session.query(database.media_table)
            media_db = result.filter_by(media_id=media_id).first()
            if not media_db:
                media_db = result.filter_by(filename=media["filename"],
                                            created_at=date_object).first()
                if not media_db:
                    media_db = database.media_table()
            media_db.media_id = media_id
            media_db.post_id = post_id
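            # Dicts built from SQLAlchemy rows (item.__dict__ in
            # legacy_sqlite_updater) still carry _sa_instance_state; keep
            # their recorded size and downloaded flags.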
            if "_sa_instance_state" in post:
                media_db.size = media["size"]
                media_db.downloaded = media["downloaded"]
            media_db.link = media["links"][0]
            media_db.preview = media.get("preview", False)
            media_db.directory = media["directory"]
            media_db.filename = media["filename"]
            media_db.api_type = api_type
            media_db.media_type = media["media_type"]
            media_db.linked = media.get("linked", None)
            if date_object:
                media_db.created_at = date_object
            database_session.add(media_db)
    database_session.commit()
    database_session.close()
    return Session, api_type, database
Example #7
async def fix_sqlite(
    profile_directory,
    download_directory,
    metadata_directory,
    format_directories,
    authed: create_auth,
    site_name,
    username,
    metadata_directory_format,
):
    items = content_types().__dict__.items()
    final_metadatas = []
    for api_type, value in items:
        mandatory_directories = {}
        mandatory_directories["profile_directory"] = profile_directory
        mandatory_directories["download_directory"] = download_directory
        mandatory_directories["metadata_directory"] = metadata_directory
        formatted_directories = await format_directories(
            mandatory_directories,
            authed,
            site_name,
            username,
            metadata_directory_format,
            "",
            api_type,
        )
        final_metadata_directory = formatted_directories["metadata_directory"]
        if final_metadata_directory not in final_metadatas:
            final_metadatas.append(final_metadata_directory)
    for final_metadata in final_metadatas:
        archived_database_path = os.path.join(final_metadata, "Archived.db")
        if os.path.exists(archived_database_path):
            Session2, engine = db_helper.create_database_session(
                archived_database_path)
            database_session: Session = Session2()
            cwd = os.getcwd()
            for api_type, value in items:
                database_path = os.path.join(final_metadata, f"{api_type}.db")
                database_name = api_type.lower()
                alembic_location = os.path.join(cwd, "database",
                                                "archived_databases",
                                                database_name)
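                # Only migrate content types that actually have a table
                # inside Archived.db.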
                result = inspect(engine).has_table(database_name)
                if result:
                    db_helper.run_migrations(alembic_location,
                                             archived_database_path)
                    db_helper.run_migrations(alembic_location, database_path)
                    Session3, engine2 = db_helper.create_database_session(
                        database_path)
                    db_collection = db_helper.database_collection()
                    database_session2: Session = Session3()
                    database = db_collection.database_picker("user_data")
                    if not database:
                        return
                    table_name = database.table_picker(api_type, True)
                    if not table_name:
                        return
                    archived_result = database_session.query(table_name).all()
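                    # Copy archived rows missing from the per-type database
                    # and flag them as archived.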
                    for item in archived_result:
                        result2 = (database_session2.query(table_name).filter(
                            table_name.post_id == item.post_id).first())
                        if not result2:
                            item2 = item.__dict__
                            item2.pop("id")
                            item2.pop("_sa_instance_state")
                            item = table_name(**item2)
                            item.archived = True
                            database_session2.add(item)
                    database_session2.commit()
                    database_session2.close()
            database_session.commit()
            database_session.close()
            os.remove(archived_database_path)