Example #1
import os


def check_profiles():
    file_name = "config.json"
    path = os.path.join('.settings', file_name)
    import helpers.main_helper as main_helper
    from apis.onlyfans.onlyfans import auth_details
    json_config, json_config2 = main_helper.get_config(path)
    json_settings = json_config["settings"]
    profile_directories = json_settings["profile_directories"]
    profile_directory = profile_directories[0]
    matches = ["OnlyFans"]
    for match in matches:
        q = os.path.abspath(profile_directory)
        profile_site_directory = os.path.join(q, match)
        if os.path.exists(profile_site_directory):
            e = os.listdir(profile_site_directory)
            e = [os.path.join(profile_site_directory, x, "auth.json")
                 for x in e]
            e = [x for x in e if os.path.exists(x)]
            if e:
                continue
        default_profile_directory = os.path.join(
            profile_site_directory, "default")
        os.makedirs(default_profile_directory, exist_ok=True)
        auth_filepath = os.path.join(default_profile_directory, "auth.json")
        if not os.path.exists(auth_filepath):
            x = auth_details().__dict__
            main_helper.export_json(auth_filepath, x)
            string = f"{auth_filepath} has been created. Fill in the relevant details and then press enter to continue."
            input(string)
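The function above reduces to a simple pattern: for each supported site, make sure a profile directory exists, and if no profile contains an auth.json yet, create a "default" profile with an empty template and wait for the user to fill it in. A minimal standalone sketch of that pattern using only the standard library; the directory layout and template fields here are illustrative, not the project's actual schema:

import json
from pathlib import Path


def ensure_default_auth(profile_root: str, site_name: str) -> Path:
    """Create <profile_root>/<site_name>/default/auth.json if it is missing."""
    default_dir = Path(profile_root, site_name, "default")
    default_dir.mkdir(parents=True, exist_ok=True)
    auth_file = default_dir / "auth.json"
    if not auth_file.exists():
        # Empty template the user is expected to fill in by hand.
        template = {"auth": {"cookie": "", "user_agent": ""}}
        auth_file.write_text(json.dumps(template, indent=4))
        input(f"{auth_file} has been created. Fill in the relevant details "
              "and then press enter to continue.")
    return auth_file


# ensure_default_auth(".profiles", "OnlyFans")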
Example #2
import os
from pathlib import Path
from typing import Any


def check_profiles():
    config_path = Path(".settings", "config.json")
    import helpers.main_helper as main_helper
    from apis.onlyfans.onlyfans import auth_details as onlyfans_auth_details
    from apis.fansly.fansly import auth_details as fansly_auth_details
    from apis.starsavn.starsavn import auth_details as starsavn_auth_details

    config, _updated = main_helper.get_config(config_path)
    settings = config.settings
    profile_directories = settings.profile_directories
    profile_directory = profile_directories[0]
    matches = ["OnlyFans", "Fansly", "StarsAVN"]
    for string_match in matches:
        profile_site_directory = profile_directory.joinpath(string_match)
        if os.path.exists(profile_site_directory):
            e = os.listdir(profile_site_directory)
            e = [os.path.join(profile_site_directory, x, "auth.json") for x in e]
            e = [x for x in e if os.path.exists(x)]
            if e:
                continue
        default_profile_directory = profile_site_directory.joinpath("default")
        os.makedirs(default_profile_directory, exist_ok=True)
        auth_filepath = default_profile_directory.joinpath("auth.json")
        if not os.path.exists(auth_filepath):
            new_item: dict[str, Any] = {}
            # Structural pattern matching (Python 3.10+) picks the site-specific auth class.
            match string_match:
                case "OnlyFans":
                    new_item["auth"] = onlyfans_auth_details().export()

                case "Fansly":
                    new_item["auth"] = fansly_auth_details().export()

                case "StarsAVN":
                    new_item["auth"] = starsavn_auth_details().export()
                case _:
                    continue
            main_helper.export_json(new_item, auth_filepath)
            main_helper.prompt_modified(
                f"{auth_filepath} has been created. Fill in the relevant details and then press enter to continue.",
                auth_filepath,
            )
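This variant picks the site-specific auth_details class with a Python 3.10 match statement. The same dispatch can be sketched as a plain mapping from site name to a factory callable; the dataclasses below are placeholders, not the project's real auth classes:

from dataclasses import asdict, dataclass
from typing import Any, Callable


@dataclass
class OnlyFansAuth:  # placeholder for apis.onlyfans.onlyfans.auth_details
    cookie: str = ""
    user_agent: str = ""


@dataclass
class FanslyAuth:  # placeholder for apis.fansly.fansly.auth_details
    token: str = ""


AUTH_FACTORIES: dict[str, Callable[[], Any]] = {
    "OnlyFans": OnlyFansAuth,
    "Fansly": FanslyAuth,
}


def build_auth_template(site_name: str):
    """Return {"auth": {...}} for a known site, or None (the `case _` path)."""
    factory = AUTH_FACTORIES.get(site_name)
    if factory is None:
        return None
    return {"auth": asdict(factory())}


# build_auth_template("Fansly")   -> {'auth': {'token': ''}}
# build_auth_template("Unknown")  -> None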
Example #3
    # Relies on module-level imports in the source file: hashlib, html, os,
    # datetime/timedelta, helpers.main_helper as main_helper, and the
    # create_auth/create_message classes.
    async def process_mass_messages(authed: create_auth,
                                    mass_messages: list[create_message]):
        def compare_message(queue_id, remote_messages):
            for message in remote_messages:
                if "isFromQueue" in message and message["isFromQueue"]:
                    if queue_id == message["queueId"]:
                        return message

        global_found = []
        chats = []
        api = authed.get_api()
        site_settings = api.get_site_settings()
        config = api.config
        if not (config and site_settings):
            return
        settings = config.settings
        salt = settings.random_string
        # MD5 fingerprint of the configured salt; stored on each mass message
        # so stale cached results can be detected and re-fetched below.
        encoded = f"{salt}".encode("utf-8")
        hash = hashlib.md5(encoded).hexdigest()
        profile_directory = authed.directory_manager.profile.metadata_directory
        mass_message_path = profile_directory.joinpath("Mass Messages.json")
        chats_path = profile_directory.joinpath("Chats.json")
        if os.path.exists(chats_path):
            chats = main_helper.import_json(chats_path)
        date_object = datetime.today()
        date_string = date_object.strftime("%d-%m-%Y %H:%M:%S")
        for mass_message in mass_messages:
            if "status" not in mass_message:
                mass_message["status"] = ""
            if "found" not in mass_message:
                mass_message["found"] = {}
            if "hashed_ip" not in mass_message:
                mass_message["hashed_ip"] = ""
            mass_message["hashed_ip"] = mass_message.get("hashed_ip", hash)
            mass_message["date_hashed"] = mass_message.get(
                "date_hashed", date_string)
            if mass_message["isCanceled"]:
                continue
            queue_id = mass_message["id"]
            text = mass_message["textCropped"]
            text = html.unescape(text)
            mass_found = mass_message["found"]
            media_type = mass_message.get("mediaType")
            media_types = mass_message.get("mediaTypes")
            if mass_found or (not media_type and not media_types):
                continue
            identifier = None
            if chats:
                list_chats = chats
                for chat in list_chats:
                    identifier = chat["identifier"]
                    messages = chat["messages"]["list"]
                    mass_found = compare_message(queue_id, messages)
                    if mass_found:
                        mass_message["found"] = mass_found
                        mass_message["status"] = True
                        break
            if not mass_found:
                list_chats = authed.search_messages(text=text, limit=2)
                if not list_chats:
                    continue
                for item in list_chats["list"]:
                    user = item["withUser"]
                    identifier = user["id"]
                    messages = []
                    print("Getting Messages")
                    keep = ["id", "username"]
                    list_chats2 = [
                        x for x in chats if x["identifier"] == identifier
                    ]
                    if list_chats2:
                        chat2 = list_chats2[0]
                        messages = chat2["messages"]["list"]
                        messages = authed.get_messages(identifier=identifier,
                                                       resume=messages)
                        for message in messages:
                            message["withUser"] = {
                                k: item["withUser"][k]
                                for k in keep
                            }
                            message["fromUser"] = {
                                k: message["fromUser"][k]
                                for k in keep
                            }
                        mass_found = compare_message(queue_id, messages)
                        if mass_found:
                            mass_message["found"] = mass_found
                            mass_message["status"] = True
                            break
                    else:
                        item2 = {}
                        item2["identifier"] = identifier
                        item2["messages"] = authed.get_messages(
                            identifier=identifier)
                        chats.append(item2)
                        messages = item2["messages"]["list"]
                        for message in messages:
                            message["withUser"] = {
                                k: item["withUser"][k]
                                for k in keep
                            }
                            message["fromUser"] = {
                                k: message["fromUser"][k]
                                for k in keep
                            }
                        mass_found = compare_message(queue_id, messages)
                        if mass_found:
                            mass_message["found"] = mass_found
                            mass_message["status"] = True
                            break
            if not mass_found:
                mass_message["status"] = False
        main_helper.export_json(chats, chats_path)
        for mass_message in mass_messages:
            found = mass_message["found"]
            if found and found["media"]:
                user = found["withUser"]
                identifier = user["id"]
                date_hashed_object = datetime.strptime(
                    mass_message["date_hashed"], "%d-%m-%Y %H:%M:%S")
                next_date_object = date_hashed_object + timedelta(days=1)
                if (mass_message["hashed_ip"] != hash
                        or date_object > next_date_object):
                    print("Getting Message By ID")
                    x = await authed.get_message_by_id(identifier=identifier,
                                                       identifier2=found["id"],
                                                       limit=1)
                    new_found = x["result"]["list"][0]
                    new_found["withUser"] = found["withUser"]
                    mass_message["found"] = new_found
                    mass_message["hashed_ip"] = hash
                    mass_message["date_hashed"] = date_string
                global_found.append(found)
        main_helper.export_json(mass_messages, mass_message_path)
        return global_found
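The refresh logic near the end of process_mass_messages re-fetches a message when the stored salt fingerprint no longer matches or more than a day has passed since the record was hashed. A stripped-down sketch of that staleness check, using only the standard library and illustrative names:

import hashlib
from datetime import datetime, timedelta

DATE_FORMAT = "%d-%m-%Y %H:%M:%S"


def fingerprint(salt: str) -> str:
    """MD5 hex digest of the configured salt, as stored in `hashed_ip` above."""
    return hashlib.md5(salt.encode("utf-8")).hexdigest()


def is_stale(record: dict, salt: str, now=None) -> bool:
    """True when the cached record should be re-fetched."""
    now = now or datetime.today()
    hashed_at = datetime.strptime(record["date_hashed"], DATE_FORMAT)
    expired = now > hashed_at + timedelta(days=1)
    return record["hashed_ip"] != fingerprint(salt) or expired


# record = {"hashed_ip": fingerprint("old-salt"),
#           "date_hashed": datetime.today().strftime(DATE_FORMAT)}
# is_stale(record, "new-salt")  -> True, because the salt changed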
Example #4
# Relies on module-level names from the source file: Optional/Any (typing),
# the m_onlyfans/m_fansly/m_starsavn modules, OptionsFormat, account_setup,
# main_helper, user_types/auth_types, and a `webhooks` flag defined elsewhere.
async def default(
    datascraper: Optional[m_onlyfans.OnlyFansDataScraper]
    | Optional[m_fansly.FanslyDataScraper]
    | Optional[m_starsavn.StarsAVNDataScraper],
):
    if not datascraper:
        return
    api = datascraper.api
    global_settings = api.get_global_settings()
    site_settings = api.get_site_settings()
    if not (global_settings and site_settings):
        return
    await main_helper.process_profiles(api, global_settings)
    subscription_array: list[user_types] = []
    auth_count = 0
    profile_options = OptionsFormat(
        api.auths, "profiles", site_settings.auto_profile_choice
    )
    api.auths = profile_options.final_choices
    identifiers = []
    if site_settings.auto_model_choice:
        subscription_options = OptionsFormat(
            subscription_array, "subscriptions", site_settings.auto_model_choice
        )
        if not subscription_options.scrape_all():
            identifiers = subscription_options.choice_list
    for auth in api.auths:
        auth: auth_types = auth
        if not auth.auth_details:
            continue
        setup = False
        setup, subscriptions = await account_setup(
            auth, datascraper, site_settings, identifiers
        )
        if not setup:
            if webhooks:
                await main_helper.process_webhooks(
                    api, "auth_webhook", "failed", global_settings
                )
            auth_details: dict[str, Any] = {}
            auth_details["auth"] = auth.auth_details.export()
            profile_directory = api.base_directory_manager.profile.root_directory
            user_auth_filepath = profile_directory.joinpath(
                api.site_name, auth.auth_details.username, "auth.json"
            )
            main_helper.export_json(auth_details, user_auth_filepath)
            continue
        auth_count += 1
        subscription_array.extend(subscriptions)
        await main_helper.process_webhooks(
            api, "auth_webhook", "succeeded", global_settings
        )
        # Do stuff with authed user
    subscription_options = OptionsFormat(
        subscription_array, "subscriptions", site_settings.auto_model_choice
    )
    datascraper.subscription_options = subscription_options
    subscription_list = subscription_options.final_choices
    await main_helper.process_jobs(datascraper, subscription_list, site_settings)
    await main_helper.process_downloads(api, datascraper, global_settings)
    if webhooks:
        await main_helper.process_webhooks(
            api, "download_webhook", "succeeded", global_settings
        )
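A note on the parameter annotation: Optional[T] is just T | None, and unions flatten and de-duplicate their members, so the annotation above describes the same set of types as Optional[OnlyFansDataScraper | FanslyDataScraper | StarsAVNDataScraper]. A quick check with placeholder classes (Python 3.10+):

from typing import Optional, get_args


class X: ...
class Y: ...
class Z: ...


# `|` between typing constructs requires Python 3.10+.
combined = Optional[X] | Optional[Y] | Optional[Z]
# Union flattens and de-duplicates, so only one NoneType member remains.
print(set(get_args(combined)) == {X, Y, Z, type(None)})  # True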
Example #5
# Relies on module-level imports in the source file: os, timeit, main_helper,
# api_helper, choose_option, the OnlyFans/StarsAVN API classes, and the
# m_onlyfans/m_starsavn site modules.
def start_datascraper(json_config,
                      site_name_lower,
                      apis: list = [],
                      webhooks=True):
    json_settings = json_config["settings"]
    json_sites = json_config["supported"]
    domain = json_settings["auto_site_choice"]
    main_helper.assign_vars(json_config)

    json_site_settings = json_sites[site_name_lower]["settings"]

    auto_scrape_names = json_site_settings["auto_scrape_names"]
    if isinstance(auto_scrape_names, str):
        temp_identifiers = auto_scrape_names.split(",")
        identifiers = [x for x in temp_identifiers if x]
    else:
        identifiers = []
    auto_profile_choice = json_site_settings["auto_profile_choice"]
    subscription_array = []
    original_sessions = api_helper.create_session(settings=json_settings)
    original_sessions = [x for x in original_sessions if x]
    if not original_sessions:
        print("Unable to create session")
        return False
    archive_time = timeit.default_timer()
    if site_name_lower == "onlyfans":
        site_name = "OnlyFans"
        original_api = OnlyFans
        module = m_onlyfans
        if not apis:
            apis = main_helper.process_profiles(json_settings,
                                                original_sessions, site_name,
                                                original_api)
        else:
            for api in apis:
                api.sessions = original_sessions
        subscription_array = []
        auth_count = -1
        jobs = json_site_settings["jobs"]
        subscription_list = module.format_options(apis, "users")
        if not auto_profile_choice:
            print("Choose Profile")
        apis = choose_option(subscription_list, auto_profile_choice)
        apis = [x.pop(0) for x in apis]
        for api in apis:
            module.assign_vars(api.auth.auth_details, json_config,
                               json_site_settings, site_name)
            setup = False
            setup, subscriptions = module.account_setup(api, identifiers, jobs)
            if not setup:
                api.auth.auth_details.active = False
                auth_details = api.auth.auth_details.__dict__
                user_auth_filepath = os.path.join(api.auth.profile_directory,
                                                  "auth.json")
                main_helper.export_json(user_auth_filepath, auth_details)
                continue
            subscription_array += subscriptions
        subscription_list = module.format_options(subscription_array,
                                                  "usernames")
        if jobs["scrape_paid_content"]:
            print("Scraping Paid Content")
            paid_content = module.paid_content_scraper(apis)
        if jobs["scrape_names"]:
            print("Scraping Subscriptions")
            x = main_helper.process_names(module, subscription_list,
                                          auto_scrape_names, apis, json_config,
                                          site_name_lower, site_name)
        x = main_helper.process_downloads(apis, module)
        if webhooks:
            x = main_helper.process_webhooks(apis)
    elif site_name_lower == "starsavn":
        site_name = "StarsAVN"
        original_api = StarsAVN
        module = m_starsavn
        apis = main_helper.process_profiles(json_settings, original_sessions,
                                            site_name, original_api)
        auto_profile_choice = json_site_settings["auto_profile_choice"]
        subscription_array = []
        auth_count = -1
        jobs = json_site_settings["jobs"]
        subscription_list = module.format_options(apis, "users")
        apis = choose_option(subscription_list, auto_profile_choice)
        apis = [x.pop(0) for x in apis]
        for api in apis:
            module.assign_vars(api.auth.auth_details, json_config,
                               json_site_settings, site_name)
            identifier = ""
            setup = False
            setup = module.account_setup(api, identifier=identifier)
            if not setup:
                api.auth.auth_details.active = False
                auth_details = api.auth.auth_details.__dict__
                user_auth_filepath = os.path.join(api.auth.profile_directory,
                                                  "auth.json")
                main_helper.export_json(user_auth_filepath, auth_details)
                continue
            if jobs["scrape_names"]:
                array = module.manage_subscriptions(api,
                                                    auth_count,
                                                    identifier=identifier)
                subscription_array += array
        subscription_list = module.format_options(subscription_array,
                                                  "usernames")
        if jobs["scrape_paid_content"]:
            print("Scraping Paid Content")
            paid_content = module.paid_content_scraper(apis)
        if jobs["scrape_names"]:
            print("Scraping Subscriptions")
            x = main_helper.process_names(module, subscription_list,
                                          auto_scrape_names, apis, json_config,
                                          site_name_lower, site_name)
        x = main_helper.process_downloads(apis, module)
    stop_time = str(int(timeit.default_timer() - archive_time) / 60)[:4]
    print('Archive Completed in ' + stop_time + ' Minutes')
    return apis
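Two self-contained pieces of start_datascraper are the comma-separated auto_scrape_names handling and the elapsed-time report at the end. A small sketch of both, standard library only; the helper name is made up, and the timing line uses a format spec instead of the string slice used above:

import timeit


def parse_identifiers(auto_scrape_names) -> list:
    """Accept either a comma-separated string or a non-string flag."""
    if isinstance(auto_scrape_names, str):
        return [name for name in auto_scrape_names.split(",") if name]
    return []


start = timeit.default_timer()
# ... scraping work would happen here ...
elapsed_minutes = (timeit.default_timer() - start) / 60
print(f"Archive Completed in {elapsed_minutes:.2f} Minutes")

# parse_identifiers("alice,bob,")  -> ['alice', 'bob']
# parse_identifiers(True)          -> []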