def check_profiles():
    """Ensure an OnlyFans profile with an auth.json exists.

    Reads the first configured profile directory from .settings/config.json.
    If no profile under <profile_dir>/OnlyFans contains an auth.json, a
    "default" profile is created with a blank exported auth template and the
    user is prompted to fill it in.

    Side effects: creates directories/files on disk and blocks on input().
    """
    file_name = "config.json"
    path = os.path.join('.settings', file_name)
    import helpers.main_helper as main_helper
    from apis.onlyfans.onlyfans import auth_details

    json_config, _json_config2 = main_helper.get_config(path)
    json_settings = json_config["settings"]
    profile_directories = json_settings["profile_directories"]
    profile_directory = profile_directories[0]
    matches = ["OnlyFans"]
    for site_name in matches:
        site_root = os.path.abspath(profile_directory)
        profile_site_directory = os.path.join(site_root, site_name)
        if os.path.exists(profile_site_directory):
            # Skip this site if any existing profile already has an auth.json.
            auth_files = [
                os.path.join(profile_site_directory, entry, "auth.json")
                for entry in os.listdir(profile_site_directory)
            ]
            if any(os.path.exists(auth_file) for auth_file in auth_files):
                continue
        default_profile_directory = os.path.join(
            profile_site_directory, "default")
        os.makedirs(default_profile_directory, exist_ok=True)
        auth_filepath = os.path.join(default_profile_directory, "auth.json")
        if not os.path.exists(auth_filepath):
            # Export an empty auth template for the user to fill in.
            new_item = {}
            new_item["auth"] = auth_details().export()
            main_helper.export_data(new_item, auth_filepath)
            string = f"{auth_filepath} has been created. Fill in the relevant details and then press enter to continue."
            input(string)
    # Removed two dead bare `print` expressions (the builtin was referenced
    # but never called) that trailed the original function body.
def check_config():
    """Pause for user input when get_config rewrote the on-disk config.

    Compares the parsed config against the version that get_config wrote
    back; any difference means new keys were added and the user should
    review the file before the scraper continues.
    """
    file_name = "config.json"
    path = os.path.join('.settings', file_name)
    parsed_config, written_config = main_helper.get_config(path)
    config_changed = parsed_config != written_config
    if config_changed:
        prompt = f"The .settings\\{file_name} file has been updated. Fill in whatever you need to fill in and then press enter when done.\n"
        input(prompt)
def check_config():
    """Load .settings/config.json, pausing for the user if it was updated.

    Returns the parsed config object from main_helper.get_config.
    """
    file_name = "config.json"
    path = os.path.join('.settings', file_name)
    import helpers.main_helper as main_helper
    config, was_updated = main_helper.get_config(path)
    if was_updated:
        message = f"The .settings\\{file_name} file has been updated. Fill in whatever you need to fill in and then press enter when done.\n"
        input(message)
    return config
def check_profiles(): config_path = Path(".settings", "config.json") import helpers.main_helper as main_helper from apis.onlyfans.onlyfans import auth_details as onlyfans_auth_details from apis.fansly.fansly import auth_details as fansly_auth_details from apis.starsavn.starsavn import auth_details as starsavn_auth_details config, _updated = main_helper.get_config(config_path) settings = config.settings profile_directories = settings.profile_directories profile_directory = profile_directories[0] matches = ["OnlyFans", "Fansly", "StarsAVN"] for string_match in matches: profile_site_directory = profile_directory.joinpath(string_match) if os.path.exists(profile_site_directory): e = os.listdir(profile_site_directory) e = [os.path.join(profile_site_directory, x, "auth.json") for x in e] e = [x for x in e if os.path.exists(x)] if e: continue default_profile_directory = profile_site_directory.joinpath("default") os.makedirs(default_profile_directory, exist_ok=True) auth_filepath = default_profile_directory.joinpath("auth.json") if not os.path.exists(auth_filepath): new_item: dict[str, Any] = {} match string_match: case "OnlyFans": new_item["auth"] = onlyfans_auth_details().export() case "Fansly": new_item["auth"] = fansly_auth_details().export() case "StarsAVN": new_item["auth"] = starsavn_auth_details().export() case _: continue main_helper.export_json(new_item, auth_filepath) main_helper.prompt_modified( f"{auth_filepath} has been created. Fill in the relevant details and then press enter to continue.", auth_filepath, ) print print
def check_config():
    """Migrate legacy config keys to their current names, then prompt the
    user whenever the on-disk file differs from what get_config wrote back.
    """
    file_name = "config.json"
    path = os.path.join('.settings', file_name)
    json_config, json_config2 = main_helper.get_config(path)
    if json_config:
        # --- global settings migrations ---
        new_settings = json_config["settings"].copy()
        for key, value in json_config["settings"].items():
            # socks5_proxy used to be a single value; it is now a list.
            if key == "socks5_proxy":
                if not isinstance(value, list):
                    new_settings[key] = [value]
            # "global_user-agent" was renamed to "global_user_agent".
            if key == "global_user-agent":
                new_settings["global_user_agent"] = value
                del new_settings["global_user-agent"]
        json_config["settings"] = new_settings
        # --- per-site settings migrations ---
        for key, value in json_config["supported"].items():
            settings = value["settings"]
            # Two-step rename: "directory" -> "download_path" ->
            # "download_paths" (now a list).
            if "directory" in settings:
                if not settings["directory"]:
                    settings["directory"] = ["{site_name}"]
                settings["download_path"] = settings["directory"]
                del settings["directory"]
            if "download_path" in settings:
                settings["download_paths"] = [settings["download_path"]]
                del settings["download_path"]
            # Rename the {id} placeholder to {media_id} in filename formats.
            file_name_format = settings["file_name_format"]
            top = ["{id}"]
            bottom = ["{media_id}"]
            z = list(zip(top, bottom))
            for x in z:
                if x[0] in file_name_format:
                    settings["file_name_format"] = file_name_format.replace(
                        x[0], x[1])
                    new = settings["file_name_format"]
                    print("Changed " + file_name_format + " to " + new +
                          " for " + key)
    # Persist migrations and let the user review the rewritten file.
    if json_config != json_config2:
        main_helper.update_config(json_config)
        input(
            f"The .settings\\{file_name} file has been updated. Fill in whatever you need to fill in and then press enter when done.\n"
        )
import tests.main_test as main_test import os import time import traceback import logging main_test.version_check() main_test.check_config() main_test.check_profiles() if __name__ == "__main__": import datascraper.main_datascraper as main_datascraper import helpers.main_helper as main_helper config_path = os.path.join(".settings", "config.json") json_config, json_config2 = main_helper.get_config(config_path) json_settings = json_config["settings"] exit_on_completion = json_settings["exit_on_completion"] infinite_loop = json_settings["infinite_loop"] loop_timeout = json_settings["loop_timeout"] json_sites = json_config["supported"] domain = json_settings["auto_site_choice"] string, site_names = main_helper.module_chooser(domain, json_sites) # logging.basicConfig(level=logging.DEBUG, format="%(message)s") while True: try: if domain: if site_names: site_name = domain else:
def start_datascraper():
    """Interactive scraper entry point (legacy multi-site version).

    Builds a site-chooser menu, authenticates every configured account for
    the chosen site, collects its subscriptions, then scrapes — and, unless
    -m/--metadata was passed, downloads — each selected name in an endless
    menu loop.
    """
    parser = ArgumentParser()
    parser.add_argument("-m",
                        "--metadata",
                        action='store_true',
                        help="only exports metadata")
    args = parser.parse_args()
    if args.metadata:
        print("Exporting Metadata Only")
    # Errors are written to errors.log and echoed to the console.
    log_error = main_helper.setup_logger('errors', 'errors.log')
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s %(name)s %(message)s')
    console.setFormatter(formatter)
    logging.getLogger("").addHandler(console)
    # root = os.getcwd()
    config_path = os.path.join('.settings', 'config.json')
    json_config, json_config2 = main_helper.get_config(config_path)
    json_settings = json_config["settings"]
    json_sites = json_config["supported"]
    infinite_loop = json_settings["infinite_loop"]
    global_user_agent = json_settings['global_user_agent']
    domain = json_settings["auto_site_choice"]
    path = os.path.join('.settings', 'extra_auth.json')
    extra_auth_config = json.load(open(path))
    exit_on_completion = json_settings['exit_on_completion']
    loop_timeout = json_settings['loop_timeout']
    # Build the "Site: 0 = a | 1 = b | x = Exit" menu, skipping
    # blacklisted sites.
    string = "Site: "
    site_names = []
    bl = ["patreon"]
    if not domain:
        site_count = len(json_sites)
        count = 0
        for x in json_sites:
            if x in bl:
                continue
            string += str(count) + " = " + x
            site_names.append(x)
            if count + 1 != site_count:
                string += " | "
            count += 1
        string += "x = Exit"
    try:
        while True:
            # auto_site_choice bypasses the interactive menu entirely.
            if domain:
                site_name = domain
            else:
                print(string)
                x = input()
                if x == "x":
                    break
                x = int(x)
                site_name = site_names[x]
            site_name_lower = site_name.lower()
            json_auth_array = [json_sites[site_name_lower]["auth"]]
            json_site_settings = json_sites[site_name_lower]["settings"]
            auto_scrape_names = json_site_settings["auto_scrape_names"]
            extra_auth_settings = json_sites[site_name_lower][
                "extra_auth_settings"] if "extra_auth_settings" in json_sites[
                    site_name_lower] else {
                        "extra_auth": False
                    }
            extra_auth = extra_auth_settings["extra_auth"]
            if extra_auth:
                choose_auth = extra_auth_settings["choose_auth"]
                # NOTE(review): merge_auth is read but never used afterwards.
                merge_auth = extra_auth_settings["merge_auth"]
                json_auth_array += extra_auth_config[site_name_lower][
                    "extra_auth"]
                if choose_auth:
                    json_auth_array = main_helper.choose_auth(json_auth_array)
            session_array = []
            x = onlyfans  # default site module; reassigned per site below
            app_token = ""
            subscription_array = []
            legacy = True  # legacy sites share a single session (see below)
            if site_name_lower == "onlyfans":
                legacy = False
                site_name = "OnlyFans"
                subscription_array = []
                auth_count = -1
                x.assign_vars(json_config, json_site_settings, site_name)
                for json_auth in json_auth_array:
                    auth_count += 1
                    app_token = json_auth['app_token']
                    user_agent = global_user_agent if not json_auth[
                        'user_agent'] else json_auth['user_agent']
                    x = onlyfans
                    session = x.create_session()
                    if not session:
                        print("Unable to create session")
                        continue
                    session = x.create_auth(session, user_agent, app_token,
                                            json_auth)
                    session_array.append(session)
                    if not session["session"]:
                        continue
                    # x.get_paid_posts(session["session"],app_token)
                    # Persist the refreshed auth cookies back into the config.
                    cookies = session["session"].cookies.get_dict()
                    auth_id = cookies["auth_id"]
                    json_auth['auth_id'] = auth_id
                    json_auth['auth_uniq_'] = cookies["auth_uniq_" + auth_id]
                    json_auth['auth_hash'] = cookies["auth_hash"]
                    json_auth['sess'] = cookies["sess"]
                    json_auth['fp'] = cookies["fp"]
                    if json_config != json_config2:
                        update_config(json_config)
                    me_api = session["me_api"]
                    array = x.get_subscriptions(session["session"], app_token,
                                                session["subscriber_count"],
                                                me_api, auth_count)
                    subscription_array += array
                subscription_array = x.format_options(subscription_array,
                                                      "usernames")
            # NOTE(review): plain "if" (not elif) below, though only one
            # branch can match a given site_name_lower.
            if site_name_lower == "patreon":
                legacy = False
                site_name = "Patreon"
                subscription_array = []
                auth_count = -1
                x = patreon
                x.assign_vars(json_config, json_site_settings, site_name)
                for json_auth in json_auth_array:
                    auth_count += 1
                    user_agent = global_user_agent if not json_auth[
                        'user_agent'] else json_auth['user_agent']
                    session = x.create_session()
                    session = x.create_auth(session, user_agent, json_auth)
                    session_array.append(session)
                    if not session["session"]:
                        continue
                    cookies = session["session"].cookies.get_dict()
                    json_auth['session_id'] = cookies["session_id"]
                    if json_config != json_config2:
                        update_config(json_config)
                    me_api = session["me_api"]
                    array = x.get_subscriptions(session["session"],
                                                auth_count)
                    subscription_array += array
                subscription_array = x.format_options(subscription_array,
                                                      "usernames")
            elif site_name_lower == "starsavn":
                legacy = False
                site_name = "StarsAVN"
                subscription_array = []
                auth_count = -1
                x = starsavn
                x.assign_vars(json_config, json_site_settings, site_name)
                for json_auth in json_auth_array:
                    auth_count += 1
                    user_agent = global_user_agent if not json_auth[
                        'user_agent'] else json_auth['user_agent']
                    sess = json_auth['sess']
                    # NOTE(review): auth_array is built but never used below.
                    auth_array = dict()
                    auth_array["sess"] = sess
                    session = x.create_session()
                    # NOTE(review): app_token here is whatever a previous
                    # branch left behind ("" unless OnlyFans ran first) —
                    # confirm against starsavn.create_auth's expectations.
                    session = x.create_auth(session, user_agent, app_token,
                                            json_auth)
                    session_array.append(session)
                    if not session["session"]:
                        continue
                    me_api = session["me_api"]
                    array = x.get_subscriptions(session["session"], app_token,
                                                session["subscriber_count"],
                                                me_api, auth_count)
                    subscription_array += array
                subscription_array = x.format_options(subscription_array,
                                                      "usernames")
            # NOTE(review): the chan branches compare the original-cased
            # site_name, unlike the lowercase comparisons above.
            elif site_name == "fourchan":
                x = fourchan
                site_name = "4Chan"
                x.assign_vars(json_config, json_site_settings, site_name)
                session_array = [x.create_session()]
                array = x.get_subscriptions()
                subscription_array = x.format_options(array)
            elif site_name == "bbwchan":
                x = bbwchan
                site_name = "BBWChan"
                x.assign_vars(json_config, json_site_settings, site_name)
                session_array = [x.create_session()]
                array = x.get_subscriptions()
                subscription_array = x.format_options(array)
            names = subscription_array[0]
            if names:
                # Let the user pick a single index, a substring filter, or
                # "0" for everyone (index 0 is a pseudo-entry).
                print("Names: Username = username | " + subscription_array[1])
                if not auto_scrape_names:
                    value = input().strip()
                    if value.isdigit():
                        if value == "0":
                            names = names[1:]
                        else:
                            names = [names[int(value)]]
                    else:
                        names = [name for name in names if value in name[1]]
                else:
                    value = 0
                    names = names[1:]
            else:
                print("There's nothing to scrape.")
                continue
            start_time = timeit.default_timer()
            download_list = []
            for name in names:
                # Extra Auth Support
                if not legacy:
                    # Non-legacy names carry (auth_index, ..., username);
                    # pick the matching session for that auth.
                    json_auth = json_auth_array[name[0]]
                    auth_count = name[0]
                    session = session_array[auth_count]["session"]
                    name = name[-1]
                else:
                    session = session_array[0]["session"]
                main_helper.assign_vars(json_config)
                username = main_helper.parse_links(site_name_lower, name)
                result = x.start_datascraper(session,
                                             username,
                                             site_name,
                                             app_token,
                                             choice_type=value)
                if not args.metadata:
                    download_list.append(result)
            for y in download_list:
                for arg in y[1]:
                    x.download_media(*arg)
            stop_time = str(int(timeit.default_timer() - start_time) / 60)
            print('Task Completed in ' + stop_time + ' Minutes')
            if exit_on_completion:
                print("Now exiting.")
                exit(0)
            elif not infinite_loop:
                print("Input anything to continue")
                input()
            elif loop_timeout:
                print('Pausing scraper for ' + loop_timeout + ' seconds.')
                time.sleep(int(loop_timeout))
    except Exception as e:
        log_error.exception(e)
        input()
directory=directory_manager.root_metadata_directory, ) p_r.api_type = api_type result: list[api_table] = database_session.query(api_table_).all() metadata = getattr(subscription.temp_scraped, api_type) await fix_directories( result, subscription, database_session, api_type, ) database_session.close() return metadata if __name__ == "__main__": # WORK IN PROGRESS from classes.make_settings import Config from apis.onlyfans import onlyfans as OnlyFans import helpers.main_helper as main_helper config_path = Path(".settings", "config.json") config, _updated = main_helper.get_config(config_path) api_ = OnlyFans.start( config=config, ) subscription = "" input("WIP") exit()
def check_config():
    """Return the parsed config loaded from .settings/config.json."""
    import helpers.main_helper as main_helper

    settings_file = Path(".settings", "config.json")
    loaded_config, _was_updated = main_helper.get_config(settings_file)
    return loaded_config
else: while not os.path.exists(settings_directory): config = os.path.join('.settings', 'config.json') json_config, json_config2 = main_helper2.get_config(config) directory = json_config["ofd_directory"] if not directory: input( "Add the OnlyFans Datascraper directory to .settings/config.json and press enter\n") continue settings_directory = os.path.join(directory, ".settings") while True: config = os.path.join(settings_directory, 'config.json') sys.path.append(directory) import helpers.main_helper as main_helper from classes.prepare_metadata import prepare_metadata json_config, json_config = main_helper.get_config(config) if not json_config: input( "Add the OnlyFans Datascraper config filepath to .settings/config.json and press enter\n") continue print json_config = json_config["supported"] choices = ["All"] + list(json_config.keys()) count = 0 max_count = len(choices) string = "" for choice in choices: string += str(count) + " = "+choice count += 1 if count < max_count:
def start_datascraper():
    """Interactive scraper entry point (API-class rewrite).

    Chooses a site via module_chooser, authenticates every configured
    account through the site's API object, then runs the configured jobs
    (scrape names / scrape paid content) in an endless menu loop.
    """
    parser = ArgumentParser()
    parser.add_argument("-m",
                        "--metadata",
                        action='store_true',
                        help="only exports metadata")
    args = parser.parse_args()
    if args.metadata:
        print("Exporting Metadata Only")
    # Errors are written to errors.log and echoed to the console.
    log_error = main_helper.setup_logger('errors', 'errors.log')
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s %(name)s %(message)s')
    console.setFormatter(formatter)
    logging.getLogger("").addHandler(console)
    # root = os.getcwd()
    config_path = os.path.join('.settings', 'config.json')
    json_config, json_config2 = main_helper.get_config(config_path)
    json_settings = json_config["settings"]
    json_sites = json_config["supported"]
    infinite_loop = json_settings["infinite_loop"]
    global_user_agent = json_settings['global_user_agent']
    domain = json_settings["auto_site_choice"]
    path = os.path.join('.settings', 'extra_auth.json')
    # extra_auth_config, extra_auth_config2 = main_helper.get_config(path)
    extra_auth_config = {}
    exit_on_completion = json_settings['exit_on_completion']
    loop_timeout = json_settings['loop_timeout']
    main_helper.assign_vars(json_config)
    string, site_names = module_chooser(domain, json_sites)
    try:
        while True:
            # Pick a site: auto_site_choice is used when valid, otherwise
            # the menu is printed and the loop retries.
            if domain:
                if site_names:
                    site_name = domain
                else:
                    print(string)
                    continue
            else:
                print(string)
                x = input()
                if x == "x":
                    break
                x = int(x)
                site_name = site_names[x]
            site_name_lower = site_name.lower()
            json_auth_array = [json_sites[site_name_lower]["auth"]]
            json_site_settings = json_sites[site_name_lower]["settings"]
            auto_scrape_names = json_site_settings["auto_scrape_names"]
            extra_auth_settings = json_sites[site_name_lower][
                "extra_auth_settings"] if "extra_auth_settings" in json_sites[
                    site_name_lower] else {
                        "extra_auth": False
                    }
            extra_auth = extra_auth_settings["extra_auth"]
            if extra_auth:
                choose_auth = extra_auth_settings["choose_auth"]
                # NOTE(review): merge_auth is read but never used afterwards.
                merge_auth = extra_auth_settings["merge_auth"]
                # NOTE(review): extra_auth_config is hard-coded to {} above,
                # so this lookup would raise KeyError if extra_auth is on.
                json_auth_array += extra_auth_config["supported"][
                    site_name_lower]["auths"]
                if choose_auth:
                    json_auth_array = main_helper.choose_auth(json_auth_array)
            apis = []
            module = m_onlyfans  # default module; reassigned per site below
            subscription_array = []
            legacy = True  # NOTE(review): set but never read in this version
            original_sessions = api_helper.create_session(
                settings=json_settings)
            if not original_sessions:
                print("Unable to create session")
                continue
            archive_time = timeit.default_timer()
            if site_name_lower == "onlyfans":
                site_name = "OnlyFans"
                subscription_array = []
                auth_count = -1
                jobs = json_site_settings["jobs"]
                for json_auth in json_auth_array:
                    api = OnlyFans.start(original_sessions)
                    auth_count += 1
                    user_agent = global_user_agent if not json_auth[
                        'user_agent'] else json_auth['user_agent']
                    module = m_onlyfans
                    module.assign_vars(json_auth, json_config,
                                       json_site_settings, site_name)
                    api.set_auth_details(**json_auth,
                                         global_user_agent=user_agent)
                    identifier = ""
                    setup = module.account_setup(api, identifier=identifier)
                    if not setup:
                        continue
                    if jobs["scrape_names"]:
                        array = module.manage_subscriptions(
                            api, auth_count, identifier=identifier)
                        subscription_array += array
                    apis.append(api)
                subscription_list = module.format_options(
                    subscription_array, "usernames")
                if jobs["scrape_paid_content"]:
                    print("Scraping Paid Content")
                    paid_content = module.paid_content_scraper(apis)
                if jobs["scrape_names"]:
                    print("Scraping Subscriptions")
                    x = main_helper.process_names(module, subscription_list,
                                                  auto_scrape_names,
                                                  json_auth_array, apis,
                                                  json_config,
                                                  site_name_lower, site_name)
                x = main_helper.process_downloads(apis, module)
                print  # NOTE(review): leftover no-op (print never called)
            elif site_name_lower == "starsavn":
                site_name = "StarsAVN"
                subscription_array = []
                auth_count = -1
                for json_auth in json_auth_array:
                    # Each auth gets its own copy of the session pool.
                    sessions = api_helper.copy_sessions(original_sessions)
                    api = StarsAVN.start(sessions)
                    auth_count += 1
                    user_agent = global_user_agent if not json_auth[
                        'user_agent'] else json_auth['user_agent']
                    module = m_starsavn
                    module.assign_vars(json_auth, json_config,
                                       json_site_settings, site_name)
                    api.set_auth_details(**json_auth,
                                         global_user_agent=user_agent)
                    setup = module.account_setup(api)
                    if not setup:
                        continue
                    jobs = json_site_settings["jobs"]
                    if jobs["scrape_names"]:
                        array = module.manage_subscriptions(api, auth_count)
                        subscription_array += array
                    if jobs["scrape_paid_content"]:
                        paid_contents = api.get_paid_content()
                        paid_content = module.paid_content_scraper(api)
                    apis.append(api)
                subscription_array = module.format_options(
                    subscription_array, "usernames")
            # Elapsed minutes, truncated to four characters for display.
            stop_time = str(int(timeit.default_timer() - archive_time) /
                            60)[:4]
            print('Archive Completed in ' + stop_time + ' Minutes')
            if exit_on_completion:
                print("Now exiting.")
                exit(0)
            elif not infinite_loop:
                print("Input anything to continue")
                input()
            elif loop_timeout:
                print('Pausing scraper for ' + loop_timeout + ' seconds.')
                time.sleep(int(loop_timeout))
    except Exception as e:
        log_error.exception(e)
        input()