# Four side-by-side revisions of start_datascraper follow; in a live module,
# later definitions would shadow earlier ones. Imports are assumed from the
# surrounding project layout (module paths are illustrative).
import logging
import os
import time
import timeit
from argparse import ArgumentParser
from typing import Optional

import modules.onlyfans as m_onlyfans
import modules.starsavn as m_starsavn
from apis import api_helper
from apis.onlyfans import onlyfans as OnlyFans
from apis.starsavn import starsavn as StarsAVN
from helpers import main_helper
from helpers.main_helper import choose_option, module_chooser


# Variant: profile-based scraper that fires "auth_webhook"/"download_webhook"
# notifications and copies the freshly created sessions into each profile API.
def start_datascraper(json_config, site_name_lower,
                      apis: Optional[list] = None, webhooks=True):
    # A None sentinel avoids sharing one mutable default list across calls.
    if apis is None:
        apis = []
    json_settings = json_config["settings"]
    json_webhooks = json_settings["webhooks"]
    json_sites = json_config["supported"]
    domain = json_settings["auto_site_choice"]
    main_helper.assign_vars(json_config)
    json_site_settings = json_sites[site_name_lower]["settings"]
    auto_scrape_names = json_site_settings["auto_scrape_names"]
    if isinstance(auto_scrape_names, str):
        temp_identifiers = auto_scrape_names.split(",")
        identifiers = [x for x in temp_identifiers if x]
    else:
        identifiers = []
    auto_profile_choice = json_site_settings["auto_profile_choice"]
    subscription_array = []
    original_sessions = api_helper.create_session(settings=json_settings)
    original_sessions = [x for x in original_sessions if x]
    if not original_sessions:
        print("Unable to create session")
        return False
    archive_time = timeit.default_timer()
    if site_name_lower == "onlyfans":
        site_name = "OnlyFans"
        original_api = OnlyFans
        module = m_onlyfans
        if not apis:
            session_manager = api_helper.session_manager()
            apis = main_helper.process_profiles(
                json_settings, session_manager, site_name, original_api)
        subscription_array = []
        auth_count = -1
        jobs = json_site_settings["jobs"]
        subscription_list = module.format_options(apis, "users")
        if not auto_profile_choice:
            print("Choose Profile")
        apis = choose_option(subscription_list, auto_profile_choice)
        apis = [x.pop(0) for x in apis]
        for api in apis:
            api.session_manager.copy_sessions(original_sessions)
            module.assign_vars(api.auth.auth_details, json_config,
                               json_site_settings, site_name)
            setup, subscriptions = module.account_setup(api, identifiers, jobs)
            if not setup:
                if webhooks:
                    x = main_helper.process_webhooks(
                        [api], "auth_webhook", "failed")
                # Persist the failed auth details so the user can fix them.
                auth_details = {"auth": api.auth.auth_details.__dict__}
                profile_directory = api.auth.profile_directory
                if profile_directory:
                    user_auth_filepath = os.path.join(
                        profile_directory, "auth.json")
                    main_helper.export_data(auth_details, user_auth_filepath)
                continue
            subscription_array += subscriptions
            x = main_helper.process_webhooks(
                [api], "auth_webhook", "succeeded")
        subscription_list = module.format_options(
            subscription_array, "usernames")
        if jobs["scrape_paid_content"]:
            print("Scraping Paid Content")
            paid_content = module.paid_content_scraper(apis, identifiers)
        if jobs["scrape_names"]:
            print("Scraping Subscriptions")
            names = main_helper.process_names(
                module, subscription_list, auto_scrape_names, apis,
                json_config, site_name_lower, site_name)
        x = main_helper.process_downloads(apis, module)
        if webhooks:
            x = main_helper.process_webhooks(
                apis, "download_webhook", "succeeded")
    elif site_name_lower == "starsavn":
        site_name = "StarsAVN"
        original_api = StarsAVN
        module = m_starsavn
        apis = main_helper.process_profiles(
            json_settings, original_sessions, site_name, original_api)
        auto_profile_choice = json_site_settings["auto_profile_choice"]
        subscription_array = []
        auth_count = -1
        jobs = json_site_settings["jobs"]
        subscription_list = module.format_options(apis, "users")
        apis = choose_option(subscription_list, auto_profile_choice)
        apis = [x.pop(0) for x in apis]
        for api in apis:
            module.assign_vars(api.auth.auth_details, json_config,
                               json_site_settings, site_name)
            identifier = ""
            setup = module.account_setup(api, identifier=identifier)
            if not setup:
                auth_details = api.auth.auth_details.__dict__
                user_auth_filepath = os.path.join(
                    api.auth.profile_directory, "auth.json")
                main_helper.export_data(auth_details, user_auth_filepath)
                continue
            if jobs["scrape_names"]:
                array = module.manage_subscriptions(
                    api, auth_count, identifier=identifier)
                subscription_array += array
        subscription_list = module.format_options(
            subscription_array, "usernames")
        if jobs["scrape_paid_content"]:
            print("Scraping Paid Content")
            paid_content = module.paid_content_scraper(apis)
        if jobs["scrape_names"]:
            print("Scraping Subscriptions")
            names = main_helper.process_names(
                module, subscription_list, auto_scrape_names, apis,
                json_config, site_name_lower, site_name)
        x = main_helper.process_downloads(apis, module)
    stop_time = str(int(timeit.default_timer() - archive_time) / 60)[:4]
    print('Archive Completed in ' + stop_time + ' Minutes')
    return apis
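# A hedged usage sketch for the variant above: it loads the JSON config the
# way the CLI entry point below does (same .settings/config.json path and
# two-value main_helper.get_config return). The wrapper function itself and
# its failure handling are illustrative, not part of the project.
def _example_run_once():
    config_path = os.path.join('.settings', 'config.json')
    json_config, _json_config2 = main_helper.get_config(config_path)
    # start_datascraper returns False when no usable session could be
    # created, otherwise the list of authenticated profile APIs.
    apis = start_datascraper(json_config, "onlyfans", webhooks=True)
    if apis is False:
        print("No sessions available")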
# Variant: interactive CLI entry point; reads .settings/config.json, then
# loops over site choices until the user exits or the loop policy stops it.
def start_datascraper():
    parser = ArgumentParser()
    parser.add_argument("-m", "--metadata", action='store_true',
                        help="only exports metadata")
    args = parser.parse_args()
    if args.metadata:
        print("Exporting Metadata Only")
    # Log errors to errors.log and mirror all records to the console.
    log_error = main_helper.setup_logger('errors', 'errors.log')
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s %(name)s %(message)s')
    console.setFormatter(formatter)
    logging.getLogger("").addHandler(console)
    config_path = os.path.join('.settings', 'config.json')
    json_config, json_config2 = main_helper.get_config(config_path)
    json_settings = json_config["settings"]
    json_sites = json_config["supported"]
    infinite_loop = json_settings["infinite_loop"]
    global_user_agent = json_settings['global_user_agent']
    domain = json_settings["auto_site_choice"]
    path = os.path.join('.settings', 'extra_auth.json')
    # extra_auth_config, extra_auth_config2 = main_helper.get_config(path)
    extra_auth_config = {}
    exit_on_completion = json_settings['exit_on_completion']
    loop_timeout = json_settings['loop_timeout']
    main_helper.assign_vars(json_config)
    string, site_names = module_chooser(domain, json_sites)
    try:
        while True:
            # auto_site_choice (domain) skips the interactive site menu.
            if domain:
                if site_names:
                    site_name = domain
                else:
                    print(string)
                    continue
            else:
                print(string)
                x = input()
                if x == "x":
                    break
                x = int(x)
                site_name = site_names[x]
            site_name_lower = site_name.lower()
            json_auth_array = [json_sites[site_name_lower]["auth"]]
            json_site_settings = json_sites[site_name_lower]["settings"]
            auto_scrape_names = json_site_settings["auto_scrape_names"]
            extra_auth_settings = json_sites[site_name_lower].get(
                "extra_auth_settings", {"extra_auth": False})
            extra_auth = extra_auth_settings["extra_auth"]
            if extra_auth:
                choose_auth = extra_auth_settings["choose_auth"]
                merge_auth = extra_auth_settings["merge_auth"]
                json_auth_array += extra_auth_config["supported"][
                    site_name_lower]["auths"]
                if choose_auth:
                    json_auth_array = main_helper.choose_auth(json_auth_array)
            apis = []
            module = m_onlyfans
            subscription_array = []
            legacy = True
            original_sessions = api_helper.create_session(
                settings=json_settings)
            if not original_sessions:
                print("Unable to create session")
                continue
            archive_time = timeit.default_timer()
            if site_name_lower == "onlyfans":
                site_name = "OnlyFans"
                subscription_array = []
                auth_count = -1
                jobs = json_site_settings["jobs"]
                for json_auth in json_auth_array:
                    api = OnlyFans.start(original_sessions)
                    auth_count += 1
                    # A per-auth user_agent overrides the global one.
                    user_agent = json_auth['user_agent'] or global_user_agent
                    module = m_onlyfans
                    module.assign_vars(json_auth, json_config,
                                       json_site_settings, site_name)
                    api.set_auth_details(**json_auth,
                                         global_user_agent=user_agent)
                    identifier = ""
                    setup = module.account_setup(api, identifier=identifier)
                    if not setup:
                        continue
                    if jobs["scrape_names"]:
                        array = module.manage_subscriptions(
                            api, auth_count, identifier=identifier)
                        subscription_array += array
                    apis.append(api)
                subscription_list = module.format_options(
                    subscription_array, "usernames")
                if jobs["scrape_paid_content"]:
                    print("Scraping Paid Content")
                    paid_content = module.paid_content_scraper(apis)
                if jobs["scrape_names"]:
                    print("Scraping Subscriptions")
                    x = main_helper.process_names(
                        module, subscription_list, auto_scrape_names,
                        json_auth_array, apis, json_config,
                        site_name_lower, site_name)
                x = main_helper.process_downloads(apis, module)
            elif site_name_lower == "starsavn":
                site_name = "StarsAVN"
                subscription_array = []
                auth_count = -1
                for json_auth in json_auth_array:
                    sessions = api_helper.copy_sessions(original_sessions)
                    api = StarsAVN.start(sessions)
                    auth_count += 1
                    user_agent = json_auth['user_agent'] or global_user_agent
                    module = m_starsavn
                    module.assign_vars(json_auth, json_config,
                                       json_site_settings, site_name)
                    api.set_auth_details(**json_auth,
                                         global_user_agent=user_agent)
                    setup = module.account_setup(api)
                    if not setup:
                        continue
                    jobs = json_site_settings["jobs"]
                    if jobs["scrape_names"]:
                        array = module.manage_subscriptions(api, auth_count)
                        subscription_array += array
                    if jobs["scrape_paid_content"]:
                        paid_contents = api.get_paid_content()
                        paid_content = module.paid_content_scraper(api)
                    apis.append(api)
                subscription_array = module.format_options(
                    subscription_array, "usernames")
            stop_time = str(
                int(timeit.default_timer() - archive_time) / 60)[:4]
            print('Archive Completed in ' + stop_time + ' Minutes')
            if exit_on_completion:
                print("Now exiting.")
                exit(0)
            elif not infinite_loop:
                print("Input anything to continue")
                input()
            elif loop_timeout:
                print('Pausing scraper for ' + loop_timeout + ' seconds.')
                time.sleep(int(loop_timeout))
    except Exception as e:
        log_error.exception(e)
        input()
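# A minimal sketch of the config shape the CLI variant above reads, written
# as a Python literal. Every key below is read somewhere in that function;
# the values are illustrative placeholders, not project defaults, and real
# configs may carry many more fields.
_EXAMPLE_CONFIG = {
    "settings": {
        "auto_site_choice": "",          # e.g. "onlyfans" skips the site menu
        "global_user_agent": "Mozilla/5.0",
        "infinite_loop": True,
        "loop_timeout": "0",             # seconds between passes; int() is applied
        "exit_on_completion": False,
    },
    "supported": {
        "onlyfans": {
            "auth": {"user_agent": ""},  # falsy -> global_user_agent is used
            "settings": {
                "auto_scrape_names": False,
                "jobs": {"scrape_names": True, "scrape_paid_content": True},
            },
        },
    },
}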
# Variant: a single top-level API object that owns a list of auths; StarsAVN
# support is stubbed out, with its old flow kept below as commented-out code.
def start_datascraper(json_config: dict, site_name_lower: str,
                      api: Optional[OnlyFans.start] = None, webhooks=True):
    json_settings = json_config["settings"]
    json_webhooks = json_settings["webhooks"]
    json_sites = json_config["supported"]
    domain = json_settings["auto_site_choice"]
    main_helper.assign_vars(json_config)
    json_site_settings = json_sites[site_name_lower]["settings"]
    auto_model_choice = json_site_settings["auto_model_choice"]
    if isinstance(auto_model_choice, str):
        temp_identifiers = auto_model_choice.split(",")
        identifiers = [x for x in temp_identifiers if x]
    else:
        identifiers = []
    auto_profile_choice = json_site_settings["auto_profile_choice"]
    subscription_array = []
    original_sessions = api_helper.create_session(settings=json_settings)
    original_sessions = [x for x in original_sessions if x]
    if not original_sessions:
        print("Unable to create session")
        return False
    archive_time = timeit.default_timer()
    if site_name_lower == "onlyfans":
        site_name = "OnlyFans"
        module = m_onlyfans
        if not api:
            api = OnlyFans.start()
        api = main_helper.process_profiles(
            json_settings, original_sessions, site_name, api)
        subscription_array = []
        auth_count = 0
        jobs = json_site_settings["jobs"]
        subscription_list = module.format_options(api.auths, "users")
        if not auto_profile_choice:
            print("Choose Profile")
        auths = choose_option(subscription_list, auto_profile_choice, True)
        api.auths = [x.pop(0) for x in auths]
        for auth in api.auths:
            if not auth.auth_details:
                continue
            module.assign_vars(auth.auth_details, json_config,
                               json_site_settings, site_name)
            setup, subscriptions = module.account_setup(
                auth, identifiers, jobs, auth_count)
            if not setup:
                if webhooks:
                    x = main_helper.process_webhooks(
                        api, "auth_webhook", "failed")
                # Persist the failed auth details so the user can fix them.
                auth_details = {"auth": auth.auth_details.__dict__}
                profile_directory = auth.profile_directory
                if profile_directory:
                    user_auth_filepath = os.path.join(
                        profile_directory, "auth.json")
                    main_helper.export_data(auth_details, user_auth_filepath)
                continue
            auth_count += 1
            subscription_array += subscriptions
            x = main_helper.process_webhooks(api, "auth_webhook", "succeeded")
        subscription_list = module.format_options(
            subscription_array, "usernames")
        if jobs["scrape_paid_content"]:
            print("Scraping Paid Content")
            paid_content = module.paid_content_scraper(api, identifiers)
        if jobs["scrape_names"]:
            print("Scraping Subscriptions")
            names = main_helper.process_names(
                module, subscription_list, auto_model_choice, api,
                json_config, site_name_lower, site_name)
        x = main_helper.process_downloads(api, module)
        if webhooks:
            x = main_helper.process_webhooks(
                api, "download_webhook", "succeeded")
    elif site_name_lower == "starsavn":
        pass
        # site_name = "StarsAVN"
        # original_api = StarsAVN
        # module = m_starsavn
        # apis = main_helper.process_profiles(
        #     json_settings, original_sessions, site_name, original_api)
        # auto_profile_choice = json_site_settings["auto_profile_choice"]
        # subscription_array = []
        # auth_count = -1
        # jobs = json_site_settings["jobs"]
        # subscription_list = module.format_options(apis, "users")
        # apis = choose_option(subscription_list, auto_profile_choice)
        # apis = [x.pop(0) for x in apis]
        # for api in apis:
        #     module.assign_vars(api.auth.auth_details, json_config,
        #                        json_site_settings, site_name)
        #     identifier = ""
        #     setup = module.account_setup(api, identifier=identifier)
        #     if not setup:
        #         auth_details = api.auth.auth_details.__dict__
        #         user_auth_filepath = os.path.join(
        #             api.auth.profile_directory, "auth.json")
        #         main_helper.export_data(auth_details, user_auth_filepath)
        #         continue
        #     if jobs["scrape_names"]:
        #         array = module.manage_subscriptions(
        #             api, auth_count, identifier=identifier)
        #         subscription_array += array
        # subscription_list = module.format_options(
        #     subscription_array, "usernames")
        # if jobs["scrape_paid_content"]:
        #     print("Scraping Paid Content")
        #     paid_content = module.paid_content_scraper(apis)
        # if jobs["scrape_names"]:
        #     print("Scraping Subscriptions")
        #     names = main_helper.process_names(
        #         module, subscription_list, auto_model_choice, apis,
        #         json_config, site_name_lower, site_name)
        # x = main_helper.process_downloads(apis, module)
    stop_time = str(int(timeit.default_timer() - archive_time) / 60)[:4]
    print('Archive Completed in ' + stop_time + ' Minutes')
    return api
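# Sketch of how the Optional `api` parameter of the variant above lets one
# authenticated client be carried across repeated passes; grounded in the
# `return api` at the end, while the loop itself is illustrative.
def _example_reuse_api(json_config: dict, passes: int = 2):
    api = None
    for _ in range(passes):
        result = start_datascraper(json_config, "onlyfans", api)
        if result is False:
            break  # session creation failed; nothing to reuse
        api = result  # feed the authenticated client into the next pass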
# Variant: wraps the sessions in a session_manager that is "stimulated" in
# the background and killed once the archive pass finishes.
def start_datascraper(json_config, site_name_lower,
                      apis: Optional[list] = None, webhooks=True):
    # A None sentinel avoids sharing one mutable default list across calls.
    if apis is None:
        apis = []
    json_settings = json_config["settings"]
    json_sites = json_config["supported"]
    domain = json_settings["auto_site_choice"]
    main_helper.assign_vars(json_config)
    json_site_settings = json_sites[site_name_lower]["settings"]
    auto_scrape_names = json_site_settings["auto_scrape_names"]
    if isinstance(auto_scrape_names, str):
        temp_identifiers = auto_scrape_names.split(",")
        identifiers = [x for x in temp_identifiers if x]
    else:
        identifiers = []
    auto_profile_choice = json_site_settings["auto_profile_choice"]
    subscription_array = []
    original_sessions = api_helper.create_session(settings=json_settings)
    original_sessions = [x for x in original_sessions if x]
    if not original_sessions:
        print("Unable to create session")
        return False
    session_manager = api_helper.session_manager()
    session_manager.sessions = original_sessions
    session_manager = api_helper.stimulate_sessions(session_manager)
    # Leftover IP-rotation experiment, kept for reference:
    # offset_array = ["https://checkip.amazonaws.com"] * 2
    # pool = api_helper.multiprocessing()
    # count = 60
    # while True:
    #     test_ip2 = pool.starmap(multi, product(
    #         offset_array, [original_sessions[0]]))[0]
    #     if test_ip == test_ip2:
    #         print(f"SAME IP: {test_ip2} - WAITING {count} second(s)")
    #         time.sleep(count)
    #     else:
    #         print(f"NEW IP: {test_ip2} - TIME: {count}")
    #     count += 1
    archive_time = timeit.default_timer()
    if site_name_lower == "onlyfans":
        site_name = "OnlyFans"
        original_api = OnlyFans
        module = m_onlyfans
        if not apis:
            apis = main_helper.process_profiles(
                json_settings, session_manager, site_name, original_api)
        else:
            # Reused APIs get the freshly stimulated session manager.
            for api in apis:
                api.session_manager = session_manager
        subscription_array = []
        auth_count = -1
        jobs = json_site_settings["jobs"]
        subscription_list = module.format_options(apis, "users")
        if not auto_profile_choice:
            print("Choose Profile")
        apis = choose_option(subscription_list, auto_profile_choice)
        apis = [x.pop(0) for x in apis]
        for api in apis:
            module.assign_vars(api.auth.auth_details, json_config,
                               json_site_settings, site_name)
            setup, subscriptions = module.account_setup(api, identifiers, jobs)
            if not setup:
                auth_details = {"auth": api.auth.auth_details.__dict__}
                profile_directory = api.auth.profile_directory
                if profile_directory:
                    user_auth_filepath = os.path.join(
                        profile_directory, "auth.json")
                    main_helper.export_data(auth_details, user_auth_filepath)
                continue
            subscription_array += subscriptions
        subscription_list = module.format_options(
            subscription_array, "usernames")
        if jobs["scrape_paid_content"]:
            print("Scraping Paid Content")
            paid_content = module.paid_content_scraper(apis, identifiers)
        if jobs["scrape_names"]:
            print("Scraping Subscriptions")
            names = main_helper.process_names(
                module, subscription_list, auto_scrape_names, apis,
                json_config, site_name_lower, site_name)
        x = main_helper.process_downloads(apis, module)
        if webhooks:
            x = main_helper.process_webhooks(apis)
    elif site_name_lower == "starsavn":
        site_name = "StarsAVN"
        original_api = StarsAVN
        module = m_starsavn
        apis = main_helper.process_profiles(
            json_settings, original_sessions, site_name, original_api)
        auto_profile_choice = json_site_settings["auto_profile_choice"]
        subscription_array = []
        auth_count = -1
        jobs = json_site_settings["jobs"]
        subscription_list = module.format_options(apis, "users")
        apis = choose_option(subscription_list, auto_profile_choice)
        apis = [x.pop(0) for x in apis]
        for api in apis:
            module.assign_vars(api.auth.auth_details, json_config,
                               json_site_settings, site_name)
            identifier = ""
            setup = module.account_setup(api, identifier=identifier)
            if not setup:
                auth_details = api.auth.auth_details.__dict__
                user_auth_filepath = os.path.join(
                    api.auth.profile_directory, "auth.json")
                main_helper.export_data(auth_details, user_auth_filepath)
                continue
            if jobs["scrape_names"]:
                array = module.manage_subscriptions(
                    api, auth_count, identifier=identifier)
                subscription_array += array
        subscription_list = module.format_options(
            subscription_array, "usernames")
        if jobs["scrape_paid_content"]:
            print("Scraping Paid Content")
            paid_content = module.paid_content_scraper(apis)
        if jobs["scrape_names"]:
            print("Scraping Subscriptions")
            names = main_helper.process_names(
                module, subscription_list, auto_scrape_names, apis,
                json_config, site_name_lower, site_name)
        x = main_helper.process_downloads(apis, module)
    stop_time = str(int(timeit.default_timer() - archive_time) / 60)[:4]
    print('Archive Completed in ' + stop_time + ' Minutes')
    # Stop the background session stimulation before returning.
    session_manager.kill = True
    return apis
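# Lifecycle sketch for the session_manager used by the variant above, built
# only from calls that appear in it. The wrapper function, and the assumption
# that stimulate_sessions returns the manager it was given (it is reassigned
# above), are illustrative rather than confirmed project behaviour.
def _example_session_lifecycle(json_settings: dict):
    sessions = [x for x in api_helper.create_session(settings=json_settings) if x]
    if not sessions:
        return None
    manager = api_helper.session_manager()
    manager.sessions = sessions
    manager = api_helper.stimulate_sessions(manager)
    try:
        pass  # ... run start_datascraper-style work with `manager` ...
    finally:
        manager.kill = True  # signal the background stimulation to stop
    return manager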