from argparse import ArgumentParser
import logging
import os
import time
import timeit

# Imports resolved from the project's package layout (apis/, helpers/,
# modules/); adjust if your checkout differs.
import apis.api_helper as api_helper
import apis.onlyfans.onlyfans as OnlyFans
import apis.starsavn.starsavn as StarsAVN
import helpers.main_helper as main_helper
import modules.onlyfans as m_onlyfans
import modules.starsavn as m_starsavn
from helpers.main_helper import module_chooser


def start_datascraper():
    parser = ArgumentParser()
    parser.add_argument(
        "-m", "--metadata", action="store_true", help="only exports metadata"
    )
    args = parser.parse_args()
    if args.metadata:
        print("Exporting Metadata Only")

    # Route error logging to errors.log and mirror all records to the console.
    log_error = main_helper.setup_logger("errors", "errors.log")
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s")
    console.setFormatter(formatter)
    logging.getLogger("").addHandler(console)

    # root = os.getcwd()
    config_path = os.path.join(".settings", "config.json")
    json_config, json_config2 = main_helper.get_config(config_path)
    json_settings = json_config["settings"]
    json_sites = json_config["supported"]
    infinite_loop = json_settings["infinite_loop"]
    global_user_agent = json_settings["global_user_agent"]
    domain = json_settings["auto_site_choice"]
    path = os.path.join(".settings", "extra_auth.json")
    # extra_auth_config, extra_auth_config2 = main_helper.get_config(path)
    extra_auth_config = {}
    exit_on_completion = json_settings["exit_on_completion"]
    loop_timeout = json_settings["loop_timeout"]
    main_helper.assign_vars(json_config)
    string, site_names = module_chooser(domain, json_sites)
    try:
        while True:
            # Pick the site from auto_site_choice, or prompt interactively;
            # entering "x" exits the prompt loop.
            if domain:
                if site_names:
                    site_name = domain
                else:
                    print(string)
                    continue
            else:
                print(string)
                x = input()
                if x == "x":
                    break
                x = int(x)
                site_name = site_names[x]
            site_name_lower = site_name.lower()
            json_auth_array = [json_sites[site_name_lower]["auth"]]
            json_site_settings = json_sites[site_name_lower]["settings"]
            auto_scrape_names = json_site_settings["auto_scrape_names"]
            extra_auth_settings = (
                json_sites[site_name_lower]["extra_auth_settings"]
                if "extra_auth_settings" in json_sites[site_name_lower]
                else {"extra_auth": False}
            )
            extra_auth = extra_auth_settings["extra_auth"]
            if extra_auth:
                choose_auth = extra_auth_settings["choose_auth"]
                merge_auth = extra_auth_settings["merge_auth"]
                json_auth_array += extra_auth_config["supported"][site_name_lower][
                    "auths"
                ]
                if choose_auth:
                    json_auth_array = main_helper.choose_auth(json_auth_array)
            apis = []
            module = m_onlyfans
            subscription_array = []
            legacy = True
            original_sessions = api_helper.create_session(settings=json_settings)
            if not original_sessions:
                print("Unable to create session")
                continue
            archive_time = timeit.default_timer()
            if site_name_lower == "onlyfans":
                site_name = "OnlyFans"
                subscription_array = []
                auth_count = -1
                jobs = json_site_settings["jobs"]
                for json_auth in json_auth_array:
                    api = OnlyFans.start(original_sessions)
                    auth_count += 1
                    user_agent = (
                        global_user_agent
                        if not json_auth["user_agent"]
                        else json_auth["user_agent"]
                    )
                    module = m_onlyfans
                    module.assign_vars(
                        json_auth, json_config, json_site_settings, site_name
                    )
                    api.set_auth_details(**json_auth, global_user_agent=user_agent)
                    identifier = ""
                    setup = module.account_setup(api, identifier=identifier)
                    if not setup:
                        continue
                    if jobs["scrape_names"]:
                        array = module.manage_subscriptions(
                            api, auth_count, identifier=identifier
                        )
                        subscription_array += array
                    apis.append(api)
                subscription_list = module.format_options(
                    subscription_array, "usernames"
                )
                if jobs["scrape_paid_content"]:
                    print("Scraping Paid Content")
                    paid_content = module.paid_content_scraper(apis)
                if jobs["scrape_names"]:
                    print("Scraping Subscriptions")
                    x = main_helper.process_names(
                        module,
                        subscription_list,
                        auto_scrape_names,
                        json_auth_array,
                        apis,
                        json_config,
                        site_name_lower,
                        site_name,
                    )
                x = main_helper.process_downloads(apis, module)
            elif site_name_lower == "starsavn":
                site_name = "StarsAVN"
                subscription_array = []
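                # StarsAVN branch: mirrors the OnlyFans flow above, except each
                # auth scrapes through its own copy of the shared session pool.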
                auth_count = -1
                for json_auth in json_auth_array:
                    sessions = api_helper.copy_sessions(original_sessions)
                    api = StarsAVN.start(sessions)
                    auth_count += 1
                    user_agent = (
                        global_user_agent
                        if not json_auth["user_agent"]
                        else json_auth["user_agent"]
                    )
                    module = m_starsavn
                    module.assign_vars(
                        json_auth, json_config, json_site_settings, site_name
                    )
                    api.set_auth_details(**json_auth, global_user_agent=user_agent)
                    setup = module.account_setup(api)
                    if not setup:
                        continue
                    jobs = json_site_settings["jobs"]
                    if jobs["scrape_names"]:
                        array = module.manage_subscriptions(api, auth_count)
                        subscription_array += array
                    if jobs["scrape_paid_content"]:
                        paid_contents = api.get_paid_content()
                        paid_content = module.paid_content_scraper(api)
                    apis.append(api)
                subscription_array = module.format_options(
                    subscription_array, "usernames"
                )
            stop_time = str(int(timeit.default_timer() - archive_time) / 60)[:4]
            print("Archive Completed in " + stop_time + " Minutes")
            if exit_on_completion:
                print("Now exiting.")
                exit(0)
            elif not infinite_loop:
                print("Input anything to continue")
                input()
            elif loop_timeout:
                print("Pausing scraper for " + loop_timeout + " seconds.")
                time.sleep(int(loop_timeout))
    except Exception as e:
        log_error.exception(e)
        input()
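# The keys read in start_datascraper imply this minimal shape for
# .settings/config.json; the values shown are illustrative placeholders,
# not project defaults:
#
#   {
#       "settings": {
#           "auto_site_choice": "",
#           "global_user_agent": "Mozilla/5.0 ...",
#           "infinite_loop": true,
#           "exit_on_completion": false,
#           "loop_timeout": "0"
#       },
#       "supported": {
#           "onlyfans": {
#               "auth": {...},
#               "settings": {
#                   "auto_scrape_names": false,
#                   "jobs": {
#                       "scrape_names": true,
#                       "scrape_paid_content": true
#                   }
#               }
#           }
#       }
#   }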
# ---- Entry point, dict-config version ----
import logging
import os

import tests.main_test as main_test

main_test.check_config()
main_test.check_profiles()

if __name__ == "__main__":
    import datascraper.main_datascraper as main_datascraper
    import helpers.main_helper as main_helper

    config_path = os.path.join(".settings", "config.json")
    json_config, json_config2 = main_helper.get_config(config_path)
    json_settings = json_config["settings"]
    exit_on_completion = json_settings["exit_on_completion"]
    infinite_loop = json_settings["infinite_loop"]
    loop_timeout = json_settings["loop_timeout"]
    json_sites = json_config["supported"]
    domain = json_settings["auto_site_choice"]
    string, site_names = main_helper.module_chooser(domain, json_sites)
    # logging.basicConfig(level=logging.DEBUG, format="%(message)s")
    while True:
        try:
            if domain:
                if site_names:
                    site_name = domain
                else:
                    print(string)
                    continue
            else:
                print(string)
                x = input()
                if x == "x":
                    break
        except Exception as e:
            # Handler inferred from start_datascraper above; the source
            # breaks off mid-loop here.
            logging.exception(e)
# ---- Entry point, typed-config async version ----
import asyncio
from pathlib import Path
from typing import Literal, get_args

if __name__ == "__main__":
    import apis.api_helper as api_helper
    import datascraper.main_datascraper as main_datascraper
    import helpers.main_helper as main_helper
    from helpers.main_helper import OptionsFormat  # import path assumed

    # parsed_args comes from the script's argparse setup (not shown here).
    api_helper.parsed_args = parsed_args
    config_path = Path(".settings", "config.json")
    config, _updated = main_helper.get_config(config_path)
    global_settings = config.settings
    exit_on_completion = global_settings.exit_on_completion
    infinite_loop = global_settings.infinite_loop
    loop_timeout = global_settings.loop_timeout
    domain = global_settings.auto_site_choice
    json_sites = config.supported
    string, site_names_ = main_helper.module_chooser(domain, json_sites.__dict__)
    site_name_literals = Literal["OnlyFans", "Fansly", "StarsAVN"]
    site_names: list[site_name_literals] = list(get_args(site_name_literals))
    # logging.basicConfig(level=logging.DEBUG, format="%(message)s")

    async def main():
        while True:
            # OptionsFormat prompts for (or auto-selects) the sites to scrape.
            site_options = OptionsFormat(site_names, "sites", domain)
            for site_name in site_options.final_choices:
                api = await main_datascraper.start_datascraper(config, site_name)
                if api:
                    api.close_pools()
                await asyncio.sleep(1)
            if exit_on_completion:
                print("Now exiting.")
                break  # inferred; mirrors the exit_on_completion handling above
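    # The source ends at the exit branch; asyncio.run() is the standard way to
    # launch an async main like this (added here, not present in the original).
    asyncio.run(main())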