def load_target_files():
    """Read the target file list from data/file.lst and append it to the database."""
    textutils.output_info('Loading target files')
    loaded = loaders.load_targets('data/file.lst')
    database.files += loaded
def load_target_paths():
    """Read the target path list from data/path.lst and append it to the database."""
    textutils.output_info('Loading target paths')
    loaded = loaders.load_targets('data/path.lst')
    database.paths += loaded
def load_target_files(running_path):
    """Load the target files into the database.

    Args:
        running_path: base directory of the application; the file list is
            expected at <running_path>/data/file.lst.
    """
    # Local import keeps this collapsed file's top level untouched.
    import os
    textutils.output_info('Loading target files')
    # os.path.join tolerates a trailing separator on running_path, unlike
    # the previous raw string concatenation.
    database.files += loaders.load_targets(os.path.join(running_path, 'data', 'file.lst'))
def load_target_paths(running_path):
    """Load the target paths into the database.

    Args:
        running_path: base directory of the application; the path list is
            expected at <running_path>/data/path.lst.
    """
    # Local import keeps this collapsed file's top level untouched.
    import os
    textutils.output_info('Loading target paths')
    # os.path.join tolerates a trailing separator on running_path, unlike
    # the previous raw string concatenation.
    database.paths += loaders.load_targets(os.path.join(running_path, 'data', 'path.lst'))
def load_target_paths():
    """Read the target path list from data/path.lst and append it to the database."""
    textutils.output_info('Loading target paths')
    # Append the loaded paths (not files) to the shared database.
    new_paths = loaders.load_targets('data/path.lst')
    database.paths += new_paths
def main():
    """Main app logic: probe target paths, then (optionally) target files.

    Reads configuration from `conf`, fills `database` state, and drives the
    worker queue. Indentation below is reconstructed from the collapsed
    source — NOTE(review): nesting of the file-scan phase under
    `conf.search_files` should be confirmed against the project history.
    """
    # Ensure the host is of the right format
    utils.sanitize_config()

    # Load target paths
    utils.output_info('Loading target paths')
    database.paths = loaders.load_targets('data/path.lst')

    # Import and run host plugins
    utils.output_info('Executing ' + str(len(host.__all__)) + ' host plugins')
    for plugin_name in host.__all__:
        plugin = __import__("plugins.host." + plugin_name, fromlist=[plugin_name])
        if hasattr(plugin, 'execute'):
            plugin.execute()

    # Spawn workers (muted output when we will also scan files or are debugging)
    if conf.search_files or conf.debug:
        workers = spawn_workers(conf.thread_count, output=False)
    else:
        workers = spawn_workers(conf.thread_count)

    # Fill work queue with fetch list
    utils.output_info('Probing ' + str(len(database.paths)) + ' paths')
    for item in database.paths:
        database.fetch_queue.put(item)

    # Wait for initial valid path lookup
    wait_for_idle(workers, database.fetch_queue)
    utils.output_info('Found ' + str(len(database.valid_paths)) + ' valid paths')

    if conf.debug:
        for item in database.valid_paths:
            utils.output_debug(str(item))

    if conf.search_files:
        # Load target files
        utils.output_info('Loading target files')
        database.files = loaders.load_targets('data/file.lst')

        if conf.debug:
            for item in database.files:
                utils.output_debug('Target file added: ' + str(item))

        # Combine files with '/' and all valid paths.
        # `target_file` renamed from `file` to avoid shadowing the builtin.
        tmp_list = []
        for target_file in database.files:
            # Base target: file anchored at the host root.
            file_copy = dict(target_file)
            file_copy['url'] = urljoin(conf.target_host, file_copy['url'])
            if conf.debug:
                utils.output('Adding base target: ' + str(file_copy))
            tmp_list.append(file_copy)

            # Combined targets: file appended to every discovered valid path.
            for valid_url in database.valid_paths:
                file_copy = dict(target_file)
                file_copy['url'] = valid_url['url'] + target_file['url']
                if conf.debug:
                    utils.output('Adding combined target: ' + str(file_copy))
                tmp_list.append(file_copy)

        # Add generated urls to the valid paths in one shot.
        database.valid_paths.extend(tmp_list)

        if conf.debug:
            for item in database.valid_paths:
                utils.output_debug('Path to test: ' + str(item))

        # Import and run file plugins
        utils.output_info('Executing ' + str(len(path.__all__)) + ' file plugins')
        for plugin_name in path.__all__:
            plugin = __import__("plugins.path." + plugin_name, fromlist=[plugin_name])
            if hasattr(plugin, 'execute'):
                plugin.execute()

        # Spawn workers
        workers = spawn_workers(conf.thread_count)

        # Fill work queue with fetch list
        utils.output_info('Probing ' + str(len(database.valid_paths)) + ' items...')
        for item in database.valid_paths:
            database.fetch_queue.put(item)

        # Wait for file lookup
        wait_for_idle(workers, database.fetch_queue)