def backend_status(self):
    """Check that the cache and indexing redis databases are reachable.

    Returns True when both unix sockets exist and both databases answer a
    PING, False otherwise. Every problem found is reported on the console.
    """
    socket_path_cache = get_socket_path('cache')
    socket_path_index = get_socket_path('indexing')
    backend_up = True
    if not os.path.exists(socket_path_cache):
        # Fixed grammar in the user-facing message ("does not exists" -> "does not exist").
        console.print(f'Socket path for the [blue]cache[/blue] redis DB [red]does not exist[/red] ({socket_path_cache}).')
        backend_up = False
    if not os.path.exists(socket_path_index):
        console.print(f'Socket path for the [blue]indexing[/blue] redis DB [red]does not exist[/red] ({socket_path_index}).')
        backend_up = False
    if backend_up:
        # Both sockets exist; make sure the databases actually answer.
        try:
            if not self.redis_cache.ping():
                console.print('Unable to ping the redis cache db.')
                backend_up = False
        except ConnectionError:
            console.print('Unable to connect to the redis cache db.')
            backend_up = False
        try:
            if not self.redis_indexing.ping():
                console.print('Unable to ping the redis indexing db.')
                backend_up = False
        except ConnectionError:
            console.print('Unable to connect to the redis indexing db.')
            backend_up = False
    return backend_up
def main():
    """Run the gunicorn-backed website until a shutdown is requested or it dies."""
    cache = StrictRedis(unix_socket_path=get_socket_path('cache'))
    # Force the website to rebuild its cache on startup.
    cache.delete('cache_loaded')
    website_dir = get_homedir() / 'website'
    listen_ip = get_config('generic', 'website_listen_ip')
    listen_port = get_config('generic', 'website_listen_port')
    gunicorn_cmd = [
        'gunicorn', '-w', '10',
        '--graceful-timeout', '2', '--timeout', '300',
        '-b', f'{listen_ip}:{listen_port}',
        '--log-level', 'info',
        'web:app',
    ]
    try:
        p = Popen(gunicorn_cmd, cwd=website_dir)
        set_running('website')
        # Poll until a shutdown is requested or the gunicorn process exits.
        while not shutdown_requested() and p.poll() is None:
            time.sleep(1)
    except KeyboardInterrupt:
        print('Website killed by user.')
    finally:
        print('Shutting down website.')
        try:
            # Killing everything if possible.
            p.send_signal(signal.SIGWINCH)
            p.send_signal(signal.SIGTERM)
        except Exception:
            pass
        unset_running('website')
def main():
    """Shut down every lookyloo service, then stop the backend."""
    # Sanity check: raises early when the environment is not set up.
    get_homedir()
    shutdown_proc = Popen(['shutdown'])
    shutdown_proc.wait()
    # Clear the shutdown flag so a later restart does not exit immediately.
    cache = Redis(unix_socket_path=get_socket_path('cache'), db=1)
    cache.delete('shutdown')
    Popen(['run_backend', '--stop'])
def check_running(name: str) -> bool:
    """Return True when the redis database behind socket `name` is up.

    The database counts as running when its unix socket exists and it
    answers a PING; any connection failure simply yields False.
    """
    socket_path = get_socket_path(name)
    if not os.path.exists(socket_path):
        return False
    try:
        r = Redis(unix_socket_path=socket_path)
        # bool(...) replaces the redundant `True if ... else False`.
        return bool(r.ping())
    except ConnectionError:
        return False
def __init__(self, loglevel: int = logging.INFO):
    """Set up the async capture worker and its redis connection."""
    super().__init__(loglevel)
    self.script_name = 'async_capture'
    self.lookyloo = Lookyloo()
    # Worker configuration.
    self.only_global_lookups: bool = get_config('generic', 'only_global_lookups')
    self.capture_dir: Path = get_captures_dir()
    self.splash_url: str = get_splash_url()
    # Cache DB connection, decoding responses to str.
    self.redis = Redis(unix_socket_path=get_socket_path('cache'), decode_responses=True)
def __init__(self, loglevel: int=logging.INFO):
    """Set up the archiver worker and the directory holding archived captures."""
    super().__init__(loglevel)
    self.script_name = 'archiver'
    self.redis = Redis(unix_socket_path=get_socket_path('cache'))
    # make sure archived captures dir exists
    archive_root = get_homedir() / 'archived_captures'
    archive_root.mkdir(parents=True, exist_ok=True)
    self.archived_captures_dir = archive_root
    self._load_indexes()
def main():
    """Flag a shutdown in the cache DB, then wait until every service stops."""
    cache = StrictRedis(unix_socket_path=get_socket_path('cache'), db=1)
    cache.set('shutdown', 1)
    time.sleep(5)
    while True:
        still_up = is_running()
        if not still_up:
            break
        # Show which services are still alive while we wait.
        print(still_up)
        time.sleep(5)
def rename_captures():
    """Move flat capture directories into year/month subdirectories.

    Any cached lookup entries for a moved capture are dropped on the way.
    """
    r = Redis(unix_socket_path=get_socket_path('cache'))
    captures_root: Path = get_captures_dir()
    for uuid_file in captures_root.glob('*/uuid'):
        uuid = uuid_file.read_text()
        # Drop the cached lookup entries for this capture, if any.
        dir_key = r.hget('lookup_dirs', uuid)
        if dir_key:
            r.hdel('lookup_dirs', uuid)
            r.delete(dir_key)
        # Directory names are ISO timestamps; bucket by year/month.
        capture_time = datetime.strptime(uuid_file.parent.name, '%Y-%m-%dT%H:%M:%S.%f')
        dest_dir = captures_root / str(capture_time.year) / f'{capture_time.month:02}'
        safe_create_dir(dest_dir)
        uuid_file.parent.rename(dest_dir / uuid_file.parent.name)
def _build_ua_file(self):
    '''Build a file in a format compatible with the capture page.

    Aggregates yesterday's observed user agents from the cache DB into a
    JSON file (grouped by platform/browser plus a frequency-ordered list),
    then removes the UA/IP mapping from redis.
    '''
    yesterday = (date.today() - timedelta(days=1))
    self_generated_ua_file_path = get_homedir() / 'own_user_agents' / str(yesterday.year) / f'{yesterday.month:02}'
    safe_create_dir(self_generated_ua_file_path)
    self_generated_ua_file = self_generated_ua_file_path / f'{yesterday.isoformat()}.json'
    if self_generated_ua_file.exists():
        self.logger.info(f'User-agent file for {yesterday} already exists.')
        return
    self.logger.info(f'Generating user-agent file for {yesterday}')
    redis = Redis(unix_socket_path=get_socket_path('cache'), decode_responses=True)
    entries = redis.zrevrange(f'user_agents|{yesterday.isoformat()}', 0, -1)
    if not entries:
        self.logger.info(f'No User-agent file for {yesterday} to generate.')
        return
    to_store: Dict[str, Any] = {'by_frequency': []}
    # Entries look like "<ip>|<user-agent>"; keep only the UA and count duplicates.
    # Counter over a generator avoids building a throwaway list.
    uas = Counter(entry.split('|', 1)[1] for entry in entries)
    for ua, _ in uas.most_common():
        parsed_ua = UserAgent(ua)
        if not parsed_ua.platform or not parsed_ua.browser:
            # UA we cannot classify; skip it.
            continue
        browser_key = f'{parsed_ua.browser} {parsed_ua.version}'
        # setdefault replaces the manual "if key not in dict" dance.
        platform_entry = to_store.setdefault(parsed_ua.platform, {})
        platform_entry.setdefault(browser_key, []).append(parsed_ua.string)
        to_store['by_frequency'].append({'os': parsed_ua.platform,
                                         'browser': browser_key,
                                         'useragent': parsed_ua.string})
    with self_generated_ua_file.open('w') as f:
        json.dump(to_store, f, indent=2)
    # Remove the UA / IP mapping.
    redis.delete(f'user_agents|{yesterday.isoformat()}')
    self.logger.info(f'User-agent file for {yesterday} generated.')
def __init__(self) -> None:
    """Open connections to the cache and indexing redis databases."""
    cache_socket = get_socket_path('cache')
    indexing_socket = get_socket_path('indexing')
    # Both connections decode redis responses to str.
    self.redis_cache: Redis = Redis(unix_socket_path=cache_socket, decode_responses=True)
    self.redis_indexing: Redis = Redis(unix_socket_path=indexing_socket, decode_responses=True)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from lookyloo.helpers import is_running, get_socket_path
import time
from redis import StrictRedis

if __name__ == '__main__':
    # Flag a shutdown in the cache DB so every service starts winding down.
    cache = StrictRedis(unix_socket_path=get_socket_path('cache'), db=1)
    cache.set('shutdown', 1)
    time.sleep(5)
    # Then wait until nothing reports itself as running anymore.
    while True:
        services = is_running()
        if not services:
            break
        print(services)
        time.sleep(5)