def backend_status(self):
    '''Check that the unix sockets of both redis databases exist and that the databases answer a PING.'''
    socket_path_cache = get_socket_path('cache')
    socket_path_index = get_socket_path('indexing')
    backend_up = True
    if not os.path.exists(socket_path_cache):
        console.print(f'Socket path for the [blue]cache[/blue] redis DB [red]does not exist[/red] ({socket_path_cache}).')
        backend_up = False
    if not os.path.exists(socket_path_index):
        console.print(f'Socket path for the [blue]indexing[/blue] redis DB [red]does not exist[/red] ({socket_path_index}).')
        backend_up = False
    if backend_up:
        try:
            if not self.redis_cache.ping():
                console.print('Unable to ping the redis cache db.')
                backend_up = False
        except ConnectionError:
            console.print('Unable to connect to the redis cache db.')
            backend_up = False
        try:
            if not self.redis_indexing.ping():
                console.print('Unable to ping the redis indexing db.')
                backend_up = False
        except ConnectionError:
            console.print('Unable to connect to the redis indexing db.')
            backend_up = False
    return backend_up

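# Usage sketch (hypothetical, not part of the code above): poll backend_status()
# so callers can gate startup on both redis databases being reachable.
# `manager` stands in for an instance of the class that defines backend_status()
# (see the __init__ at the end of this section); retries/delay are made-up knobs.
import time

def wait_for_backend(manager, retries: int = 10, delay: float = 1.0) -> bool:
    for _ in range(retries):
        if manager.backend_status():
            return True
        time.sleep(delay)
    return False
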
def main():
    get_homedir()
    # Trigger the shutdown script and wait for it to return.
    p = Popen(['shutdown'])
    p.wait()
    try:
        r = Redis(unix_socket_path=get_socket_path('cache'), db=1)
        r.delete('shutdown')
        r = Redis(unix_socket_path=get_socket_path('cache'))
        r.delete('tree_cache')
        print('Shutting down databases...')
        p_backend = run(['run_backend', '--stop'])
        p_backend.check_returncode()
        print('done.')
    except ConnectionError:
        # Already down, skip the stacktrace
        pass

async def _to_run_forever_async(self):
    self.redis: Redis = Redis(unix_socket_path=get_socket_path('cache'), decode_responses=True)
    # Drain the capture queue, but bail out early if a shutdown was requested.
    while await self.redis.exists('to_capture'):
        await self.process_capture_queue()
        if self.shutdown_requested():
            break
    await self.redis.close()

def check_running(name: str) -> bool:
    socket_path = get_socket_path(name)
    if not os.path.exists(socket_path):
        return False
    try:
        r = Redis(unix_socket_path=socket_path)
        return bool(r.ping())
    except ConnectionError:
        return False

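# Usage sketch (hypothetical): block until both redis instances answer a PING,
# e.g. right after launching them; relies only on check_running() above.
import time

def wait_until_running(names=('cache', 'indexing'), delay: float = 1.0) -> None:
    while not all(check_running(name) for name in names):
        time.sleep(delay)
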
def __init__(self, loglevel: int = logging.INFO):
    super().__init__(loglevel)
    self.script_name = 'archiver'
    self.redis = Redis(unix_socket_path=get_socket_path('cache'), decode_responses=True)

    # make sure archived captures dir exists
    self.archived_captures_dir = get_homedir() / 'archived_captures'
    self.archived_captures_dir.mkdir(parents=True, exist_ok=True)

    self._load_indexes()

def rename_captures():
    r = Redis(unix_socket_path=get_socket_path('cache'))
    capture_dir: Path = get_captures_dir()
    # Move every capture from the flat layout into year/month subdirectories,
    # dropping the (now stale) lookup entries along the way.
    for uuid_path in capture_dir.glob('*/uuid'):
        with uuid_path.open() as f:
            uuid = f.read()
        dir_key = r.hget('lookup_dirs', uuid)
        if dir_key:
            r.hdel('lookup_dirs', uuid)
            r.delete(dir_key)
        timestamp = datetime.strptime(uuid_path.parent.name, '%Y-%m-%dT%H:%M:%S.%f')
        dest_dir = capture_dir / str(timestamp.year) / f'{timestamp.month:02}'
        safe_create_dir(dest_dir)
        uuid_path.parent.rename(dest_dir / uuid_path.parent.name)

def _build_ua_file(self):
    '''Build a file in a format compatible with the capture page'''
    yesterday = date.today() - timedelta(days=1)
    self_generated_ua_file_path = get_homedir() / 'own_user_agents' / str(yesterday.year) / f'{yesterday.month:02}'
    safe_create_dir(self_generated_ua_file_path)
    self_generated_ua_file = self_generated_ua_file_path / f'{yesterday.isoformat()}.json'
    if self_generated_ua_file.exists():
        self.logger.info(f'User-agent file for {yesterday} already exists.')
        return
    self.logger.info(f'Generating user-agent file for {yesterday}')
    redis = Redis(unix_socket_path=get_socket_path('cache'), decode_responses=True)
    entries = redis.zrevrange(f'user_agents|{yesterday.isoformat()}', 0, -1)
    if not entries:
        self.logger.info(f'No user-agents for {yesterday}, nothing to generate.')
        return

    to_store: Dict[str, Any] = {'by_frequency': []}
    # Entries are stored as 'ip|user-agent'; count the user-agent part only.
    uas = Counter([entry.split('|', 1)[1] for entry in entries])
    for ua, _ in uas.most_common():
        parsed_ua = ParsedUserAgent(ua)
        if not parsed_ua.platform or not parsed_ua.browser:
            continue
        platform = parsed_ua.platform
        browser = f'{parsed_ua.browser} {parsed_ua.version}'
        if platform not in to_store:
            to_store[platform] = {}
        if browser not in to_store[platform]:
            to_store[platform][browser] = []
        to_store[platform][browser].append(parsed_ua.string)
        to_store['by_frequency'].append({'os': platform,
                                         'browser': browser,
                                         'useragent': parsed_ua.string})
    with self_generated_ua_file.open('w') as f:
        json.dump(to_store, f, indent=2)

    # Remove the UA / IP mapping.
    redis.delete(f'user_agents|{yesterday.isoformat()}')
    self.logger.info(f'User-agent file for {yesterday} generated.')

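# Illustrative shape of the JSON file written by _build_ua_file() (values are
# made up; the keys follow the to_store structure built above):
# {
#   "by_frequency": [
#     {"os": "Windows", "browser": "Chrome 100.0", "useragent": "Mozilla/5.0 ..."}
#   ],
#   "Windows": {
#     "Chrome 100.0": ["Mozilla/5.0 ..."]
#   }
# }
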
def shutdown_indexing(storage_directory: Optional[Path] = None):
    if not storage_directory:
        storage_directory = get_homedir()
    r = Redis(unix_socket_path=get_socket_path('indexing'))
    r.shutdown(save=True)
    print('Redis indexing database shut down.')

def shutdown_cache(storage_directory: Optional[Path] = None):
    if not storage_directory:
        storage_directory = get_homedir()
    r = Redis(unix_socket_path=get_socket_path('cache'))
    r.shutdown(save=True)
    print('Redis cache database shut down.')

def __init__(self) -> None:
    self.redis_cache: Redis = Redis(unix_socket_path=get_socket_path('cache'),
                                    decode_responses=True)
    self.redis_indexing: Redis = Redis(unix_socket_path=get_socket_path('indexing'),
                                       decode_responses=True)