def test_cache_merfish(tmpdir, name, expected, config, monkeypatch):
    """Fetch the MERFISH test image twice and check the on-disk cache size.

    Caching is treated as enabled unless ``size_limit`` is explicitly 0;
    when enabled, the cache directory (after culling) must contain a number
    of bytes inside the ``expected`` (lower, upper) range.
    """
    # NOTE: a missing "size_limit" key yields None via .get(), and
    # None != 0, so an absent key also counts as cache-enabled.
    cache_enabled = 0 != config["slicedimage"]["caching"].get(
        "size_limit", None)
    if cache_enabled:
        config["slicedimage"]["caching"]["directory"] = str(tmpdir / "caching")

    with monkeypatch.context() as mc:
        setup_config(config, tmpdir, mc)

        # Run 1: populates the cache when caching is enabled.
        data.MERFISH(use_test_data=True).fov().get_image("primary")

        # Run 2: should be satisfied from the cache.
        if cache_enabled:
            data.MERFISH(use_test_data=True).fov().get_image("primary")

        # Check constraints
        if cache_enabled:
            # Enforce smallest size: cull expired / over-limit entries first.
            cache = Cache(str(tmpdir / "caching"))
            cache.cull()
            cache_size = get_size(tmpdir / "caching")
            # Renamed from `min, max`: don't shadow the builtins, and use a
            # chained comparison instead of an explicit `and`.
            lower, upper = expected
            assert lower <= cache_size <= upper
class Webserver:
    """Threaded HTTP server with regex routing, virtual hosts and a
    disk-backed response cache.

    Requests are accepted on a non-blocking listening socket and handed to a
    ``ThreadPoolExecutor``; each worker runs a keep-alive session loop for
    its client.
    """

    def __init__(
            self,
            host="localhost",
            port=8080,
            hostname='hostname',
            # NOTE(review): evaluated once at class-definition time, and
            # os.cpu_count() may return None (-> TypeError) — confirm.
            workers=os.cpu_count() - 1,
            max_connections=socket.SOMAXCONN,
            max_cache_size=4e9,
            proto='http',
    ):
        self._max_connections = max_connections
        self.host = host
        self.port = port
        self.address = f'{proto}://{self.host}:{self.port}'
        self.hostname = hostname
        self.max_workers = workers
        self.routes = {}
        self.vservers_routes = {}  # routes for the virtual servers
        self.regular_routes = {}
        self._response = None
        self._current_clients = []
        self.data_end = b'\r\n\r\n'
        self.connect_refresh_timeout = 10
        self.client_request_timeout = 0.1
        self._max_keep_alive = timedelta(seconds=10)
        self.cache = Cache(size_limit=int(max_cache_size))
        self.cache_expire = 100
        self._running = True

    def set_routes(self, routes):
        # Merge new routes in; existing entries win on key collision because
        # self.routes is unpacked last.
        self.routes = {**routes, **self.routes}

    def get_routes(self):
        return self.routes

    def handle_file(self, request, filename, root=os.getcwd(),
                    content_type=None):
        """Serve a static file, caching both hits and 404 misses.

        NOTE(review): ``root=os.getcwd()`` is captured at import time, not
        per call — confirm that is intended.
        """
        path = os.path.join(root, filename)
        # Try to take the served static file from the cache first.
        res = self.cache.get(path)
        if res:
            return res
        if not os.path.exists(path):
            # Negative caching: remember the 404 so repeated misses are cheap.
            self.cache.set(path, Errors.NOT_FOUND_PAGE,
                           expire=self.cache_expire, tag='data')
            self.cache.cull()
            return Errors.NOT_FOUND_PAGE
        if content_type is None:
            content_type, _ = mimetypes.guess_type(path)
            logger.verbose('Content type for %s, was set automatically to %s',
                           path, content_type)
        response = Response.response_file(request, path, content_type)
        if response.status == 200:
            # If the status is 200 OK, add the response to the cache.
            self.cache.set(path, response,
                           expire=self.cache_expire, tag='data')
            self.cache.cull()
        return response

    def handle_dir(self, request, dirname=os.getcwd()):
        """Serve a directory listing, or the 404 page if it does not exist."""
        path = os.path.abspath(dirname)
        if not os.path.exists(path):
            return Errors.NOT_FOUND_PAGE
        response = Response.response_dir(request, path)
        return response

    def get_routes_for_virtual_server(self, request):
        """Return the route table for the request's Host header, falling
        back to the regular (compiled) routes when no vhost matches."""
        vserver_name = request.headers['Host']
        routes = self.vservers_routes.get(vserver_name)
        if routes:
            logger.debug('Find routes for vservers %s', vserver_name)
            return routes
        return self.regular_routes

    def find_handler(self, request: Request):
        """Match the request path/method against the route table and invoke
        the handler; any handler exception becomes a 500 response."""
        logger.debug('%s', self.regular_routes)
        try:
            response = Errors.NOT_FOUND_PAGE
            for path_info, handler in self.get_routes_for_virtual_server(
                    request).items():
                logger.debug('checking %s %s | %s',
                             path_info.path, path_info.method, handler)
                path_info: PathInfo = path_info
                reg = path_info.path
                match = re.fullmatch(reg, request.url.path)
                if match and request.method == path_info.method:
                    logger.debug('handler found %s for %s',
                                 handler, request.url)
                    # Handlers with a capture group get the first group as a
                    # positional argument.
                    if len(match.groupdict().items()) == 0:
                        response = handler(request)
                    else:
                        response = handler(request, match.group(1))
                    break
            logger.debug('handler returned %s', response)
            return response
        except Exception:
            logger.exception("Internal Error")
            return Errors.INTERNAL_SERVER_ERROR

    def _read_data(self, client, address):
        """Read from the (non-blocking) client socket until a full request
        has been assembled, honouring the keep-alive deadline.

        Raises ReceiveDone when the client closes the connection and
        ConnectionError when the keep-alive window expires.
        """
        _keep_alive_deadline = datetime.now() + self._max_keep_alive
        request = Request()
        filled = False
        while not filled:
            try:
                line = client.recv(64 * 1024)
                if not line:
                    raise ReceiveDone('Client done')
                # NOTE(review): splitting on os.linesep — HTTP line endings
                # are b'\r\n' regardless of platform; confirm split_keep_sep
                # handles this on non-Windows hosts.
                split = Request.split_keep_sep(line,
                                               bytes(os.linesep, 'utf-8'))
                for s in split:
                    if request.dynamic_fill(s):
                        filled = True
            except BlockingIOError:
                # We tried to read but nobody wrote; wait a bit, maybe they will.
                time.sleep(0.5)
                _current_time = datetime.now()
                if _current_time > _keep_alive_deadline:
                    # Waited longer than we keep a keep-alive connection open.
                    raise ConnectionError('Client dead')
        return request

    def _start_session(self, client, address):
        """Request/response loop for one client; loops while the client asks
        for keep-alive, exits otherwise."""
        while True:
            request = self._read_data(client, address)
            request.print_headers()
            request.print_body()
            response = self.find_handler(request)
            logger.access(method=request.method, url=request.target,
                          protocol=request.version, status=response.status)
            Response.response(client, response)
            # Finished handling this client's request
            # (received it, answered, arrived here).
            if request.headers.get('Connection') == 'keep-alive':
                logger.verbose("Client %s asked for keep-alive connection",
                               address)
                # The browser sent keep-alive (which it almost always does,
                # to avoid constantly re-establishing connections and to
                # speed up talking to the server).
                client.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
                # ...and keep looping in `while True:`.
            else:
                # Otherwise leave `while True:` (the client does not want to
                # talk to us any more).
                break

    def handle(self, client, address):
        """Worker-thread entry point: run the session and translate the
        known terminal conditions into log lines."""
        init_thread_local(THREAD_LOCAL_DATA)
        THREAD_LOCAL_DATA.client = address
        with client:
            try:
                self._start_session(client, address)
            except ConnectionError:
                # Keep-alive deadline expired in _read_data.
                logger.verbose('Client was not alive for %s seconds',
                               self._max_keep_alive)
            except OSError:
                logger.exception('Client had an issue')
            except ReceiveDone:
                # Client closed the connection after receiving its content.
                logger.verbose('Client received the content')
            except Exception:
                logger.exception('Client had an error')
                Response.response(client, Errors.INTERNAL_SERVER_ERROR)
        logger.verbose("Disconnected")

    def run(self, stop: Event):
        """Bind, listen and dispatch clients to the thread pool until the
        ``stop`` event is set or ``stop()``/Ctrl-C is triggered."""
        self._running = True
        server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow fast restarts on the same port.
        server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.make_regular_routes()
        with server_socket:
            server_socket.bind((self.host, self.port))
            server_socket.listen(self._max_connections)
            # Non-blocking accept loop: accept() raises BlockingIOError when
            # no client is waiting.
            server_socket.setblocking(False)
            logger.verbose(f'Start server on {self.address}')
            logger.debug('T %s', server_socket.gettimeout())
            with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
                logger.verbose('ThreadPoolExecutor started')
                while not stop.is_set():
                    try:
                        # Raises KeyboardInterrupt when stop() was called.
                        self._check_is_running()
                        try:
                            client, address = server_socket.accept()
                            THREAD_LOCAL_DATA.client = address
                            logger.verbose(f'Got client')
                            future = executor.submit(self.handle,
                                                     client, address)
                            self._current_clients.append(future)
                        except (socket.timeout, BlockingIOError):
                            # Nobody connected; we can wait a bit.
                            # logger.info(f'No client')
                            time.sleep(1)
                            pass
                    except KeyboardInterrupt:
                        logger.verbose('Server shutdown started')
                        break
                logger.verbose(('ThreadPoolExecutor '
                                'waiting for alive connections'))
                # Drain outstanding client futures before the pool shuts down.
                for future in self._current_clients:
                    result = future.result()
                logger.verbose('ThreadPoolExecutor finished')

    def _check_is_running(self):
        # Re-uses KeyboardInterrupt as the internal "shut down" signal so the
        # accept loop's Ctrl-C path handles both cases.
        if not self._running:
            raise KeyboardInterrupt

    def stop(self):
        self._running = False

    def route(self, path, method='GET', vserver='localhost'):
        """Decorator factory registering ``handler`` for ``path``/``method``,
        optionally under a virtual-server name.

        NOTE(review): ``path in self.routes`` compares a string against
        PathInfo keys — confirm PathInfo equality makes this guard effective.
        """
        logger.debug('Adding %s [%s]', path, method)
        if path in self.routes:
            raise AssertionError("Such route already exists.")

        def wrapper(handler):
            path_info = PathInfo(path, method)
            if vserver == 'localhost':
                self.set_routes({path_info: handler})
            if vserver in self.vservers_routes:
                self.vservers_routes[vserver][path_info] = handler
                logger.debug('Add route %s for vserver %s',
                             path_info, vserver)
            else:
                self.vservers_routes[vserver] = {path_info: handler}
                logger.debug('INIT route %s for vserver %s',
                             path_info, vserver)
            return handler
        return wrapper

    def make_regular_routes(self):
        """Compile each registered route's path pattern in place and publish
        the result as the regular (non-vhost) route table."""
        logger.debug('Processing routes %s', self.routes)
        for path_info, handler in self.routes.items():
            reg = re.compile(path_info.path)
            # NOTE(review): mutates the dict key's attribute in place;
            # path_info.path becomes a compiled pattern after this call.
            path_info.path = reg
            self.regular_routes[path_info] = handler