async def update(request, response):
    """Update the GNS3 VM settings from the JSON payload.

    Persists the controller configuration and returns the updated VM
    settings with a 201 status.
    """
    # Bug fix: instance() is the singleton accessor and must be called on the
    # class; Controller().instance() built a throw-away Controller object
    # before fetching the singleton.
    controller = Controller.instance()
    gns3_vm = controller.gns3vm
    await gns3_vm.update_settings(request.json)
    controller.save()
    response.json(gns3_vm)
    response.set_status(201)
async def update(request, response):
    """Update the IOU license settings from the JSON payload.

    Persists the controller configuration and returns the updated license
    settings with a 201 status.
    """
    # Bug fix: instance() is the singleton accessor and must be called on the
    # class; Controller().instance() built a throw-away Controller object
    # before fetching the singleton.
    controller = Controller.instance()
    iou_license = controller.iou_license
    iou_license.update(request.json)
    controller.save()
    response.json(iou_license)
    response.set_status(201)
def update(self, **kwargs):
    """Update this template's settings and notify listeners.

    :param kwargs: settings to merge into the template
    :raises: whatever ``check_can_write_config`` raises when the
        configuration file is not writable
    """
    # Local import to avoid a circular dependency with the controller module.
    from gns3server.controller import Controller
    controller = Controller.instance()
    # Consistency fix: reuse the controller reference instead of fetching the
    # singleton a second time.
    controller.check_can_write_config()
    self._settings.update(kwargs)
    controller.notification.controller_emit("template.updated", self.__json__())
    controller.save()
def reload_all(request, response):
    """Restart every node in a project by stopping then starting them all."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    yield from project.stop_all()
    yield from project.start_all()
    response.set_status(204)
def idlepc_proposals(request, response):
    """Return Dynamips idle-PC proposals for a node as JSON."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    node = project.get_node(request.match_info["node_id"])
    proposals = yield from node.dynamips_idlepc_proposals()
    response.json(proposals)
    response.set_status(200)
def reload(request, response):
    """Reload a single node and return its updated representation."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    node = project.get_node(request.match_info["node_id"])
    yield from node.reload()
    response.json(node)
    response.set_status(201)
async def import_project(request, response):
    """Import a project from an uploaded .gns3project archive.

    The request body is streamed into a temporary zip file which is then
    handed to the import routine.  NOTE(review): the inner
    ``import_project(...)`` call presumably resolves to a module-level
    helper shadowed by this handler's name — confirm against the
    surrounding imports.
    """
    controller = Controller.instance()
    if request.get("path"):
        config = Config.instance()
        if config.get_section_config("Server").getboolean("local", False) is False:
            # Importing to an arbitrary path is only allowed on a local server
            response.set_status(403)
            return
    path = request.json.get("path")
    name = request.json.get("name")
    # We write the content to a temporary location and after we extract it all.
    # It could be more optimal to stream this but it is not implemented in Python.
    try:
        begin = time.time()
        with tempfile.TemporaryDirectory() as tmpdir:
            temp_project_path = os.path.join(tmpdir, "project.zip")
            async with aiofiles.open(temp_project_path, 'wb') as f:
                while True:
                    chunk = await request.content.read(CHUNK_SIZE)
                    if not chunk:
                        break
                    await f.write(chunk)
            with open(temp_project_path, "rb") as f:
                project = await import_project(controller, request.match_info["project_id"], f, location=path, name=name)
        log.info("Project '{}' imported in {:.4f} seconds".format(project.name, time.time() - begin))
    except OSError as e:
        raise aiohttp.web.HTTPInternalServerError(text="Could not import the project: {}".format(e))
    response.json(project)
    response.set_status(201)
def get_file(request, response):
    """Stream a file from the project directory (legacy aiohttp API).

    NOTE(review): ``raise aiohttp.web.HTTPForbidden`` raises the bare
    class (implicitly instantiated); the last line instantiates it
    explicitly — confirm the inconsistency is intentional.
    NOTE(review): an empty path would make ``path[0]`` raise IndexError —
    presumably routing prevents this; confirm.
    """
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    path = request.match_info["path"]
    path = os.path.normpath(path).strip('/')
    # Raise error if user try to escape
    if path[0] == ".":
        raise aiohttp.web.HTTPForbidden
    path = os.path.join(project.path, path)
    response.content_type = "application/octet-stream"
    response.set_status(200)
    response.enable_chunked_encoding()
    # Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
    response.content_length = None
    try:
        with open(path, "rb") as f:
            # Headers must be sent before the first chunk (old aiohttp API).
            response.start(request)
            while True:
                data = f.read(4096)
                if not data:
                    break
                yield from response.write(data)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
def import_project(request, response):
    """Import a project from an uploaded archive (legacy coroutine style).

    NOTE(review): the inner ``import_project(...)`` call presumably
    resolves to a module-level helper shadowed by this handler's name —
    confirm against the surrounding imports.
    """
    controller = Controller.instance()
    if request.get("path"):
        config = Config.instance()
        if config.get_section_config("Server").getboolean("local", False) is False:
            # Importing to an arbitrary path is only allowed on a local server
            response.set_status(403)
            return
    path = request.json.get("path")
    name = request.json.get("name")
    # We write the content to a temporary location and after we extract it all.
    # It could be more optimal to stream this but it is not implemented in Python.
    # Spooled means the file is temporary kept in memory until max_size is reached
    try:
        with tempfile.SpooledTemporaryFile(max_size=10000) as temp:
            while True:
                packet = yield from request.content.read(512)
                if not packet:
                    break
                temp.write(packet)
            # The spooled file must still be open when handed to the importer.
            project = yield from import_project(controller, request.match_info["project_id"], temp, location=path, name=name)
    except OSError as e:
        raise aiohttp.web.HTTPInternalServerError(text="Could not import the project: {}".format(e))
    response.json(project)
    response.set_status(201)
def export_project(request, response):
    """Export a project as a .gns3project zip streamed to the client.

    NOTE(review): the inner ``export_project(...)`` call presumably
    resolves to a module-level helper shadowed by this handler's name —
    confirm against the surrounding imports.
    """
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    try:
        with tempfile.TemporaryDirectory() as tmp_dir:
            # Bug fix: bool("0") is True because any non-empty string is
            # truthy; parse the flag as an int first, matching the newer
            # variant of this handler.
            datas = yield from export_project(project, tmp_dir, include_images=bool(int(request.get("include_images", "0"))))
            # We need to do that now because export could failed and raise an HTTP error
            # that why response start need to be the later possible
            response.content_type = 'application/gns3project'
            response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3project"'.format(project.name)
            response.enable_chunked_encoding()
            # Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
            response.content_length = None
            response.start(request)
            for data in datas:
                response.write(data)
                yield from response.drain()
            yield from response.write_eof()
    # Will be raise if you have no space left or permission issue on your temporary directory
    # RuntimeError: something was wrong during the zip process
    except (OSError, RuntimeError) as e:
        raise aiohttp.web.HTTPNotFound(text="Can't export project: {}".format(str(e)))
def notification_ws(request, response):
    """Stream project notifications to a client over a WebSocket.

    May auto-close the project once the last listener disconnects.
    """
    controller = Controller.instance()
    project = controller.get_project(request.match_info["project_id"])
    ws = aiohttp.web.WebSocketResponse()
    yield from ws.prepare(request)
    # Bug fix: asyncio.async() was removed in Python 3.7 ("async" became a
    # keyword); ensure_future() is the supported equivalent.
    asyncio.ensure_future(process_websocket(ws))
    with controller.notification.queue(project) as queue:
        while True:
            try:
                notification = yield from queue.get_json(5)
            except asyncio.futures.CancelledError:
                break
            if ws.closed:
                break
            ws.send_str(notification)
    if project.auto_close:
        # To avoid trouble with client connecting disconnecting we sleep few seconds before checking
        # if someone else is not connected
        yield from asyncio.sleep(5)
        if not controller.notification.project_has_listeners(project):
            yield from project.close()
    return ws
def notification(request, response):
    """Stream project notifications as newline-delimited JSON (legacy API).

    May auto-close the project once the last listener disconnects.
    """
    controller = Controller.instance()
    project = controller.get_project(request.match_info["project_id"])
    response.content_type = "application/json"
    response.set_status(200)
    response.enable_chunked_encoding()
    # Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
    response.content_length = None
    response.start(request)
    with controller.notification.queue(project) as queue:
        while True:
            try:
                # The 5 is presumably a timeout in seconds — confirm against
                # the queue implementation.
                msg = yield from queue.get_json(5)
                response.write(("{}\n".format(msg)).encode("utf-8"))
            except asyncio.futures.CancelledError as e:
                break
            yield from response.drain()
    if project.auto_close:
        # To avoid trouble with client connecting disconnecting we sleep few seconds before checking
        # if someone else is not connected
        yield from asyncio.sleep(5)
        if not controller.notification.project_has_listeners(project):
            yield from project.close()
def close(request, response):
    """Close an open project and return it with a 201 status."""
    project = Controller.instance().get_project(request.match_info["project_id"])
    yield from project.close()
    response.set_status(201)
    response.json(project)
def get_file(request, response):
    """Proxy-read a file from a node's project-files area on its compute."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    node = project.get_node(request.match_info["node_id"])
    path = force_unix_path(request.match_info["path"])
    # Raise error if user try to escape
    if path[0] == ".":
        raise aiohttp.web.HTTPForbidden()
    remote_path = "/project-files/{}/{}/{}".format(node.node_type, node.id, path)
    res = yield from node.compute.http_query(
        "GET",
        "/projects/{project_id}/files{path}".format(project_id=project.id, path=remote_path),
        timeout=None,
        raw=True)
    response.set_status(200)
    response.content_type = "application/octet-stream"
    response.enable_chunked_encoding()
    yield from response.prepare(request)
    response.write(res.body)
    yield from response.write_eof()
def post_file(request, response):
    """Proxy-write an uploaded file into a node's project-files area."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    node = project.get_node(request.match_info["node_id"])
    path = force_unix_path(request.match_info["path"])
    # Raise error if user try to escape
    if path[0] == ".":
        raise aiohttp.web.HTTPForbidden()
    remote_path = "/project-files/{}/{}/{}".format(node.node_type, node.id, path)
    payload = yield from request.content.read()
    yield from node.compute.http_query(
        "POST",
        "/projects/{project_id}/files{path}".format(project_id=project.id, path=remote_path),
        data=payload,
        timeout=None,
        raw=True)
    response.set_status(201)
def write_file(request, response):
    """Write the request body to a file inside the project directory.

    :raises HTTPForbidden: when the path tries to escape the project dir
        or the target is not writable
    :raises HTTPNotFound: when the target directory does not exist
    :raises HTTPConflict: on any other filesystem error
    """
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    path = request.match_info["path"]
    path = os.path.normpath(path).strip("/")
    # Raise error if user try to escape
    if path[0] == ".":
        # Consistency fix: instantiate the exception like every other raise
        # in this handler; the original raised the bare class and relied on
        # implicit instantiation.
        raise aiohttp.web.HTTPForbidden()
    path = os.path.join(project.path, path)
    response.set_status(200)
    try:
        with open(path, 'wb+') as f:
            while True:
                packet = yield from request.content.read(512)
                if not packet:
                    break
                f.write(packet)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
    except OSError as e:
        raise aiohttp.web.HTTPConflict(text=str(e))
async def notification_ws(request, response):
    """Stream project notifications to a client over a WebSocket.

    Registers the socket in ``request.app['websockets']`` so the server
    can track/close it, and may auto-close the project once the last
    listener disconnects.
    """
    controller = Controller.instance()
    project = controller.get_project(request.match_info["project_id"])
    ws = aiohttp.web.WebSocketResponse()
    await ws.prepare(request)
    request.app['websockets'].add(ws)
    # Handle the incoming side of the socket in a background task.
    asyncio.ensure_future(process_websocket(ws))
    log.info("New client has connected to the notification stream for project ID '{}' (WebSocket method)".format(project.id))
    try:
        with controller.notification.project_queue(project.id) as queue:
            while True:
                notification = await queue.get_json(5)
                if ws.closed:
                    break
                await ws.send_str(notification)
    finally:
        # Always unregister the socket, even when the queue loop raises.
        log.info("Client has disconnected from notification stream for project ID '{}' (WebSocket method)".format(project.id))
        if not ws.closed:
            await ws.close()
        request.app['websockets'].discard(ws)
        if project.auto_close:
            # To avoid trouble with client connecting disconnecting we sleep few seconds before checking
            # if someone else is not connected
            await asyncio.sleep(5)
            if not controller.notification.project_has_listeners(project.id):
                log.info("Project '{}' is automatically closing due to no client listening".format(project.id))
                await project.close()
    return ws
def pcap(request, response):
    """Stream a link's pcap capture file, following it as it grows."""
    project = yield from Controller.instance().get_loaded_project(request.match_info["project_id"])
    link = project.get_link(request.match_info["link_id"])
    # Idiom fix: this was "while ... raise", which executes at most once and
    # reads as an accidental loop; a plain "if" expresses the intent.
    if link.capture_file_path is None:
        raise aiohttp.web.HTTPNotFound(text="pcap file not found")
    # Wait for the capture process to create the file.
    while not os.path.isfile(link.capture_file_path):
        yield from asyncio.sleep(0.5)
    try:
        with open(link.capture_file_path, "rb") as f:
            response.content_type = "application/vnd.tcpdump.pcap"
            response.set_status(200)
            response.enable_chunked_encoding()
            yield from response.prepare(request)
            while True:
                chunk = f.read(4096)
                if not chunk:
                    # No break: the capture may still be writing, so keep
                    # polling the file for more data.
                    yield from asyncio.sleep(0.1)
                yield from response.write(chunk)
    except OSError:
        raise aiohttp.web.HTTPNotFound(text="pcap file {} not found or not accessible".format(link.capture_file_path))
async def pcap(request, response):
    """Proxy a live pcap stream for a link from its compute to the client.

    NOTE(review): ``as response`` below shadows the handler's ``response``
    parameter; the outer response object is never used after that point —
    confirm that is intentional.
    """
    project = await Controller.instance().get_loaded_project(request.match_info["project_id"])
    ssl_context = Controller.instance().ssl_context()
    link = project.get_link(request.match_info["link_id"])
    if not link.capturing:
        raise aiohttp.web.HTTPConflict(text="This link has no active packet capture")
    compute = link.compute
    pcap_streaming_url = link.pcap_streaming_url()
    # Forward the client's headers, but point Host at the compute.
    headers = multidict.MultiDict(request.headers)
    headers['Host'] = compute.host
    headers['Router-Host'] = request.host
    body = await request.read()
    # force_close=True — presumably to avoid reusing a connection carrying an
    # unbounded stream; confirm.
    connector = aiohttp.TCPConnector(limit=None, force_close=True, ssl_context=ssl_context)
    async with aiohttp.ClientSession(connector=connector, headers=headers) as session:
        async with session.request(request.method, pcap_streaming_url, timeout=None, data=body) as response:
            proxied_response = aiohttp.web.Response(headers=response.headers, status=response.status)
            if response.headers.get('Transfer-Encoding', '').lower() == 'chunked':
                proxied_response.enable_chunked_encoding()
            await proxied_response.prepare(request)
            # Relay upstream chunks to the client as they arrive.
            async for data in response.content.iter_any():
                if not data:
                    break
                await proxied_response.write(data)
async def write_file(request, response):
    """Stream the request body into a file inside the project directory."""
    controller = Controller.instance()
    project = await controller.get_loaded_project(request.match_info["project_id"])
    file_path = os.path.normpath(request.match_info["path"]).strip("/")
    # Raise error if user try to escape
    if file_path[0] == ".":
        raise aiohttp.web.HTTPForbidden()
    file_path = os.path.join(project.path, file_path)
    response.set_status(200)
    try:
        async with aiofiles.open(file_path, 'wb+') as f:
            while True:
                try:
                    data = await request.content.read(CHUNK_SIZE)
                except asyncio.TimeoutError:
                    raise aiohttp.web.HTTPRequestTimeout(text="Timeout when writing to file '{}'".format(file_path))
                if not data:
                    break
                await f.write(data)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
    except OSError as e:
        raise aiohttp.web.HTTPConflict(text=str(e))
async def open(request, response):
    """Open a project and return it with a 201 status."""
    project = Controller.instance().get_project(request.match_info["project_id"])
    await project.open()
    response.set_status(201)
    response.json(project)
def raw(request, response):
    """Serve the raw content of a symbol; 404 when unknown or unreadable."""
    symbols = Controller.instance().symbols
    try:
        # get_path may raise KeyError for an unknown symbol id, so it stays
        # inside the try block.
        yield from response.file(symbols.get_path(request.match_info["symbol_id"]))
    except (KeyError, FileNotFoundError, PermissionError):
        response.set_status(404)
def shutdown(request, response):
    """Shut down the server after closing every open project.

    :raises HTTPForbidden: when the server is not configured as local
    """
    config = Config.instance()
    if config.get_section_config("Server").getboolean("local", False) is False:
        raise HTTPForbidden(text="You can only stop a local server")
    log.info("Start shutting down the server")
    # close all the projects first
    controller = Controller.instance()
    tasks = []
    for project in controller.projects.values():
        # Bug fix: "asyncio. async(...)" is invalid since Python 3.7 ("async"
        # became a keyword); ensure_future() is the supported replacement.
        tasks.append(asyncio.ensure_future(project.close()))
    if tasks:
        done, _ = yield from asyncio.wait(tasks)
        for future in done:
            try:
                future.result()
            except Exception as e:
                log.error("Could not close project {}".format(e), exc_info=1)
                continue
    # then shutdown the server itself
    from gns3server.web.web_server import WebServer
    server = WebServer.instance()
    asyncio.ensure_future(server.shutdown_server())
    response.set_status(201)
def notification(request, response):
    """Stream project notifications as newline-delimited JSON (chunked HTTP).

    May auto-close the project once the last listener disconnects.
    """
    controller = Controller.instance()
    project = controller.get_project(request.match_info["project_id"])
    response.content_type = "application/json"
    response.set_status(200)
    response.enable_chunked_encoding()
    yield from response.prepare(request)
    with controller.notification.queue(project) as queue:
        while True:
            try:
                # The 5 is presumably a timeout in seconds — confirm against
                # the queue implementation.
                msg = yield from queue.get_json(5)
                response.write(("{}\n".format(msg)).encode("utf-8"))
            except asyncio.futures.CancelledError as e:
                break
            yield from response.drain()
    if project.auto_close:
        # To avoid trouble with client connecting disconnecting we sleep few seconds before checking
        # if someone else is not connected
        yield from asyncio.sleep(5)
        if not controller.notification.project_has_listeners(project):
            yield from project.close()
async def delete(request, response):
    """Permanently delete a project and drop it from the controller."""
    controller = Controller.instance()
    target = controller.get_project(request.match_info["project_id"])
    await target.delete()
    controller.remove_project(target)
    response.set_status(204)
async def write_file(request, response):
    """Write the uploaded request body to a file inside the project directory.

    NOTE(review): an empty path would make ``path[0]`` raise IndexError —
    presumably routing prevents this; confirm.
    """
    controller = Controller.instance()
    project = await controller.get_loaded_project(request.match_info["project_id"])
    path = request.match_info["path"]
    path = os.path.normpath(path).strip("/")
    # Raise error if user try to escape
    if path[0] == ".":
        raise aiohttp.web.HTTPForbidden()
    path = os.path.join(project.path, path)
    response.set_status(200)
    try:
        async with aiofiles.open(path, 'wb+') as f:
            while True:
                try:
                    chunk = await request.content.read(CHUNK_SIZE)
                except asyncio.TimeoutError:
                    raise aiohttp.web.HTTPRequestTimeout(text="Timeout when writing to file '{}'".format(path))
                if not chunk:
                    break
                await f.write(chunk)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
    except OSError as e:
        raise aiohttp.web.HTTPConflict(text=str(e))
def update(request, response):
    """Update the node endpoints of a link."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    link = project.get_link(request.match_info["link_id"])
    yield from link.update_nodes(request.json["nodes"])
    response.set_status(201)
    response.json(link)
def get_drawing(request, response):
    """Return a single drawing of a project as JSON."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    drawing = project.get_drawing(request.match_info["drawing_id"])
    response.set_status(200)
    response.json(drawing)
def update(request, response):
    """Apply the JSON payload to an existing drawing."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    drawing = project.get_drawing(request.match_info["drawing_id"])
    yield from drawing.update(**request.json)
    response.set_status(201)
    response.json(drawing)
def stop_capture(request, response):
    """Stop the packet capture running on a link."""
    controller = Controller.instance()
    project_id = request.match_info["project_id"]
    project = yield from controller.get_loaded_project(project_id)
    link = project.get_link(request.match_info["link_id"])
    yield from link.stop_capture()
    response.set_status(201)
    response.json(link)
def get_file(request, response):
    """Stream a file from the project directory to the client."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    rel_path = os.path.normpath(request.match_info["path"]).strip('/')
    # Raise error if user try to escape
    if rel_path[0] == ".":
        raise aiohttp.web.HTTPForbidden()
    full_path = os.path.join(project.path, rel_path)
    response.content_type = "application/octet-stream"
    response.set_status(200)
    response.enable_chunked_encoding()
    try:
        with open(full_path, "rb") as f:
            yield from response.prepare(request)
            # Read in 4 KiB chunks until EOF (empty bytes sentinel).
            for data in iter(lambda: f.read(4096), b""):
                yield from response.write(data)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
def write_file(request, response):
    """Write the uploaded request body to a file inside the project directory.

    NOTE(review): an empty path would make ``path[0]`` raise IndexError —
    presumably routing prevents this; confirm.
    """
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    path = request.match_info["path"]
    path = os.path.normpath(path).strip("/")
    # Raise error if user try to escape
    if path[0] == ".":
        raise aiohttp.web.HTTPForbidden()
    path = os.path.join(project.path, path)
    response.set_status(200)
    try:
        with open(path, 'wb+') as f:
            while True:
                packet = yield from request.content.read(512)
                if not packet:
                    break
                f.write(packet)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
    except OSError as e:
        raise aiohttp.web.HTTPConflict(text=str(e))
def import_project(request, response):
    """Import a project archive uploaded in the request body."""
    controller = Controller.instance()
    if request.get("path"):
        config = Config.instance()
        if config.get_section_config("Server").getboolean("local", False) is False:
            response.set_status(403)
            return
    location = request.json.get("path")
    name = request.json.get("name")
    # We write the content to a temporary location and after we extract it all.
    # It could be more optimal to stream this but it is not implemented in Python.
    # Spooled means the file is temporary kept in memory until max_size is reached
    try:
        with tempfile.SpooledTemporaryFile(max_size=10000) as buffered:
            while True:
                data = yield from request.content.read(512)
                if not data:
                    break
                buffered.write(data)
            # The spooled file must still be open when handed to the importer.
            project = yield from import_project(controller, request.match_info["project_id"], buffered, location=location, name=name)
    except OSError as e:
        raise aiohttp.web.HTTPInternalServerError(text="Could not import the project: {}".format(e))
    response.json(project)
    response.set_status(201)
def export_project(request, response):
    """Export a project as a streamed .gns3project archive.

    NOTE(review): the inner ``export_project(...)`` call presumably
    resolves to a module-level helper shadowed by this handler's name —
    confirm against the surrounding imports.
    """
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    try:
        with tempfile.TemporaryDirectory() as tmp_dir:
            # int() first: bool("0") would be True since any non-empty string
            # is truthy.
            datas = yield from export_project(project, tmp_dir, include_images=bool(int(request.query.get("include_images", "0"))))
            # We need to do that now because export could failed and raise an HTTP error
            # that why response start need to be the later possible
            response.content_type = 'application/gns3project'
            response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3project"'.format(project.name)
            response.enable_chunked_encoding()
            yield from response.prepare(request)
            for data in datas:
                response.write(data)
                yield from response.drain()
            yield from response.write_eof()
    # Will be raise if you have no space left or permission issue on your temporary directory
    # RuntimeError: something was wrong during the zip process
    except (OSError, RuntimeError) as e:
        raise aiohttp.web.HTTPNotFound(text="Can't export project: {}".format(str(e)))
def notification_ws(request, response):
    """Stream project notifications to a client over a WebSocket.

    May auto-close the project once the last listener disconnects.
    """
    controller = Controller.instance()
    project = controller.get_project(request.match_info["project_id"])
    ws = aiohttp.web.WebSocketResponse()
    yield from ws.prepare(request)
    # Bug fix: "asyncio. async(...)" is invalid since Python 3.7 ("async"
    # became a keyword); ensure_future() is the supported replacement.
    asyncio.ensure_future(process_websocket(ws))
    with controller.notification.queue(project) as queue:
        while True:
            try:
                notification = yield from queue.get_json(5)
            except asyncio.futures.CancelledError:
                break
            if ws.closed:
                break
            ws.send_str(notification)
    if project.auto_close:
        # To avoid trouble with client connecting disconnecting we sleep few seconds before checking
        # if someone else is not connected
        yield from asyncio.sleep(5)
        if not controller.notification.project_has_listeners(project):
            yield from project.close()
    return ws
async def duplicate(request, response):
    """Duplicate a project, optionally at a caller-supplied path."""
    controller = Controller.instance()
    project = await controller.get_loaded_project(request.match_info["project_id"])
    if request.json.get("path"):
        # A custom destination path is only honoured on a local server.
        config = Config.instance()
        if config.get_section_config("Server").getboolean("local", False) is False:
            response.set_status(403)
            return
        location = request.json.get("path")
    else:
        location = None
    reset_macs = request.json.get("reset_mac_addresses", False)
    new_project = await project.duplicate(name=request.json.get("name"),
                                          location=location,
                                          reset_mac_addresses=reset_macs)
    response.json(new_project)
    response.set_status(201)
def shutdown(request, response):
    """Shut down the server after closing every open project.

    :raises HTTPForbidden: when the server is not configured as local
    """
    config = Config.instance()
    if config.get_section_config("Server").getboolean("local", False) is False:
        raise HTTPForbidden(text="You can only stop a local server")
    log.info("Start shutting down the server")
    # close all the projects first
    controller = Controller.instance()
    tasks = []
    for project in controller.projects.values():
        # Bug fix: asyncio.async() was removed in Python 3.7 ("async" became
        # a keyword); ensure_future() is the supported replacement.
        tasks.append(asyncio.ensure_future(project.close()))
    if tasks:
        done, _ = yield from asyncio.wait(tasks)
        for future in done:
            try:
                future.result()
            except Exception as e:
                log.error("Could not close project {}".format(e), exc_info=1)
                continue
    # then shutdown the server itself
    from gns3server.web.web_server import WebServer
    server = WebServer.instance()
    asyncio.ensure_future(server.shutdown_server())
    response.set_status(201)
def start_capture(request, response):
    """Start a packet capture on a link with optional type and file name."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    link = project.get_link(request.match_info["link_id"])
    data_link_type = request.json.get("data_link_type", "DLT_EN10MB")
    capture_file_name = request.json.get("capture_file_name")
    yield from link.start_capture(data_link_type=data_link_type,
                                  capture_file_name=capture_file_name)
    response.set_status(201)
    response.json(link)
def create(request, response):
    """Create a new drawing in a project from the JSON payload."""
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    new_drawing = yield from project.add_drawing(**request.json)
    response.set_status(201)
    response.json(new_drawing)
def pcap(request, response):
    """Stream a link's pcap capture file, following it as it grows."""
    project = yield from Controller.instance().get_loaded_project(request.match_info["project_id"])
    link = project.get_link(request.match_info["link_id"])
    if link.capture_file_path is None:
        raise aiohttp.web.HTTPNotFound(text="pcap file not found")
    try:
        with open(link.capture_file_path, "rb") as f:
            response.content_type = "application/vnd.tcpdump.pcap"
            response.set_status(200)
            response.enable_chunked_encoding()
            # Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
            response.content_length = None
            response.start(request)
            while True:
                chunk = f.read(4096)
                if not chunk:
                    # No break: the capture may still be writing, so keep
                    # polling the file for more data.
                    yield from asyncio.sleep(0.1)
                yield from response.write(chunk)
    except OSError:
        raise aiohttp.web.HTTPNotFound(text="pcap file {} not found or not accessible".format(link.capture_file_path))
async def notification(request, response):
    """Stream project notifications over HTTP long-polling (NDJSON).

    May auto-close the project once the last listener disconnects.
    """
    controller = Controller.instance()
    project = controller.get_project(request.match_info["project_id"])
    response.content_type = "application/json"
    response.set_status(200)
    response.enable_chunked_encoding()
    await response.prepare(request)
    log.info("New client has connected to the notification stream for project ID '{}' (HTTP long-polling method)".format(project.id))
    try:
        with controller.notification.project_queue(project.id) as queue:
            while True:
                msg = await queue.get_json(5)
                await response.write(("{}\n".format(msg)).encode("utf-8"))
    finally:
        # Runs on client disconnect or any error in the loop above.
        log.info("Client has disconnected from notification for project ID '{}' (HTTP long-polling method)".format(project.id))
        if project.auto_close:
            # To avoid trouble with client connecting disconnecting we sleep few seconds before checking
            # if someone else is not connected
            await asyncio.sleep(5)
            if not controller.notification.project_has_listeners(project.id):
                log.info("Project '{}' is automatically closing due to no client listening".format(project.id))
                await project.close()
async def upload(request, response):
    """Upload a symbol file and refresh the controller's symbol cache."""
    controller = Controller.instance()
    symbol_id = urllib.parse.unquote(request.match_info["symbol_id"])
    # basename() prevents path traversal out of the symbols directory.
    path = os.path.join(controller.symbols.symbols_path(), os.path.basename(symbol_id))
    try:
        with open(path, "wb") as f:
            while True:
                try:
                    chunk = await request.content.read(1024)
                except asyncio.TimeoutError:
                    raise aiohttp.web.HTTPRequestTimeout(text="Timeout when writing to symbol '{}'".format(path))
                if not chunk:
                    break
                f.write(chunk)
    except (UnicodeEncodeError, OSError) as e:
        raise aiohttp.web.HTTPConflict(text="Could not write symbol file '{}': {}".format(path, e))
    # Reset the symbol list
    controller.symbols.list()
    response.set_status(204)
def create(request, response):
    """Create a template from the JSON payload and refresh the symbol cache."""
    controller = Controller.instance()
    new_template = controller.template_manager.add_template(request.json)
    # Reset the symbol list
    controller.symbols.list()
    response.set_status(201)
    response.json(new_template)
async def restore(request, response):
    """Restore a project snapshot and return the restored project."""
    controller = Controller.instance()
    parent = controller.get_project(request.match_info["project_id"])
    snapshot = parent.get_snapshot(request.match_info["snapshot_id"])
    restored = await snapshot.restore()
    response.set_status(201)
    response.json(restored)
async def raw(request, response):
    """Stream a symbol file; respond 404 when it is missing or unreadable."""
    symbols = Controller.instance().symbols
    try:
        # get_path may raise KeyError for an unknown symbol id, so it stays
        # inside the try block.
        await response.stream_file(symbols.get_path(request.match_info["symbol_id"]))
    except (KeyError, OSError) as e:
        log.warning("Could not get symbol file: {}".format(e))
        response.set_status(404)
async def list_appliances(request, response):
    """List available appliances, optionally refreshing them first."""
    controller = Controller.instance()
    if request.query.get("update", "no").lower() == "yes":
        await controller.appliance_manager.download_appliances()
    theme = request.query.get("symbol_theme", "Classic")
    controller.appliance_manager.load_appliances(symbol_theme=theme)
    response.json(list(controller.appliance_manager.appliances.values()))
def create(request, response):
    """Create a node on a compute inside a project.

    Consumes compute_id, name and node_id from the payload; everything
    else is forwarded as node properties.
    """
    controller = Controller.instance()
    payload = request.json
    compute = controller.get_compute(payload.pop("compute_id"))
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    node = yield from project.add_node(compute, payload.pop("name"), payload.pop("node_id", None), **payload)
    response.set_status(201)
    response.json(node)
async def update(request, response):
    """Update a project's mutable properties from the JSON payload."""
    project = Controller.instance().get_project(request.match_info["project_id"])
    # project_id is only meaningful at creation time; drop it if present.
    request.json.pop("project_id", None)
    await project.update(**request.json)
    response.set_status(200)
    response.json(project)