def delete(request, response):
    """Delete a VirtualBox node.

    Looking up the project first ensures an unknown project_id fails
    before the node is touched.
    """
    node_id = request.match_info["node_id"]
    # check the project_id exists (raises if it does not)
    ProjectManager.instance().get_project(request.match_info["project_id"])
    yield from VirtualBox.instance().delete_node(node_id)
    response.set_status(204)
def test_get_application_id_multiple_project(loop, iou):
    """Application ids keep incrementing across projects for the IOU manager."""
    node_ids = [str(uuid.uuid4()) for _ in range(3)]
    manager = ProjectManager.instance()
    project1 = manager.create_project(project_id=str(uuid.uuid4()))
    project2 = manager.create_project(project_id=str(uuid.uuid4()))
    created = []
    for label, project, node_id in (("PC 1", project1, node_ids[0]),
                                    ("PC 2", project1, node_ids[1]),
                                    ("PC 2", project2, node_ids[2])):
        created.append(loop.run_until_complete(iou.create_node(label, project.id, node_id)))
    # Ids are global to the manager, not per project
    assert [node.application_id for node in created] == [1, 2, 3]
def test_get_application_id_multiple_project(loop, iou):
    """get_application_id() returns increasing ids across different projects."""
    vm_ids = [str(uuid.uuid4()) for _ in range(3)]
    pm = ProjectManager.instance()
    project_a = pm.create_project(project_id=str(uuid.uuid4()))
    project_b = pm.create_project(project_id=str(uuid.uuid4()))
    for label, project, vm_id in (("PC 1", project_a, vm_ids[0]),
                                  ("PC 2", project_a, vm_ids[1]),
                                  ("PC 2", project_b, vm_ids[2])):
        loop.run_until_complete(iou.create_node(label, project.id, vm_id))
    # Ids are allocated manager-wide starting at 1
    for expected, vm_id in enumerate(vm_ids, start=1):
        assert iou.get_application_id(vm_id) == expected
async def list_files(request, response):
    """Return the list of files belonging to a project as JSON."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    listing = await project.list_files()
    response.json(listing)
    response.set_status(200)
async def notification(request, response):
    """Stream project notification events to the client as line-delimited JSON.

    Keeps the HTTP response open with chunked encoding, forwarding each
    (action, event) pair from the project's listen queue and sending a ping
    message after every 5 seconds without an event.
    """
    pm = ProjectManager.instance()
    project = pm.get_project(request.match_info["project_id"])
    response.content_type = "application/json"
    response.set_status(200)
    response.enable_chunked_encoding()
    response.start(request)
    queue = project.get_listen_queue()
    # Count this listener so close() can refuse to close a watched project
    ProjectHandler._notifications_listening.setdefault(project.id, 0)
    ProjectHandler._notifications_listening[project.id] += 1
    # Initial ping so the client receives data immediately
    await response.write("{}\n".format(json.dumps(ProjectHandler._getPingMessage())).encode("utf-8"))
    while True:
        try:
            (action, msg) = await asyncio.wait_for(queue.get(), 5)
            if hasattr(msg, "__json__"):
                msg = json.dumps({"action": action, "event": msg.__json__()}, sort_keys=True)
            else:
                msg = json.dumps({"action": action, "event": msg}, sort_keys=True)
            log.debug("Send notification: %s", msg)
            await response.write(("{}\n".format(msg)).encode("utf-8"))
        except asyncio.CancelledError:
            # Fix: the original loop had no exit path, so the cleanup below was
            # unreachable and the queue/listener count leaked on disconnect.
            # Break on cancellation, matching the yield-from variant of this handler.
            break
        except asyncio.TimeoutError:
            # No event within 5 seconds: send a keep-alive ping
            await response.write("{}\n".format(json.dumps(ProjectHandler._getPingMessage())).encode("utf-8"))
    project.stop_listen_queue(queue)
    if project.id in ProjectHandler._notifications_listening:
        ProjectHandler._notifications_listening[project.id] -= 1
def stream_file(request, response):
    """Stream a project file to the client, following it as it grows (tail -f style)."""
    pm = ProjectManager.instance()
    project = pm.get_project(request.match_info["project_id"])
    path = request.match_info["path"]
    path = os.path.normpath(path)
    # Raise an error if user try to escape
    if path[0] == ".":
        # Fix: instantiate the exception (was `raise aiohttp.web.HTTPForbidden`
        # without parentheses), consistent with the handlers below.
        raise aiohttp.web.HTTPForbidden()
    path = os.path.join(project.path, path)
    response.content_type = "application/octet-stream"
    response.set_status(200)
    response.enable_chunked_encoding()
    # Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
    response.content_length = None
    try:
        with open(path, "rb") as f:
            response.start(request)
            while True:
                data = f.read(4096)
                if not data:
                    # Fix: at EOF wait for more data instead of also writing an
                    # empty chunk on every iteration.
                    yield from asyncio.sleep(0.1)
                    continue
                yield from response.write(data)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
async def delete(request, response):
    """Delete a project and drop it from the manager's registry."""
    manager = ProjectManager.instance()
    project = manager.get_project(request.match_info["project_id"])
    await project.delete()
    manager.remove_project(project.id)
    response.set_status(204)
def notification(request, response):
    """Stream project notification events to the client as line-delimited JSON.

    Holds the connection open with chunked encoding, forwarding each
    (action, event) pair from the project's listen queue and sending a ping
    message after every 5 seconds without an event. Exits when the handler
    task is cancelled (client disconnect), then releases the queue.
    """
    pm = ProjectManager.instance()
    project = pm.get_project(request.match_info["project_id"])
    response.content_type = "application/json"
    response.set_status(200)
    response.enable_chunked_encoding()
    # Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
    response.content_length = None
    response.start(request)
    queue = project.get_listen_queue()
    # Count this listener so close() can refuse to close a watched project
    ProjectHandler._notifications_listening.setdefault(project.id, 0)
    ProjectHandler._notifications_listening[project.id] += 1
    # Initial ping so the client receives data immediately
    response.write("{}\n".format(json.dumps(ProjectHandler._getPingMessage())).encode("utf-8"))
    while True:
        try:
            (action, msg) = yield from asyncio.wait_for(queue.get(), 5)
            # Events may expose __json__() or already be JSON-serializable values
            if hasattr(msg, "__json__"):
                msg = json.dumps({"action": action, "event": msg.__json__()}, sort_keys=True)
            else:
                msg = json.dumps({"action": action, "event": msg}, sort_keys=True)
            log.debug("Send notification: %s", msg)
            response.write(("{}\n".format(msg)).encode("utf-8"))
        except asyncio.futures.CancelledError as e:
            # Handler task cancelled (client gone): stop streaming and clean up
            break
        except asyncio.futures.TimeoutError:
            # No event within 5 seconds: send a keep-alive ping
            response.write("{}\n".format(json.dumps(ProjectHandler._getPingMessage())).encode("utf-8"))
    project.stop_listen_queue(queue)
    if project.id in ProjectHandler._notifications_listening:
        ProjectHandler._notifications_listening[project.id] -= 1
async def test_stream_file(compute_api, tmpdir):
    """Streaming endpoint returns file content; missing or escaping paths give 404."""
    with patch("gns3server.config.Config.get_section_config", return_value={"projects_path": str(tmpdir)}):
        project = ProjectManager.instance().create_project(project_id="01010203-0405-0607-0809-0a0b0c0d0e0b")

    with open(os.path.join(project.path, "hello"), "w+") as f:
        f.write("world")

    # Existing file: content is streamed back verbatim
    response = await compute_api.get("/projects/{project_id}/files/hello".format(project_id=project.id), raw=True)
    assert response.status == 200
    assert response.body == b"world"

    # Nonexistent file
    response = await compute_api.get("/projects/{project_id}/files/false".format(project_id=project.id), raw=True)
    assert response.status == 404

    # Attempted escape from the project directory
    response = await compute_api.get("/projects/{project_id}/files/../hello".format(project_id=project.id), raw=True)
    assert response.status == 404
async def write_file(request, response):
    """Write the request body into a file inside the project directory."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    path = os.path.normpath(request.match_info["path"])

    # Raise error if user try to escape
    if path[0] == ".":
        raise aiohttp.web.HTTPForbidden()

    path = os.path.join(project.path, path)
    response.set_status(200)
    try:
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, 'wb+') as f:
            while True:
                try:
                    chunk = await request.content.read(CHUNK_SIZE)
                except asyncio.TimeoutError:
                    raise aiohttp.web.HTTPRequestTimeout(text="Timeout when writing to file '{}'".format(path))
                if chunk:
                    f.write(chunk)
                else:
                    # End of request body
                    break
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
def import_project(request, response):
    """Create a project from an uploaded .gns3project archive."""
    controller = ProjectManager.instance()
    project = controller.create_project(project_id=request.match_info["project_id"])
    # We write the content to a temporary location and after we extract it all.
    # It could be more optimal to stream this but it is not implemented in Python.
    # Spooled means the file is temporary kept in memory until max_size is reached
    try:
        with tempfile.SpooledTemporaryFile(max_size=10000) as temp:
            while True:
                packet = yield from request.content.read(512)
                if not packet:
                    break
                temp.write(packet)
            project.import_zip(temp, gns3vm=bool(int(request.GET.get("gns3vm", "1"))))
    except OSError as e:
        raise aiohttp.web.HTTPInternalServerError(text="Could not import the project: {}".format(e))
    response.json(project)
    response.set_status(201)
def write_file(request, response):
    """Store the request body as a file under the project directory."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    relative = os.path.normpath(request.match_info["path"])

    # Raise error if user try to escape
    if relative[0] == ".":
        raise aiohttp.web.HTTPForbidden()

    path = os.path.join(project.path, relative)
    response.set_status(200)
    try:
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, 'wb+') as output:
            while True:
                packet = yield from request.content.read(512)
                if not packet:
                    # Request body exhausted
                    break
                output.write(packet)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
async def notification(request, response):
    """Stream project notification events to the client as line-delimited JSON.

    Keeps the response open with chunked encoding, forwarding each
    (action, event) pair from the project's listen queue and sending a ping
    message after every 5 seconds without an event.
    """
    pm = ProjectManager.instance()
    project = pm.get_project(request.match_info["project_id"])
    response.content_type = "application/json"
    response.set_status(200)
    response.enable_chunked_encoding()
    response.start(request)
    queue = project.get_listen_queue()
    # Count this listener so close() can refuse to close a watched project
    ProjectHandler._notifications_listening.setdefault(project.id, 0)
    ProjectHandler._notifications_listening[project.id] += 1
    # Initial ping so the client receives data immediately
    await response.write("{}\n".format(json.dumps(ProjectHandler._getPingMessage())).encode("utf-8"))
    while True:
        try:
            (action, msg) = await asyncio.wait_for(queue.get(), 5)
            if hasattr(msg, "__json__"):
                msg = json.dumps({"action": action, "event": msg.__json__()}, sort_keys=True)
            else:
                msg = json.dumps({"action": action, "event": msg}, sort_keys=True)
            log.debug("Send notification: %s", msg)
            await response.write(("{}\n".format(msg)).encode("utf-8"))
        except asyncio.futures.CancelledError:
            # Fix: the original loop never exited, making the cleanup below
            # unreachable (queue and listener count leaked on disconnect).
            # Break on cancellation, matching the yield-from variant of this handler.
            break
        except asyncio.futures.TimeoutError:
            # No event within 5 seconds: send a keep-alive ping
            await response.write("{}\n".format(json.dumps(ProjectHandler._getPingMessage())).encode("utf-8"))
    project.stop_listen_queue(queue)
    if project.id in ProjectHandler._notifications_listening:
        ProjectHandler._notifications_listening[project.id] -= 1
def stream_file(request, response):
    """Stream a project file to the client, following it as it grows (tail -f style)."""
    pm = ProjectManager.instance()
    project = pm.get_project(request.match_info["project_id"])
    path = request.match_info["path"]
    path = os.path.normpath(path)
    # Raise an error if user try to escape
    if path[0] == ".":
        raise aiohttp.web.HTTPForbidden()
    path = os.path.join(project.path, path)
    response.content_type = "application/octet-stream"
    response.set_status(200)
    response.enable_chunked_encoding()
    try:
        with open(path, "rb") as f:
            yield from response.prepare(request)
            while True:
                data = f.read(4096)
                if not data:
                    # Fix: at EOF wait for more data instead of also writing an
                    # empty chunk on every iteration.
                    yield from asyncio.sleep(0.1)
                    continue
                yield from response.write(data)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
def test_get_mac_id_multiple_project(loop, port_manager):
    """MAC ids are allocated per project, restarting from 0 in a new project."""
    # Cleanup the VPCS object so the test starts from a fresh singleton
    VPCS._instance = None
    vpcs = VPCS.instance()
    vpcs.port_manager = port_manager
    node_ids = [str(uuid.uuid4()) for _ in range(3)]
    first = ProjectManager.instance().create_project(project_id=str(uuid.uuid4()))
    second = ProjectManager.instance().create_project(project_id=str(uuid.uuid4()))
    for label, project, node_id in (("PC 1", first, node_ids[0]),
                                    ("PC 2", first, node_ids[1]),
                                    ("PC 2", second, node_ids[2])):
        loop.run_until_complete(vpcs.create_node(label, project.id, node_id))
    # Two nodes in the first project, one in the second
    assert [vpcs.get_mac_id(node_id) for node_id in node_ids] == [0, 1, 0]
async def update_project(request, response):
    """Apply the variables from the request body to a project and return it."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    new_variables = request.json.get("variables", None)
    await project.update(variables=new_variables)
    response.set_status(200)
    response.json(project)
def create_project(request, response):
    """Create a project from the JSON request body and return it with a 201."""
    body = request.json
    project = ProjectManager.instance().create_project(name=body.get("name"),
                                                       path=body.get("path"),
                                                       project_id=body.get("project_id"))
    response.set_status(201)
    response.json(project)
def allocate_udp_port(request, response):
    """Reserve a free UDP port for the project and return it as JSON."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    udp_port = PortManager.instance().get_free_udp_port(project)
    response.set_status(201)
    response.json({"udp_port": udp_port})
async def update_project(request, response):
    """Update a project's variables from the request body and echo it back."""
    manager = ProjectManager.instance()
    project = manager.get_project(request.match_info["project_id"])
    await project.update(variables=request.json.get("variables", None))
    response.set_status(200)
    response.json(project)
def test_get_mac_id_multiple_project(loop, port_manager):
    """get_mac_id() restarts numbering for each project."""
    # Cleanup the VPCS object
    VPCS._instance = None
    vpcs = VPCS.instance()
    vpcs.port_manager = port_manager
    vm_ids = [str(uuid.uuid4()) for _ in range(3)]
    pm = ProjectManager.instance()
    project_a = pm.create_project(project_id=str(uuid.uuid4()))
    project_b = pm.create_project(project_id=str(uuid.uuid4()))
    specs = zip(("PC 1", "PC 2", "PC 2"), (project_a, project_a, project_b), vm_ids)
    for name, project, vm_id in specs:
        loop.run_until_complete(vpcs.create_node(name, project.id, vm_id))
    for vm_id, expected in zip(vm_ids, (0, 1, 0)):
        assert vpcs.get_mac_id(vm_id) == expected
def start(request, response):
    """Start a VirtualBox VM, refusing if VT-x/AMD-V is held by other software."""
    vm = VirtualBox.instance().get_node(request.match_info["node_id"],
                                        project_id=request.match_info["project_id"])
    needs_hw_virt = yield from vm.check_hw_virtualization()
    if needs_hw_virt:
        # Only consult the project manager when the VM actually needs VT-x/AMD-V
        if ProjectManager.instance().check_hardware_virtualization(vm) is False:
            raise HTTPConflict(text="Cannot start VM because hardware virtualization (VT-x/AMD-V) is already used by another software like VMware or KVM (on Linux)")
    yield from vm.start()
    response.set_status(204)
def start(request, response):
    """Start a Qemu VM; reject if KVM would conflict with another hypervisor."""
    qemu_manager = Qemu.instance()
    vm = qemu_manager.get_node(request.match_info["node_id"],
                               project_id=request.match_info["project_id"])
    kvm_active = (sys.platform.startswith("linux")
                  and qemu_manager.config.get_section_config("Qemu").getboolean("enable_kvm", True)
                  and "-no-kvm" not in vm.options)
    if kvm_active:
        if ProjectManager.instance().check_hardware_virtualization(vm) is False:
            raise aiohttp.web.HTTPConflict(text="Cannot start VM with KVM enabled because hardware virtualization (VT-x/AMD-V) is already used by another software like VMware or VirtualBox")
    yield from vm.start()
    response.json(vm)
def start(request, response):
    """Start a VMware VM, refusing if VT-x/AMD-V is held by other software."""
    vm = VMware.instance().get_node(request.match_info["node_id"],
                                    project_id=request.match_info["project_id"])
    if vm.check_hw_virtualization():
        # The VM needs hardware virtualization; make sure nothing else holds it
        if ProjectManager.instance().check_hardware_virtualization(vm) is False:
            raise HTTPConflict(text="Cannot start VM because hardware virtualization (VT-x/AMD-V) is already used by another software like VirtualBox or KVM (on Linux)")
    yield from vm.start()
    response.set_status(204)
def create_project(request, response):
    """Create a new project described by the JSON request body."""
    fields = {key: request.json.get(key) for key in ("name", "path", "project_id")}
    project = ProjectManager.instance().create_project(**fields)
    response.set_status(201)
    response.json(project)
async def get_file(request, response):
    """Stream a file from a project directory, blocking path escapes."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    relative_path = os.path.normpath(request.match_info["path"])

    # Raise error if user try to escape
    if not is_safe_path(relative_path, project.path):
        raise aiohttp.web.HTTPForbidden()

    await response.stream_file(os.path.join(project.path, relative_path))
def test_write_file(http_compute, tmpdir):
    """POSTed body is written into the project; escaping paths yield 404."""
    with patch("gns3server.config.Config.get_section_config", return_value={"projects_path": str(tmpdir)}):
        project = ProjectManager.instance().create_project(project_id="01010203-0405-0607-0809-0a0b0c0d0e0b")

    response = http_compute.post("/projects/{project_id}/files/hello".format(project_id=project.id), body="world", raw=True)
    assert response.status == 200
    with open(os.path.join(project.path, "hello")) as f:
        written = f.read()
    assert written == "world"

    # Attempted escape from the project directory
    response = http_compute.post("/projects/{project_id}/files/../hello".format(project_id=project.id), raw=True)
    assert response.status == 404
async def get_file(request, response):
    """Stream a file from a project directory, rejecting escape attempts."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    clean_path = os.path.normpath(request.match_info["path"])

    # Raise error if user try to escape: after normpath an escaping path starts with "."
    if clean_path[0] == ".":
        raise aiohttp.web.HTTPForbidden()

    await response.stream_file(os.path.join(project.path, clean_path))
async def close(request, response):
    """Close a project unless other clients still listen to its notifications."""
    pm = ProjectManager.instance()
    project = pm.get_project(request.match_info["project_id"])
    listeners = ProjectHandler._notifications_listening.setdefault(project.id, 0)
    if listeners <= 1:
        await project.close()
        pm.remove_project(project.id)
        # Drop the listener counter; absence is fine
        ProjectHandler._notifications_listening.pop(project.id, None)
    else:
        log.warning("Skip project closing, another client is listening for project notifications")
    response.set_status(204)
def test_list_projects(http_compute):
    """Two created projects both appear in the listing endpoint."""
    ProjectManager.instance()._projects = {}
    for project_id in ("51010203-0405-0607-0809-0a0b0c0d0e0f",
                       "52010203-0405-0607-0809-0a0b0c0d0e0b"):
        response = http_compute.post("/projects", {"name": "test", "project_id": project_id})
        assert response.status == 201
    response = http_compute.get("/projects", example=True)
    assert response.status == 200
    listed_ids = [p["project_id"] for p in response.json]
    assert len(listed_ids) == 2
    assert "51010203-0405-0607-0809-0a0b0c0d0e0f" in listed_ids
def export_project(request, response):
    """Stream the project as a .gns3project attachment using chunked encoding."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    response.content_type = 'application/gns3project'
    response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3project"'.format(project.name)
    response.enable_chunked_encoding()
    yield from response.prepare(request)
    include_images = bool(int(request.json.get("include_images", "0")))
    for chunk in project.export(include_images=include_images):
        response.write(chunk)
        # Apply backpressure after each chunk
        yield from response.drain()
    yield from response.write_eof()
async def test_list_projects(compute_api):
    """Listing returns every project created through the API."""
    ProjectManager.instance()._projects = {}
    for project_id in ("51010203-0405-0607-0809-0a0b0c0d0e0f",
                       "52010203-0405-0607-0809-0a0b0c0d0e0b"):
        response = await compute_api.post("/projects", {"name": "test", "project_id": project_id})
        assert response.status == 201
    response = await compute_api.get("/projects")
    assert response.status == 200
    ids = [p["project_id"] for p in response.json]
    assert len(ids) == 2
    assert "51010203-0405-0607-0809-0a0b0c0d0e0f" in ids
async def start(request, response):
    """Start a Qemu VM, guarding against hardware-acceleration conflicts."""
    qemu_manager = Qemu.instance()
    vm = qemu_manager.get_node(request.match_info["node_id"],
                               project_id=request.match_info["project_id"])
    hardware_accel = qemu_manager.config.get_section_config("Qemu").getboolean("enable_hardware_acceleration", True)
    if sys.platform.startswith("linux"):
        # the enable_kvm option was used before version 2.0 and has priority
        enable_kvm = qemu_manager.config.get_section_config("Qemu").getboolean("enable_kvm")
        if enable_kvm is not None:
            hardware_accel = enable_kvm
    accel_requested = hardware_accel and "-no-kvm" not in vm.options and "-no-hax" not in vm.options
    if accel_requested and ProjectManager.instance().check_hardware_virtualization(vm) is False:
        raise aiohttp.web.HTTPConflict(text="Cannot start VM with hardware acceleration (KVM/HAX) enabled because hardware virtualization (VT-x/AMD-V) is already used by another software like VMware or VirtualBox")
    await vm.start()
    response.json(vm)
def export_project(request, response):
    """Send the project archive to the client as a chunked download."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    disposition = 'attachment; filename="{}.gns3project"'.format(project.name)
    response.content_type = 'application/gns3project'
    response.headers['CONTENT-DISPOSITION'] = disposition
    response.enable_chunked_encoding()
    yield from response.prepare(request)
    include_images = bool(int(request.json.get("include_images", "0")))
    for data in project.export(include_images=include_images):
        response.write(data)
        yield from response.drain()
    yield from response.write_eof()
def start(request, response):
    """Start a Qemu VM, refusing KVM when hardware virtualization is taken."""
    manager = Qemu.instance()
    vm = manager.get_node(request.match_info["node_id"],
                          project_id=request.match_info["project_id"])
    if sys.platform.startswith("linux") \
            and manager.config.get_section_config("Qemu").getboolean("enable_kvm", True) \
            and "-no-kvm" not in vm.options:
        # KVM will be used: check nothing else already owns VT-x/AMD-V
        if ProjectManager.instance().check_hardware_virtualization(vm) is False:
            raise aiohttp.web.HTTPConflict(text="Cannot start VM with KVM enabled because hardware virtualization (VT-x/AMD-V) is already used by another software like VMware or VirtualBox")
    yield from vm.start()
    response.json(vm)
def export_project(request, response):
    """Stream the project archive as a .gns3project attachment."""
    project = ProjectManager.instance().get_project(request.match_info["project_id"])
    response.content_type = 'application/gns3project'
    response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3project"'.format(project.name)
    response.enable_chunked_encoding()
    # Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
    response.content_length = None
    response.start(request)
    include_images = bool(int(request.json.get("include_images", "0")))
    for piece in project.export(include_images=include_images):
        response.write(piece)
        yield from response.drain()
    yield from response.write_eof()
def test_write_file(http_compute, tmpdir):
    """POST writes a file into the project; escaping paths are forbidden (403)."""
    with patch("gns3server.config.Config.get_section_config", return_value={"projects_path": str(tmpdir)}):
        project = ProjectManager.instance().create_project(project_id="01010203-0405-0607-0809-0a0b0c0d0e0b")

    response = http_compute.post("/projects/{project_id}/files/hello".format(project_id=project.id), body="world", raw=True)
    assert response.status == 200
    with open(os.path.join(project.path, "hello")) as stored:
        assert stored.read() == "world"

    # Path escape must be rejected outright
    response = http_compute.post("/projects/{project_id}/files/../hello".format(project_id=project.id), raw=True)
    assert response.status == 403
def import_project(request, response):
    """Build a new project by extracting an uploaded archive."""
    pm = ProjectManager.instance()
    project_id = request.match_info["project_id"]
    project = pm.create_project(project_id=project_id)
    # We write the content to a temporary location and after we extract it all.
    # It could be more optimal to stream this but it is not implemented in Python.
    # Spooled means the file is temporary kept in memory until max_size is reached
    try:
        with tempfile.SpooledTemporaryFile(max_size=10000) as buffered:
            while True:
                chunk = yield from request.content.read(512)
                if not chunk:
                    break
                buffered.write(chunk)
            project.import_zip(buffered, gns3vm=bool(int(request.GET.get("gns3vm", "1"))))
    except OSError as e:
        raise aiohttp.web.HTTPInternalServerError(text="Could not import the project: {}".format(e))
    response.json(project)
    response.set_status(201)
def test_list_projects(http_compute):
    """GET /projects lists every project created via POST."""
    ProjectManager.instance()._projects = {}
    created_ids = ("51010203-0405-0607-0809-0a0b0c0d0e0f",
                   "52010203-0405-0607-0809-0a0b0c0d0e0b")
    for pid in created_ids:
        response = http_compute.post("/projects", {"name": "test", "project_id": pid})
        assert response.status == 201
    response = http_compute.get("/projects", example=True)
    assert response.status == 200
    returned = [p["project_id"] for p in response.json]
    assert len(returned) == 2
    assert "51010203-0405-0607-0809-0a0b0c0d0e0f" in returned
def test_project_not_found():
    """Fetching an unknown project id raises HTTPNotFound."""
    manager = ProjectManager.instance()
    with pytest.raises(aiohttp.web.HTTPNotFound):
        manager.get_project('00010203-0405-0607-0809-000000000000')
def compute(request, response):
    """Render the compute status page from its template."""
    response.template("compute.html",
                      port_manager=PortManager.instance(),
                      project_manager=ProjectManager.instance())
def show(request, response):
    """Return the requested project as JSON."""
    manager = ProjectManager.instance()
    response.json(manager.get_project(request.match_info["project_id"]))
async def delete(request, response):
    """Delete a VMware node after validating the owning project exists."""
    # check the project_id exists (raises if it does not)
    ProjectManager.instance().get_project(request.match_info["project_id"])
    node_id = request.match_info["node_id"]
    await VMware.instance().delete_node(node_id)
    response.set_status(204)
async def delete(request, response):
    """Delete a VirtualBox node after validating the owning project exists."""
    project_id = request.match_info["project_id"]
    # check the project_id exists (raises if it does not)
    ProjectManager.instance().get_project(project_id)
    await VirtualBox.instance().delete_node(request.match_info["node_id"])
    response.set_status(204)
def list_projects(request, response):
    """Return every known project as a JSON array."""
    manager = ProjectManager.instance()
    response.set_status(200)
    response.json(list(manager.projects))
def delete(request, response):
    """Delete a VMware node; an unknown project_id fails first."""
    # check the project_id exists (raises if it does not)
    ProjectManager.instance().get_project(request.match_info["project_id"])
    yield from VMware.instance().delete_node(request.match_info["node_id"])
    response.set_status(204)
def test_create_project():
    """create_project registers the project under the supplied id."""
    manager = ProjectManager.instance()
    created = manager.create_project(project_id='00010203-0405-0607-0809-0a0b0c0d0e0f')
    assert created == manager.get_project('00010203-0405-0607-0809-0a0b0c0d0e0f')