def test_export_disallow_some_type(tmpdir, project, async_run):
    """
    Disallow export for some node type
    """
    # Fix: docstring typo "Dissalow" corrected.
    path = project.path
    topology = {"topology": {"nodes": [{"node_type": "cloud"}]}}
    with open(os.path.join(path, "test.gns3"), 'w+') as f:
        json.dump(topology, f)
    # Cloud nodes are not exportable by default...
    with pytest.raises(aiohttp.web.HTTPConflict):
        z = async_run(export_project(project, str(tmpdir)))
    # ...but can be exported when explicitly allowed.
    z = async_run(export_project(project, str(tmpdir), allow_all_nodes=True))

    # VirtualBox is always disallowed
    topology = {
        "topology": {
            "nodes": [
                {
                    "node_type": "virtualbox",
                    "properties": {
                        "linked_clone": True
                    }
                }
            ]
        }
    }
    with open(os.path.join(path, "test.gns3"), 'w+') as f:
        json.dump(topology, f)
    with pytest.raises(aiohttp.web.HTTPConflict):
        z = async_run(export_project(project, str(tmpdir), allow_all_nodes=True))
def test_export_with_ignoring_snapshots(tmpdir, project, async_run):
    """The snapshots directory must not end up in the exported archive."""
    topology = {
        "topology": {
            "computes": [
                {
                    "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                    "host": "127.0.0.1",
                    "name": "Remote 1",
                    "port": 8001,
                    "protocol": "http"
                }
            ],
            "nodes": [
                {
                    "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                    "node_type": "vpcs"
                }
            ]
        }
    }
    with open(os.path.join(project.path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)

    # create snapshot directory
    snapshots_dir = os.path.join(project.path, 'snapshots')
    os.makedirs(snapshots_dir)
    Path(os.path.join(snapshots_dir, 'snap.gns3project')).touch()

    with aiozipstream.ZipFile() as stream:
        async_run(export_project(stream, project, str(tmpdir), keep_compute_id=True))
        async_run(write_file(str(tmpdir / 'zipfile.zip'), stream))

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        exported_names = [entry.filename for entry in archive.filelist]
        assert os.path.join('snapshots', 'snap.gns3project') not in exported_names
def test_export_with_images(tmpdir, project, async_run):
    """
    Fix absolute image path
    """
    # NOTE(review): a second ``test_export_with_images`` is defined later in
    # this file and shadows this one at import time — confirm which copy is
    # meant to survive.
    os.makedirs(str(tmpdir / "IOS"))
    with open(str(tmpdir / "IOS" / "test.image"), "w+") as img:
        img.write("AAA")

    topology = {
        "topology": {
            "nodes": [
                {
                    "properties": {
                        "image": "test.image"
                    },
                    "node_type": "dynamips"
                }
            ]
        }
    }
    with open(os.path.join(project.path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)

    with patch("gns3server.compute.Dynamips.get_images_directory", return_value=str(tmpdir / "IOS"),):
        chunks = async_run(export_project(project, str(tmpdir), include_images=True))

    with open(str(tmpdir / 'zipfile.zip'), 'wb') as out:
        for chunk in chunks:
            out.write(chunk)

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        # getinfo raises KeyError if the image was not archived
        archive.getinfo("images/IOS/test.image")
def test_export_with_images(tmpdir, project, async_run):
    """
    Fix absolute image path
    """
    os.makedirs(str(tmpdir / "IOS"))
    with open(str(tmpdir / "IOS" / "test.image"), "w+") as img:
        img.write("AAA")

    topology = {
        "topology": {
            "nodes": [
                {
                    "properties": {
                        "image": "test.image"
                    },
                    "node_type": "dynamips"
                }
            ]
        }
    }
    with open(os.path.join(project.path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)

    with aiozipstream.ZipFile() as stream:
        with patch("gns3server.compute.Dynamips.get_images_directory", return_value=str(tmpdir / "IOS"),):
            async_run(export_project(stream, project, str(tmpdir), include_images=True))
        async_run(write_file(str(tmpdir / 'zipfile.zip'), stream))

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        # getinfo raises KeyError if the image was not archived
        archive.getinfo("images/IOS/test.image")
def test_export_vm(tmpdir, project, async_run):
    """
    If data is on a remote server export it locally before
    sending it in the archive.
    """
    compute = MagicMock()
    compute.id = "vm"
    compute.list_files = AsyncioMagicMock(return_value=[{"path": "vm-1/dynamips/test"}])

    # Fake file that will be download from the vm
    mock_response = AsyncioMagicMock()
    mock_response.content = AsyncioBytesIO()
    async_run(mock_response.content.write(b"HELLO"))
    mock_response.content.seek(0)
    compute.download_file = AsyncioMagicMock(return_value=mock_response)

    project._project_created_on_compute.add(compute)

    os.makedirs(os.path.join(project.path, "vm-1", "dynamips"))

    # The .gns3 should be renamed project.gns3 in order to simplify import
    with open(os.path.join(project.path, "test.gns3"), 'w+') as fd:
        fd.write("{}")

    with aiozipstream.ZipFile() as stream:
        async_run(export_project(stream, project, str(tmpdir)))
        assert compute.list_files.called
        async_run(write_file(str(tmpdir / 'zipfile.zip'), stream))

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        with archive.open("vm-1/dynamips/test") as entry:
            assert entry.read() == b"HELLO"
def export_project(request, response):
    """
    Export a project as a ``.gns3project`` archive streamed chunk by chunk
    into the HTTP response.

    :param request: aiohttp request; ``project_id`` comes from the route match
        info, ``include_images`` ("0"/"1") from the query string
    :param response: aiohttp streaming response
    :raises aiohttp.web.HTTPNotFound: when the export fails (disk full,
        permission problem, or zip error)
    """
    controller = Controller.instance()
    project = yield from controller.get_loaded_project(request.match_info["project_id"])
    try:
        with tempfile.TemporaryDirectory() as tmp_dir:
            # Bug fix: the old ``bool(request.get("include_images", "0"))``
            # always evaluated to True because bool("0") is truthy (and
            # ``request.get`` reads attributes, not the query string).
            # Parse the query value as an int before converting to bool.
            include_images = bool(int(request.query.get("include_images", "0")))
            datas = yield from export_project(project, tmp_dir, include_images=include_images)

            # We need to do that now because export could fail and raise an HTTP error;
            # that's why the response start needs to happen as late as possible.
            response.content_type = 'application/gns3project'
            response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3project"'.format(project.name)
            response.enable_chunked_encoding()
            # Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
            response.content_length = None
            response.start(request)

            for data in datas:
                response.write(data)
                yield from response.drain()

            yield from response.write_eof()
    # Will be raised if you have no space left or a permission issue on your
    # temporary directory.
    # RuntimeError: something was wrong during the zip process
    except (OSError, RuntimeError) as e:
        raise aiohttp.web.HTTPNotFound(text="Can't export project: {}".format(str(e)))
def export_project(request, response):
    """
    Stream a project export to the client as a ``.gns3project`` attachment.

    :param request: aiohttp request; ``project_id`` comes from the route match
        info, ``include_images`` ("0"/"1") from the query string
    :param response: aiohttp streaming response
    :raises aiohttp.web.HTTPNotFound: when the export fails
    """
    controller = Controller.instance()
    project_id = request.match_info["project_id"]
    project = yield from controller.get_loaded_project(project_id)
    try:
        with tempfile.TemporaryDirectory() as tmp_dir:
            include_images = bool(int(request.query.get("include_images", "0")))
            datas = yield from export_project(project, tmp_dir, include_images=include_images)

            # Headers are set only once the export data is ready: a failed
            # export must still be able to raise a regular HTTP error.
            response.content_type = 'application/gns3project'
            disposition = 'attachment; filename="{}.gns3project"'.format(project.name)
            response.headers['CONTENT-DISPOSITION'] = disposition
            response.enable_chunked_encoding()
            yield from response.prepare(request)

            for data in datas:
                response.write(data)
                yield from response.drain()
            yield from response.write_eof()
    except (OSError, RuntimeError) as e:
        # OSError: no space left or a permission issue on the temporary
        # directory; RuntimeError: something went wrong during the zip process.
        raise aiohttp.web.HTTPNotFound(text="Can't export project: {}".format(str(e)))
def test_export_disallow_some_type(tmpdir, project, async_run):
    """
    Disallow export for some node type
    """
    path = project.path
    topology = {
        "topology": {
            "nodes": [
                {
                    "node_type": "vmware"
                }
            ]
        }
    }
    with open(os.path.join(path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)

    # VMware nodes are rejected by default...
    with pytest.raises(aiohttp.web.HTTPConflict):
        with aiozipstream.ZipFile() as stream:
            async_run(export_project(stream, project, str(tmpdir)))
    # ...but allowed when every node type is accepted.
    with aiozipstream.ZipFile() as stream:
        async_run(export_project(stream, project, str(tmpdir), allow_all_nodes=True))

    # VirtualBox is always disallowed
    topology = {
        "topology": {
            "nodes": [
                {
                    "node_type": "virtualbox",
                    "properties": {
                        "linked_clone": True
                    }
                }
            ]
        }
    }
    with open(os.path.join(path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)
    with pytest.raises(aiohttp.web.HTTPConflict):
        with aiozipstream.ZipFile() as stream:
            async_run(export_project(stream, project, str(tmpdir), allow_all_nodes=True))
def test_export(tmpdir, project, async_run):
    """Full export: logs and snapshots excluded, computes rewritten to local."""
    path = project.path
    os.makedirs(os.path.join(path, "vm-1", "dynamips"))

    # The .gns3 should be renamed project.gns3 in order to simplify import
    topology = {
        "topology": {
            "computes": [
                {
                    "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                    "host": "127.0.0.1",
                    "name": "Remote 1",
                    "port": 8001,
                    "protocol": "http"
                }
            ],
            "nodes": [
                {
                    "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                    "node_type": "vpcs"
                }
            ]
        }
    }
    with open(os.path.join(path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)
    with open(os.path.join(path, "vm-1", "dynamips", "test"), 'w+') as fd:
        fd.write("HELLO")
    with open(os.path.join(path, "vm-1", "dynamips", "test_log.txt"), 'w+') as fd:
        fd.write("LOG")
    os.makedirs(os.path.join(path, "project-files", "snapshots"))
    with open(os.path.join(path, "project-files", "snapshots", "test"), 'w+') as fd:
        fd.write("WORLD")

    chunks = async_run(export_project(project, str(tmpdir)))
    with open(str(tmpdir / 'zipfile.zip'), 'wb') as out:
        for chunk in chunks:
            out.write(chunk)

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        with archive.open("vm-1/dynamips/test") as entry:
            assert entry.read() == b"HELLO"

        names = archive.namelist()
        assert 'test.gns3' not in names
        assert 'project.gns3' in names
        assert 'project-files/snapshots/test' not in names
        assert 'vm-1/dynamips/test_log.txt' not in names

        with archive.open("project.gns3") as entry:
            topo = json.loads(entry.read().decode())["topology"]
            # All nodes should have compute_id local after export
            assert topo["nodes"][0]["compute_id"] == "local"
            assert topo["computes"] == []
def test_export(tmpdir, project, async_run):
    """Full export: logs and snapshots excluded, computes rewritten to local."""
    # NOTE(review): ``test_export`` is defined more than once in this file;
    # the last definition shadows the earlier ones at import time — confirm
    # which copy is intended to survive.
    path = project.path
    os.makedirs(os.path.join(path, "vm-1", "dynamips"))

    # The .gns3 should be renamed project.gns3 in order to simplify import
    topology = {
        "topology": {
            "computes": [{
                "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                "host": "127.0.0.1",
                "name": "Remote 1",
                "port": 8001,
                "protocol": "http"
            }],
            "nodes": [{
                "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                "node_type": "vpcs"
            }]
        }
    }
    with open(os.path.join(path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)
    with open(os.path.join(path, "vm-1", "dynamips", "test"), 'w+') as fd:
        fd.write("HELLO")
    with open(os.path.join(path, "vm-1", "dynamips", "test_log.txt"), 'w+') as fd:
        fd.write("LOG")
    os.makedirs(os.path.join(path, "project-files", "snapshots"))
    with open(os.path.join(path, "project-files", "snapshots", "test"), 'w+') as fd:
        fd.write("WORLD")

    chunks = async_run(export_project(project, str(tmpdir)))
    with open(str(tmpdir / 'zipfile.zip'), 'wb') as out:
        for chunk in chunks:
            out.write(chunk)

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        with archive.open("vm-1/dynamips/test") as entry:
            assert entry.read() == b"HELLO"

        names = archive.namelist()
        assert 'test.gns3' not in names
        assert 'project.gns3' in names
        assert 'project-files/snapshots/test' not in names
        assert 'vm-1/dynamips/test_log.txt' not in names

        with archive.open("project.gns3") as entry:
            topo = json.loads(entry.read().decode())["topology"]
            # All nodes should have compute_id local after export
            assert topo["nodes"][0]["compute_id"] == "local"
            assert topo["computes"] == []
def test_export_disallow_running(tmpdir, project, node, async_run):
    """
    Disallow export when a node is running
    """
    # Fix: docstring typo "Dissallow" corrected.
    path = project.path
    topology = {"topology": {"nodes": [{"node_type": "dynamips"}]}}
    with open(os.path.join(path, "test.gns3"), 'w+') as f:
        json.dump(topology, f)

    node._status = "started"
    with pytest.raises(aiohttp.web.HTTPConflict):
        async_run(export_project(project, str(tmpdir)))
def test_export_fix_path(tmpdir, project, async_run):
    """
    Fix absolute image path, except for Docker
    """
    topology = {
        "topology": {
            "nodes": [
                {
                    "properties": {
                        "image": "/tmp/c3725-adventerprisek9-mz.124-25d.image"
                    },
                    "node_type": "dynamips"
                },
                {
                    "properties": {
                        # "lastest" is intentional test data; keep as-is.
                        "image": "gns3/webterm:lastest"
                    },
                    "node_type": "docker"
                }
            ]
        }
    }
    with open(os.path.join(project.path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)

    chunks = async_run(export_project(project, str(tmpdir)))
    with open(str(tmpdir / 'zipfile.zip'), 'wb') as out:
        for chunk in chunks:
            out.write(chunk)

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        with archive.open("project.gns3") as entry:
            exported = json.loads(entry.read().decode())
    nodes = exported["topology"]["nodes"]
    # Dynamips image path is made relative; Docker image names are untouched.
    assert nodes[0]["properties"]["image"] == "c3725-adventerprisek9-mz.124-25d.image"
    assert nodes[1]["properties"]["image"] == "gns3/webterm:lastest"
def test_export_fix_path(tmpdir, project, async_run):
    """
    Fix absolute image path, except for Docker
    """
    # NOTE(review): duplicate of an identical ``test_export_fix_path`` earlier
    # in the file; this definition shadows that one — confirm which copy is
    # intended to survive.
    dynamips_node = {
        "properties": {
            "image": "/tmp/c3725-adventerprisek9-mz.124-25d.image"
        },
        "node_type": "dynamips"
    }
    docker_node = {
        "properties": {
            # "lastest" is intentional test data; keep as-is.
            "image": "gns3/webterm:lastest"
        },
        "node_type": "docker"
    }
    topology = {"topology": {"nodes": [dynamips_node, docker_node]}}
    with open(os.path.join(project.path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)

    chunks = async_run(export_project(project, str(tmpdir)))
    with open(str(tmpdir / 'zipfile.zip'), 'wb') as out:
        for chunk in chunks:
            out.write(chunk)

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        with archive.open("project.gns3") as entry:
            exported = json.loads(entry.read().decode())
    nodes = exported["topology"]["nodes"]
    # Dynamips image path is made relative; Docker image names are untouched.
    assert nodes[0]["properties"]["image"] == "c3725-adventerprisek9-mz.124-25d.image"
    assert nodes[1]["properties"]["image"] == "gns3/webterm:lastest"
def test_export_keep_compute_id(tmpdir, project, async_run):
    """
    If we want to restore the same computes we could ask to keep
    them in the file
    """
    topology = {
        "topology": {
            "computes": [
                {
                    "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                    "host": "127.0.0.1",
                    "name": "Remote 1",
                    "port": 8001,
                    "protocol": "http"
                }
            ],
            "nodes": [
                {
                    "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                    "node_type": "vpcs"
                }
            ]
        }
    }
    with open(os.path.join(project.path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)

    chunks = async_run(export_project(project, str(tmpdir), keep_compute_id=True))
    with open(str(tmpdir / 'zipfile.zip'), 'wb') as out:
        for chunk in chunks:
            out.write(chunk)

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        with archive.open("project.gns3") as entry:
            topo = json.loads(entry.read().decode())["topology"]
    # Remote compute id survives the export when keep_compute_id is set.
    assert topo["nodes"][0]["compute_id"] == "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0"
    assert len(topo["computes"]) == 1
def test_export_keep_compute_id(tmpdir, project, async_run):
    """
    If we want to restore the same computes we could ask to keep
    them in the file
    """
    # NOTE(review): duplicate of an identical ``test_export_keep_compute_id``
    # earlier in the file; this definition shadows that one — confirm which
    # copy is intended to survive.
    remote_id = "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0"
    topology = {
        "topology": {
            "computes": [
                {
                    "compute_id": remote_id,
                    "host": "127.0.0.1",
                    "name": "Remote 1",
                    "port": 8001,
                    "protocol": "http"
                }
            ],
            "nodes": [
                {
                    "compute_id": remote_id,
                    "node_type": "vpcs"
                }
            ]
        }
    }
    with open(os.path.join(project.path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)

    chunks = async_run(export_project(project, str(tmpdir), keep_compute_id=True))
    with open(str(tmpdir / 'zipfile.zip'), 'wb') as out:
        for chunk in chunks:
            out.write(chunk)

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        with archive.open("project.gns3") as entry:
            topo = json.loads(entry.read().decode())["topology"]
    # Remote compute id survives the export when keep_compute_id is set.
    assert topo["nodes"][0]["compute_id"] == remote_id
    assert len(topo["computes"]) == 1
def test_export_disallow_running(tmpdir, project, node, async_run):
    """
    Disallow export when a node is running
    """
    # Fix: docstring typo "Dissallow" corrected.
    path = project.path
    topology = {
        "topology": {
            "nodes": [
                {
                    "node_type": "dynamips"
                }
            ]
        }
    }
    with open(os.path.join(path, "test.gns3"), 'w+') as f:
        json.dump(topology, f)

    node._status = "started"
    with pytest.raises(aiohttp.web.HTTPConflict):
        z = async_run(export_project(project, str(tmpdir)))
def test_export_images_from_vm(tmpdir, project, async_run):
    """
    If data is on a remote server export it locally before
    sending it in the archive.
    """
    compute = MagicMock()
    compute.id = "vm"
    compute.list_files = AsyncioMagicMock(return_value=[
        {"path": "vm-1/dynamips/test"}
    ])

    # Fake file that will be download from the vm
    file_response = AsyncioMagicMock()
    file_response.content = AsyncioBytesIO()
    async_run(file_response.content.write(b"HELLO"))
    file_response.content.seek(0)
    file_response.status = 200
    compute.download_file = AsyncioMagicMock(return_value=file_response)

    # Fake image served by the remote compute
    image_response = AsyncioMagicMock()
    image_response.content = AsyncioBytesIO()
    async_run(image_response.content.write(b"IMAGE"))
    image_response.content.seek(0)
    image_response.status = 200
    compute.download_image = AsyncioMagicMock(return_value=image_response)

    project._project_created_on_compute.add(compute)

    os.makedirs(os.path.join(project.path, "vm-1", "dynamips"))

    topology = {
        "topology": {
            "nodes": [
                {
                    "compute_id": "vm",
                    "properties": {
                        "image": "test.image"
                    },
                    "node_type": "dynamips"
                }
            ]
        }
    }
    # The .gns3 should be renamed project.gns3 in order to simplify import
    with open(os.path.join(project.path, "test.gns3"), 'w+') as fd:
        fd.write(json.dumps(topology))

    with aiozipstream.ZipFile() as stream:
        async_run(export_project(stream, project, str(tmpdir), include_images=True))
        assert compute.list_files.called
        async_run(write_file(str(tmpdir / 'zipfile.zip'), stream))

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        with archive.open("vm-1/dynamips/test") as entry:
            assert entry.read() == b"HELLO"
        with archive.open("images/dynamips/test.image") as entry:
            assert entry.read() == b"IMAGE"
def test_export(tmpdir, project, async_run):
    """Full export without images: logs, snapshots and images excluded."""
    path = project.path
    os.makedirs(os.path.join(path, "vm-1", "dynamips"))
    os.makedirs(str(tmpdir / "IOS"))
    with open(str(tmpdir / "IOS" / "test.image"), "w+") as img:
        img.write("AAA")

    # The .gns3 should be renamed project.gns3 in order to simplify import
    topology = {
        "topology": {
            "computes": [
                {
                    "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                    "host": "127.0.0.1",
                    "name": "Remote 1",
                    "port": 8001,
                    "protocol": "http"
                }
            ],
            "nodes": [
                {
                    "compute_id": "6b7149c8-7d6e-4ca0-ab6b-daa8ab567be0",
                    "node_type": "dynamips",
                    "properties": {
                        "image": "test.image"
                    }
                }
            ]
        }
    }
    with open(os.path.join(path, "test.gns3"), 'w+') as fd:
        json.dump(topology, fd)
    with open(os.path.join(path, "vm-1", "dynamips", "test"), 'w+') as fd:
        fd.write("HELLO")
    with open(os.path.join(path, "vm-1", "dynamips", "test_log.txt"), 'w+') as fd:
        fd.write("LOG")
    os.makedirs(os.path.join(path, "project-files", "snapshots"))
    with open(os.path.join(path, "project-files", "snapshots", "test"), 'w+') as fd:
        fd.write("WORLD")

    with aiozipstream.ZipFile() as stream:
        with patch("gns3server.compute.Dynamips.get_images_directory", return_value=str(tmpdir / "IOS"),):
            async_run(export_project(stream, project, str(tmpdir), include_images=False))
        async_run(write_file(str(tmpdir / 'zipfile.zip'), stream))

    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as archive:
        with archive.open("vm-1/dynamips/test") as entry:
            assert entry.read() == b"HELLO"

        names = archive.namelist()
        assert 'test.gns3' not in names
        assert 'project.gns3' in names
        assert 'project-files/snapshots/test' not in names
        assert 'vm-1/dynamips/test_log.txt' not in names
        # include_images=False must keep the image out of the archive
        assert 'images/IOS/test.image' not in names

        with archive.open("project.gns3") as entry:
            topo = json.loads(entry.read().decode())["topology"]
            # All nodes should have compute_id local after export
            assert topo["nodes"][0]["compute_id"] == "local"
            assert topo["computes"] == []