def test_absolute_local_file_single(self):
    """extract_root_layered_fsimage_url supports absolute file:/// uris."""
    tmp_d = self.tmp_dir()
    target_d = self.tmp_path("target_d", tmp_d)
    img_path = self.tmp_path("my.img", tmp_d)
    util.write_file(img_path, img_path + " data\n")
    extract_root_layered_fsimage_url("file:///" + img_path, target_d)
    # A single local image is extracted in place; nothing is downloaded.
    self.assertEqual(1, self.m__extract_root_layered_fsimage.call_count)
    self.assertEqual(0, self.m_download.call_count)
def test_local_file_path_single(self):
    """extract_root_layered_fsimage_url supports normal file path without
    file:"""
    tmp_d = self.tmp_dir()
    target_d = self.tmp_path("target_d", tmp_d)
    img_path = self.tmp_path("my.img", tmp_d)
    util.write_file(img_path, img_path + " data\n")
    # A bare absolute path (no file: scheme) must be accepted as-is.
    extract_root_layered_fsimage_url(os.path.abspath(img_path), target_d)
    self.assertEqual(1, self.m__extract_root_layered_fsimage.call_count)
    self.assertEqual(0, self.m_download.call_count)
def test_remote_file_single(self):
    """extract_root_layered_fsimage_url supports http:// urls."""
    tmp_d = self.tmp_dir()
    target_d = self.tmp_path("target_d", tmp_d)
    url = "http://example.io/minimal.squashfs"
    extract_root_layered_fsimage_url(url, target_d)
    # One download, one extraction, and the exact url must be fetched.
    self.assertEqual(1, self.m__extract_root_layered_fsimage.call_count)
    self.assertEqual(1, self.m_download.call_count)
    self.assertEqual("http://example.io/minimal.squashfs",
                     self.m_download.call_args_list[0][0][0])
    # ensure the file got cleaned up.
    leftovers = [path for path in self.downloads if os.path.exists(path)]
    self.assertEqual([], leftovers)
def test_relative_local_file_single(self):
    """extract_root_layered_fsimage_url supports relative file:// uris."""
    tmp_d = self.tmp_dir()
    target_d = self.tmp_path("target_d", tmp_d)
    fname = "my.img"
    orig_cwd = os.getcwd()
    try:
        # The relative uri is resolved against the current directory,
        # so create the image there and restore the cwd afterwards.
        os.chdir(tmp_d)
        util.write_file(fname, fname + " data\n")
        extract_root_layered_fsimage_url("file://" + fname, target_d)
    finally:
        os.chdir(orig_cwd)
    self.assertEqual(1, self.m__extract_root_layered_fsimage.call_count)
    self.assertEqual(0, self.m_download.call_count)
def test_local_file_path_multiple(self):
    """extract_root_layered_fsimage_url supports normal hierarchy file
    path"""
    tmp_d = self.tmp_dir()
    target_d = self.tmp_path("target_d", tmp_d)
    arg = os.path.abspath(
        self.tmp_path("minimal.standard.debug.squashfs", tmp_d))
    # Every layer of the dotted hierarchy must exist on disk locally.
    layer_names = (
        "minimal.squashfs",
        "minimal.standard.squashfs",
        "minimal.standard.debug.squashfs",
    )
    for layer in layer_names:
        layer_path = self.tmp_path(layer, tmp_d)
        util.write_file(layer_path, layer_path + " data\n")
    extract_root_layered_fsimage_url(arg, target_d)
    self.assertEqual(1, self.m__extract_root_layered_fsimage.call_count)
    self.assertEqual(0, self.m_download.call_count)
def test_remote_file_multiple(self):
    """extract_root_layered_fsimage_url supports normal hierarchy from
    http:// urls."""
    tmp_d = self.tmp_dir()
    target_d = self.tmp_path("target_d", tmp_d)
    url = "http://example.io/minimal.standard.debug.squashfs"
    extract_root_layered_fsimage_url(url, target_d)
    self.assertEqual(1, self.m__extract_root_layered_fsimage.call_count)
    # Each layer of the dotted hierarchy is fetched, base layer first.
    self.assertEqual(3, self.m_download.call_count)
    expected_layers = [
        "minimal.squashfs",
        "minimal.standard.squashfs",
        "minimal.standard.debug.squashfs",
    ]
    for idx, layer in enumerate(expected_layers):
        self.assertEqual("http://example.io/" + layer,
                         self.m_download.call_args_list[idx][0][0])
    # ensure the file got cleaned up.
    leftovers = [path for path in self.downloads if os.path.exists(path)]
    self.assertEqual([], leftovers)