def test_runs_templater(self):
    """Installing with a templater must link indirectly, not to raw src."""
    # temporary source directory holding the dotfiles
    source = get_tempdir()
    self.addCleanup(clean, source)
    self.assertTrue(os.path.exists(source))
    # temporary destination directory for installation
    dest = get_tempdir()
    self.addCleanup(clean, dest)
    self.assertTrue(os.path.exists(dest))
    # populate the source with three random files
    children = []
    for _ in range(3):
        children.append(create_random_file(source)[0])
    # installer with a mocked templater returning fixed content
    templater = MagicMock()
    templater.generate.return_value = b'content'
    installer = Installer()
    installer.install(templater=templater, src=source, dst=dest,
                      linktype=LinkTypes.LINK_CHILDREN, actionexec=None)
    # every installed entry must be a symlink that does NOT point
    # straight at the raw source file (templated content goes elsewhere)
    for child in children:
        installed = os.path.join(dest, os.path.basename(child))
        self.assertTrue(os.path.islink(installed))
        self.assertNotEqual(os.path.realpath(installed), child)
def test_index(self):
    """Index a directory tree and verify the resulting catalog nodes."""
    # init: a working dir holding an (initially empty) catalog file
    workingdir = get_tempdir()
    catalogpath = create_rnd_file(workingdir, 'catalog.json', content='')
    self.addCleanup(clean, workingdir)
    # the directory that will be indexed
    dirpath = get_tempdir()
    self.addCleanup(clean, dirpath)
    # create 3 files at the top level
    f1 = create_rnd_file(dirpath, get_rnd_string(5))
    f2 = create_rnd_file(dirpath, get_rnd_string(5))
    f3 = create_rnd_file(dirpath, get_rnd_string(5))
    # create 2 directories
    d1 = create_dir(dirpath, get_rnd_string(3))
    d2 = create_dir(dirpath, get_rnd_string(3))
    # fill directories with files (2 in d1, 1 in d2)
    _ = create_rnd_file(d1, get_rnd_string(4))
    _ = create_rnd_file(d1, get_rnd_string(4))
    _ = create_rnd_file(d2, get_rnd_string(6))
    noder = Noder()
    top = noder.new_top_node()
    catalog = Catalog(catalogpath, force=True, debug=False)
    # create fake args shaped like a docopt argument dict
    tmpdirname = 'tmpdir'
    args = {
        '<path>': dirpath,
        '<name>': tmpdirname,
        '--hash': True,
        '--meta': ['some meta'],
        '--no-subsize': False,
        '--verbose': True
    }
    # index the directory; catalog file must no longer be empty
    cmd_index(args, noder, catalog, top)
    self.assertTrue(os.stat(catalogpath).st_size != 0)
    # explore the top node to find all nodes:
    # one storage child with the 5 top-level entries (3 files + 2 dirs)
    self.assertTrue(len(top.children) == 1)
    storage = top.children[0]
    self.assertTrue(len(storage.children) == 5)
    # ensures files and directories are in
    names = [x.name for x in storage.children]
    self.assertTrue(os.path.basename(f1) in names)
    self.assertTrue(os.path.basename(f2) in names)
    self.assertTrue(os.path.basename(f3) in names)
    self.assertTrue(os.path.basename(d1) in names)
    self.assertTrue(os.path.basename(d2) in names)
    # sub-directory children counts must match what was created above
    for node in storage.children:
        if node.name == os.path.basename(d1):
            self.assertTrue(len(node.children) == 2)
        elif node.name == os.path.basename(d2):
            self.assertTrue(len(node.children) == 1)
def test_link_children(self):
    """Ensure linkall installs every source file as a link in dst."""
    # create source dir
    src_dir = get_tempdir()
    self.assertTrue(os.path.exists(src_dir))
    self.addCleanup(clean, src_dir)
    # where dotfiles will be installed
    dst_dir = get_tempdir()
    self.assertTrue(os.path.exists(dst_dir))
    self.addCleanup(clean, dst_dir)
    # create 3 random files in source
    srcs = [create_random_file(src_dir)[0] for _ in range(3)]
    installer = Installer()
    installer.linkall(templater=MagicMock(), src=src_dir,
                      dst=dst_dir, actions=[])
    # Ensure all destination files point to source
    for src in srcs:
        # bug fix: join with the basename — src is an absolute path, so
        # os.path.join(dst_dir, src) returned src itself, making the
        # realpath assertion trivially true
        dst = os.path.join(dst_dir, os.path.basename(src))
        self.assertEqual(os.path.realpath(dst), src)
def test_runs_templater(self, mocked_templategen):
    """Templated dotfiles must be linked indirectly, not to the raw src."""
    # temporary source directory
    source = get_tempdir()
    self.addCleanup(clean, source)
    self.assertTrue(os.path.exists(source))
    # temporary install destination
    dest = get_tempdir()
    self.addCleanup(clean, dest)
    self.assertTrue(os.path.exists(dest))
    # populate the source with three random files
    children = [create_random_file(source)[0] for _ in range(3)]
    # mocked templater returning fixed content
    templater = MagicMock()
    templater.generate.return_value = b'content'
    # make templategen treat everything as a template
    mocked_templategen.is_template.return_value = True
    installer = Installer()
    installer.linkall(templater=templater, src=source, dst=dest, actions=[])
    for child in children:
        installed = os.path.join(dest, os.path.basename(child))
        # installed entry is a symlink ...
        self.assertTrue(os.path.islink(installed))
        # ... that does not point straight at the source file
        # TODO: maybe check that it is actually linked to the template folder
        self.assertNotEqual(os.path.realpath(installed), child)
def test_fails_when_src_file(self):
    """A file (not a directory) as src must make install fail."""
    # build a temporary directory holding a single file
    tmp = get_tempdir()
    self.addCleanup(clean, tmp)
    self.assertTrue(os.path.exists(tmp))
    badsrc = create_random_file(tmp)[0]
    installer = Installer()
    # src points at a file although a directory is expected
    res, err = installer.install(templater=MagicMock(), src=badsrc,
                                 dst='/dev/null',
                                 linktype=LinkTypes.LINK_CHILDREN,
                                 actionexec=None)
    # the call must report failure with an explicit error message
    self.assertFalse(res)
    expected = 'source dotfile is not a directory: {}'.format(badsrc)
    self.assertEqual(err, expected)
def test_fails_when_src_file(self):
    """linkall with a file as src must log an error and do nothing."""
    # build a temporary directory holding a single file
    tmp = get_tempdir()
    self.addCleanup(clean, tmp)
    self.assertTrue(os.path.exists(tmp))
    badsrc = create_random_file(tmp)[0]
    # capture error logging through a mock
    errlog = MagicMock()
    installer = Installer()
    installer.log.err = errlog
    # src points at a file although a directory is expected
    result = installer.linkall(templater=MagicMock(), src=badsrc,
                               dst='/dev/null', actions=[])
    # nothing was installed ...
    self.assertEqual(result, [])
    # ... and the error was logged
    errlog.assert_called_with(
        'source dotfile is not a directory: {}'.format(badsrc))
def test_creates_dst(self):
    """link_children must create the destination directory if missing."""
    # source directory for the dotfiles
    source = get_tempdir()
    self.addCleanup(clean, source)
    self.assertTrue(os.path.exists(source))
    # point dst at a not-yet-existing sub-directory of a temp dir
    parent = get_tempdir()
    self.addCleanup(clean, parent)
    target = os.path.join(parent, get_string(6))
    self.assertFalse(os.path.exists(target))
    installer = Installer()
    installer.link_children(templater=MagicMock(), src=source,
                            dst=target, actionexec=None)
    # the destination directory must have been created
    self.assertTrue(os.path.exists(target))
def test_jhelpers(self): """Test the install function""" # dotpath location tmp = get_tempdir() self.assertTrue(os.path.exists(tmp)) self.addCleanup(clean, tmp) # where dotfiles will be installed dst = get_tempdir() self.assertTrue(os.path.exists(dst)) self.addCleanup(clean, dst) # create the dotfile in dotdrop f1, c1 = create_random_file(tmp) with open(f1, 'w') as f: f.write(self.TEMPLATE) dst1 = os.path.join(dst, get_string(6)) d1 = Dotfile(get_string(5), dst1, os.path.basename(f1)) # generate the config and stuff profile = get_string(5) confpath = os.path.join(tmp, self.CONFIG_NAME) self.fake_config(confpath, d1, profile, tmp) conf = Cfg(confpath, profile, debug=True) self.assertTrue(conf is not None) # install them o = load_options(confpath, profile) o.safe = False o.install_showdiff = True o.variables = {} o.debug = True cmd_install(o) # now compare the generated files self.assertTrue(os.path.exists(dst1)) f1content = open(dst1, 'r').read() self.assertTrue(f1content == self.RESULT)
def test_prompts_to_replace_dst(self):
    """Installing over a regular file must ask before replacing it."""
    # source directory for the dotfiles
    source = get_tempdir()
    self.addCleanup(clean, source)
    self.assertTrue(os.path.exists(source))
    # destination parent directory
    destdir = get_tempdir()
    self.addCleanup(clean, destdir)
    # drop an empty regular file where the directory should go
    target = os.path.join(destdir, get_string(6))
    open(target, 'w').close()
    self.assertTrue(os.path.isfile(target))
    # mock the interactive prompt to always answer yes
    prompt = MagicMock(return_value=True)
    installer = Installer()
    installer.safe = True
    installer.log.ask = prompt
    installer.install(templater=MagicMock(), src=source, dst=target,
                      linktype=LinkTypes.LINK_CHILDREN, actionexec=None)
    # the file was replaced by a directory ...
    self.assertTrue(os.path.isdir(target))
    # ... after the user was prompted
    prompt.assert_called_with(
        'Remove regular file {} and replace with empty directory?'.format(
            target))
def test_config(self):
    """Validate parsing of a generated config file by the Cfg class."""
    # workspace for the fake config
    workdir = get_tempdir()
    self.addCleanup(clean, workdir)
    self.assertTrue(os.path.exists(workdir))
    cfgfile = create_fake_config(workdir,
                                 configname=self.CONFIG_NAME,
                                 dotpath=self.CONFIG_DOTPATH,
                                 backup=self.CONFIG_BACKUP,
                                 create=self.CONFIG_CREATE)
    cfg = Cfg(cfgfile, self.PROFILE, debug=True)
    self.assertIsNotNone(cfg)
    # settings must reflect what was written to the file
    settings = cfg.settings
    self.assertIsNotNone(settings)
    self.assertNotEqual(settings, {})
    self.assertEqual(settings['backup'], self.CONFIG_BACKUP)
    self.assertEqual(settings['create'], self.CONFIG_CREATE)
    self.assertEqual(os.path.basename(settings['dotpath']),
                     self.CONFIG_DOTPATH)
    # the config must serialize to a non-empty dump
    self.assertNotEqual(cfg.dump(), '')
def test_install_import_configs(self):
    """Test the install function with imported configs.

    A 'config.yaml' imports 'config-2.yaml'; profile host2 includes
    host1 from the imported config. Installing host2 must install the
    dotfiles of both configs.
    """
    # dotpath location with one sub-dotpath per config
    tmp = get_tempdir()
    self.assertTrue(os.path.exists(tmp))
    self.addCleanup(clean, tmp)
    os.mkdir(os.path.join(tmp, 'importing'))
    os.mkdir(os.path.join(tmp, 'imported'))
    # where dotfiles will be installed
    dst = get_tempdir()
    self.assertTrue(os.path.exists(dst))
    self.addCleanup(clean, dst)
    # creating random dotfiles; each name is rebound to a descriptor
    # dict holding dst/key/name/src for later config population
    imported_dotfile, _ = create_random_file(os.path.join(tmp, 'imported'))
    imported_dotfile = {
        'dst': os.path.join(dst, imported_dotfile),
        'key': 'f_{}'.format(imported_dotfile),
        'name': imported_dotfile,
        'src': os.path.join(tmp, 'imported', imported_dotfile),
    }
    importing_dotfile, _ = \
        create_random_file(os.path.join(tmp, 'importing'))
    # NOTE(review): 'src' points into 'imported' although this file was
    # created under 'importing' — confirm this is intended
    importing_dotfile = {
        'dst': os.path.join(dst, importing_dotfile),
        'key': 'f_{}'.format(importing_dotfile),
        'name': importing_dotfile,
        'src': os.path.join(tmp, 'imported', importing_dotfile),
    }
    # content of the imported (included) config
    imported = {
        'config': {
            'dotpath': 'imported',
        },
        'dotfiles': {
            imported_dotfile['key']: {
                'dst': imported_dotfile['dst'],
                'src': imported_dotfile['name'],
            },
        },
        'profiles': {
            'host1': {
                'dotfiles': [imported_dotfile['key']],
            },
        },
    }
    # content of the importing (top-level) config
    importing = {
        'config': {
            'dotpath': 'importing',
        },
        'dotfiles': {
            importing_dotfile['key']: {
                'dst': importing_dotfile['dst'],
                'src': importing_dotfile['src'],
            },
        },
        'profiles': {
            'host2': {
                'dotfiles': [importing_dotfile['key']],
                'include': ['host1'],
            },
        },
    }
    # create the imported base config file
    imported_path = create_fake_config(tmp,
                                       configname='config-2.yaml',
                                       **imported['config'])
    # create the importing base config file
    importing_path = create_fake_config(tmp,
                                        configname='config.yaml',
                                        import_configs=['config-2.yaml'],
                                        **importing['config'])
    # edit the imported config (everything but the 'config' section)
    populate_fake_config(imported_path, **{
        k: v for k, v in imported.items()
        if k != 'config'
    })
    # edit the importing config (everything but the 'config' section)
    populate_fake_config(importing_path, **{
        k: v for k, v in importing.items()
        if k != 'config'
    })
    # install them
    o = load_options(importing_path, 'host2')
    o.safe = False
    o.install_showdiff = True
    o.variables = {}
    cmd_install(o)
    # now compare the generated files: both dotfiles must be installed
    self.assertTrue(os.path.exists(importing_dotfile['dst']))
    self.assertTrue(os.path.exists(imported_dotfile['dst']))
def test_listings(self): """Test the compare function""" # setup some directories fold_config = os.path.join(os.path.expanduser('~'), '.config') create_dir(fold_config) fold_subcfg = os.path.join(os.path.expanduser('~'), '.config', get_string(5)) create_dir(fold_subcfg) self.addCleanup(clean, fold_subcfg) fold_tmp = get_tempdir() create_dir(fold_tmp) self.addCleanup(clean, fold_tmp) # create the directories tmp = get_tempdir() self.assertTrue(os.path.exists(tmp)) self.addCleanup(clean, tmp) dotfilespath = get_tempdir() self.assertTrue(os.path.exists(dotfilespath)) self.addCleanup(clean, dotfilespath) # create the dotfiles to test d1, c1 = create_random_file(fold_config) self.assertTrue(os.path.exists(d1)) self.addCleanup(clean, d1) d2, c2 = create_random_file(fold_subcfg) self.assertTrue(os.path.exists(d2)) self.addCleanup(clean, d2) d3, c3 = create_random_file(fold_tmp) self.assertTrue(os.path.exists(d3)) self.addCleanup(clean, d3) d4, c4 = create_random_file(fold_tmp, binary=True) self.assertTrue(os.path.exists(d4)) self.addCleanup(clean, d4) d5 = get_tempdir() self.assertTrue(os.path.exists(d5)) self.addCleanup(clean, d5) d6, _ = create_random_file(d5) self.assertTrue(os.path.exists(d6)) # create the config file profile = get_string(5) confpath = create_fake_config(dotfilespath, configname=self.CONFIG_NAME, dotpath=self.CONFIG_DOTPATH, backup=self.CONFIG_BACKUP, create=self.CONFIG_CREATE) self.assertTrue(os.path.exists(confpath)) o = load_options(confpath, profile) dfiles = [d1, d2, d3, d4, d5] # import the files o.import_path = dfiles cmd_importer(o) o = load_options(confpath, profile) # files cmd_list_profiles(o) # list files o.files_templateonly = False cmd_files(o) o.files_templateonly = True cmd_files(o) # details o.detail_keys = None cmd_detail(o)
def test_update(self):
    """Test the update function (legacy conf/opts API).

    Imports dotfiles, edits the installed copies, then updates the
    stored versions both by path and by dotfile key.
    """
    # setup some directories
    fold_config = os.path.join(os.path.expanduser('~'), '.config')
    create_dir(fold_config)
    fold_subcfg = os.path.join(os.path.expanduser('~'), '.config',
                               get_string(5))
    create_dir(fold_subcfg)
    self.addCleanup(clean, fold_subcfg)
    fold_tmp = get_tempdir()
    create_dir(fold_tmp)
    self.addCleanup(clean, fold_tmp)
    # create the directories (workspace and config location)
    tmp = get_tempdir()
    self.assertTrue(os.path.exists(tmp))
    self.addCleanup(clean, tmp)
    dotfilespath = get_tempdir()
    self.assertTrue(os.path.exists(dotfilespath))
    self.addCleanup(clean, dotfilespath)
    # create the dotfiles to test
    d1, c1 = create_random_file(fold_config)
    self.assertTrue(os.path.exists(d1))
    self.addCleanup(clean, d1)
    d2, c2 = create_random_file(fold_config)
    self.assertTrue(os.path.exists(d2))
    self.addCleanup(clean, d2)
    # create the directory to test (a dir dotfile with one file)
    dpath = os.path.join(fold_config, get_string(5))
    dir1 = create_dir(dpath)
    dirf1, _ = create_random_file(dpath)
    self.addCleanup(clean, dir1)
    # create the config file
    profile = get_string(5)
    confpath = create_fake_config(dotfilespath,
                                  configname=self.CONFIG_NAME,
                                  dotpath=self.CONFIG_DOTPATH,
                                  backup=self.CONFIG_BACKUP,
                                  create=self.CONFIG_CREATE)
    self.assertTrue(os.path.exists(confpath))
    conf, opts = load_config(confpath, profile)
    dfiles = [d1, dir1, d2]
    # import the files
    cmd_importer(opts, conf, dfiles)
    # reload the config after import modified it
    conf, opts = load_config(confpath, profile)
    # edit the files
    edit_content(d1, 'newcontent')
    edit_content(dirf1, 'newcontent')
    # add more file
    dirf2, _ = create_random_file(dpath)
    # add more dirs
    dpath = os.path.join(dpath, get_string(5))
    create_dir(dpath)
    create_random_file(dpath)
    # update it (by path)
    opts['safe'] = False
    opts['debug'] = True
    cmd_update(opts, conf, [d1, dir1])
    # test content: the stored copies must now hold the edits
    newcontent = open(d1, 'r').read()
    self.assertTrue(newcontent == 'newcontent')
    newcontent = open(dirf1, 'r').read()
    self.assertTrue(newcontent == 'newcontent')
    edit_content(d2, 'newcontentbykey')
    # update it by key: find the key whose dst matches d2
    dfiles = conf.get_dotfiles(profile)
    d2key = ''
    for ds in dfiles:
        t = os.path.expanduser(ds.dst)
        if t == d2:
            d2key = ds.key
            break
    self.assertTrue(d2key != '')
    opts['safe'] = False
    opts['debug'] = True
    cmd_update(opts, conf, [d2key], iskey=True)
    # test content
    newcontent = open(d2, 'r').read()
    self.assertTrue(newcontent == 'newcontentbykey')
def test_compare(self):
    """Test the compare function.

    Imports a set of dotfiles, then mutates them one by one and
    checks the compare results flip from True (identical) to False
    (differs) accordingly.
    """
    # setup some directories
    fold_config = os.path.join(os.path.expanduser('~'), '.config')
    create_dir(fold_config)
    fold_subcfg = os.path.join(os.path.expanduser('~'), '.config',
                               get_string(5))
    create_dir(fold_subcfg)
    self.addCleanup(clean, fold_subcfg)
    fold_tmp = get_tempdir()
    create_dir(fold_tmp)
    self.addCleanup(clean, fold_tmp)
    # create the directories (workspace and config location)
    tmp = get_tempdir()
    self.assertTrue(os.path.exists(tmp))
    self.addCleanup(clean, tmp)
    dotfilespath = get_tempdir()
    self.assertTrue(os.path.exists(dotfilespath))
    self.addCleanup(clean, dotfilespath)
    # create the dotfiles to test
    d1, c1 = create_random_file(fold_config)
    self.assertTrue(os.path.exists(d1))
    self.addCleanup(clean, d1)
    d2, c2 = create_random_file(fold_subcfg)
    self.assertTrue(os.path.exists(d2))
    self.addCleanup(clean, d2)
    d3, c3 = create_random_file(fold_tmp)
    self.assertTrue(os.path.exists(d3))
    self.addCleanup(clean, d3)
    # a binary dotfile
    d4, c4 = create_random_file(fold_tmp, binary=True)
    self.assertTrue(os.path.exists(d4))
    self.addCleanup(clean, d4)
    # a directory dotfile with one file
    d5 = get_tempdir()
    self.assertTrue(os.path.exists(d5))
    self.addCleanup(clean, d5)
    d6, _ = create_random_file(d5)
    self.assertTrue(os.path.exists(d6))
    # a directory dotfile with a nested sub-directory and file
    d9 = get_tempdir()
    self.assertTrue(os.path.exists(d9))
    self.addCleanup(clean, d9)
    d9sub = os.path.join(d9, get_string(5))
    create_dir(d9sub)
    d9f1, _ = create_random_file(d9sub)
    # create the config file
    profile = get_string(5)
    confpath = create_fake_config(dotfilespath,
                                  configname=self.CONFIG_NAME,
                                  dotpath=self.CONFIG_DOTPATH,
                                  backup=self.CONFIG_BACKUP,
                                  create=self.CONFIG_CREATE)
    self.assertTrue(os.path.exists(confpath))
    o = load_options(confpath, profile)
    o.longkey = True
    o.debug = True
    dfiles = [d1, d2, d3, d4, d5, d9]
    # import the files
    o.import_path = dfiles
    cmd_importer(o)
    # reload options after import modified the config
    o = load_options(confpath, profile)
    # compare the files: all freshly imported, all identical
    expected = {d1: True, d2: True, d3: True,
                d4: True, d5: True, d9: True}
    results = self.compare(o, tmp, len(dfiles))
    self.assertTrue(results == expected)
    # modify file
    edit_content(d1, get_string(20))
    expected = {d1: False, d2: True, d3: True,
                d4: True, d5: True, d9: True}
    results = self.compare(o, tmp, len(dfiles))
    self.assertTrue(results == expected)
    # modify binary file
    edit_content(d4, bytes(get_string(20), 'ascii'), binary=True)
    expected = {d1: False, d2: True, d3: True,
                d4: False, d5: True, d9: True}
    results = self.compare(o, tmp, len(dfiles))
    self.assertTrue(results == expected)
    # add file in directory: the directory dotfile must now differ
    d7, _ = create_random_file(d5)
    self.assertTrue(os.path.exists(d7))
    expected = {d1: False, d2: True, d3: True,
                d4: False, d5: False, d9: True}
    results = self.compare(o, tmp, len(dfiles))
    self.assertTrue(results == expected)
    # modify all files
    edit_content(d2, get_string(20))
    edit_content(d3, get_string(21))
    expected = {d1: False, d2: False, d3: False,
                d4: False, d5: False, d9: True}
    results = self.compare(o, tmp, len(dfiles))
    self.assertTrue(results == expected)
    # edit sub file: nested changes must also be detected
    edit_content(d9f1, get_string(12))
    expected = {d1: False, d2: False, d3: False,
                d4: False, d5: False, d9: False}
    results = self.compare(o, tmp, len(dfiles))
    self.assertTrue(results == expected)
    # test compare from dotdrop (must report differences -> False)
    self.assertFalse(cmd_compare(o, tmp))
    # test focus on a single dotfile and on a non-existing path
    o.compare_focus = [d4]
    self.assertFalse(cmd_compare(o, tmp))
    o.compare_focus = ['/tmp/fake']
    self.assertFalse(cmd_compare(o, tmp))
def test_update(self): """Test the update function""" # setup some directories fold_config = os.path.join(os.path.expanduser('~'), '.config') create_dir(fold_config) fold_subcfg = os.path.join(os.path.expanduser('~'), '.config', get_string(5)) create_dir(fold_subcfg) self.addCleanup(clean, fold_subcfg) fold_tmp = get_tempdir() create_dir(fold_tmp) self.addCleanup(clean, fold_tmp) # create the directories tmp = get_tempdir() self.assertTrue(os.path.exists(tmp)) self.addCleanup(clean, tmp) dotfilespath = get_tempdir() self.assertTrue(os.path.exists(dotfilespath)) self.addCleanup(clean, dotfilespath) # create the dotfiles to test d1, c1 = create_random_file(fold_config) self.assertTrue(os.path.exists(d1)) self.addCleanup(clean, d1) d2, c2 = create_random_file(fold_config) self.assertTrue(os.path.exists(d2)) self.addCleanup(clean, d2) # template d3t, c3t = create_random_file(fold_config) self.assertTrue(os.path.exists(d3t)) self.addCleanup(clean, d3t) # sub dirs dsubstmp = get_tempdir() self.assertTrue(os.path.exists(dsubstmp)) self.addCleanup(clean, dsubstmp) dirsubs = os.path.basename(dsubstmp) dir1string = 'somedir' dir1 = os.path.join(dsubstmp, dir1string) create_dir(dir1) dir1sub1str = 'sub1' sub1 = os.path.join(dir1, dir1sub1str) create_dir(sub1) dir1sub2str = 'sub2' sub2 = os.path.join(dir1, dir1sub2str) create_dir(sub2) f1s1, f1s1c1 = create_random_file(sub1) self.assertTrue(os.path.exists(f1s1)) f1s2, f1s2c1 = create_random_file(sub2) self.assertTrue(os.path.exists(f1s2)) # create the directory to test dpath = os.path.join(fold_config, get_string(5)) dir1 = create_dir(dpath) dirf1, _ = create_random_file(dpath) self.addCleanup(clean, dir1) # create the config file profile = get_string(5) confpath = create_fake_config(dotfilespath, configname=self.CONFIG_NAME, dotpath=self.CONFIG_DOTPATH, backup=self.CONFIG_BACKUP, create=self.CONFIG_CREATE) self.assertTrue(os.path.exists(confpath)) o = load_options(confpath, profile) o.debug = True o.update_showpatch = True 
dfiles = [d1, dir1, d2, d3t, dsubstmp] # import the files o.import_path = dfiles cmd_importer(o) # get new config o = load_options(confpath, profile) o.safe = False o.debug = True o.update_showpatch = True trans = Transform('trans', 'cp -r {0} {1}') d3tb = os.path.basename(d3t) for dotfile in o.dotfiles: if os.path.basename(dotfile.dst) == d3tb: # patch the template src = os.path.join(o.dotpath, dotfile.src) src = os.path.expanduser(src) edit_content(src, '{{@@ profile @@}}') if os.path.basename(dotfile.dst) == dirsubs: # retrieve the path of the sub in the dotpath d1indotpath = os.path.join(o.dotpath, dotfile.src) d1indotpath = os.path.expanduser(d1indotpath) dotfile.trans_w = trans # update template o.update_path = [d3t] self.assertFalse(cmd_update(o)) # update sub dirs gone = os.path.join(d1indotpath, dir1string) gone = os.path.join(gone, dir1sub1str) self.assertTrue(os.path.exists(gone)) clean(sub1) # dir1sub1str self.assertTrue(os.path.exists(gone)) o.update_path = [dsubstmp] cmd_update(o) self.assertFalse(os.path.exists(gone)) # edit the files edit_content(d1, 'newcontent') edit_content(dirf1, 'newcontent') # add more file dirf2, _ = create_random_file(dpath) # add more dirs dpath = os.path.join(dpath, get_string(5)) create_dir(dpath) create_random_file(dpath) # update it o.update_path = [d1, dir1] cmd_update(o) # test content newcontent = open(d1, 'r').read() self.assertTrue(newcontent == 'newcontent') newcontent = open(dirf1, 'r').read() self.assertTrue(newcontent == 'newcontent') edit_content(d2, 'newcontentbykey') # update it by key dfiles = o.dotfiles d2key = '' for ds in dfiles: t = os.path.expanduser(ds.dst) if t == d2: d2key = ds.key break self.assertTrue(d2key != '') o.update_path = [d2key] o.update_iskey = True cmd_update(o) # test content newcontent = open(d2, 'r').read() self.assertTrue(newcontent == 'newcontentbykey')
def test_index(self):
    """Index a tree, update the catalog after changes, verify nodes.

    Covers adding, editing (with and without an mtime change) and
    removing files and directories between two catalog updates.
    """
    # init: working dir holds an initially empty catalog file
    workingdir = get_tempdir()
    catalogpath = create_rnd_file(workingdir, 'catalog.json', content='')
    self.addCleanup(clean, workingdir)
    dirpath = get_tempdir()
    self.addCleanup(clean, dirpath)
    # create 4 files
    f1 = create_rnd_file(dirpath, 'file1')
    f2 = create_rnd_file(dirpath, 'file2')
    f3 = create_rnd_file(dirpath, 'file3')
    f4 = create_rnd_file(dirpath, 'file4')
    # create 2 directories
    d1 = create_dir(dirpath, 'dir1')
    d2 = create_dir(dirpath, 'dir2')
    # fill directories with files
    d1f1 = create_rnd_file(d1, 'dir1file1')
    d1f2 = create_rnd_file(d1, 'dir1file2')
    d2f1 = create_rnd_file(d2, 'dir2file1')
    d2f2 = create_rnd_file(d2, 'dir2file2')
    noder = Noder(debug=True)
    noder.set_hashing(True)
    top = noder.new_top_node()
    catalog = Catalog(catalogpath, force=True, debug=False)
    # get checksums before indexing
    f4_md5 = md5sum(f4)
    self.assertTrue(f4_md5)
    d1f1_md5 = md5sum(d1f1)
    self.assertTrue(d1f1_md5)
    d2f2_md5 = md5sum(d2f2)
    self.assertTrue(d2f2_md5)
    # create fake args shaped like a docopt argument dict
    tmpdirname = 'tmpdir'
    args = {'<path>': dirpath, '<name>': tmpdirname,
            '--hash': True, '--meta': ['some meta'],
            '--no-subsize': False, '--verbose': True,
            '--lpath': None}
    # index the directory
    unix_tree(dirpath)
    cmd_index(args, noder, catalog, top)
    self.assertTrue(os.stat(catalogpath).st_size != 0)
    # ensure md5 sum are in
    nods = noder.find_name(top, os.path.basename(f4))
    self.assertTrue(len(nods) == 1)
    nod = nods[0]
    self.assertTrue(nod)
    self.assertTrue(nod.md5 == f4_md5)
    # print catalog
    noder.print_tree(top)
    # add some files and directories
    new1 = create_rnd_file(d1, 'newf1')
    new2 = create_rnd_file(dirpath, 'newf2')
    new3 = create_dir(dirpath, 'newd3')
    new4 = create_dir(d2, 'newd4')
    new5 = create_rnd_file(new4, 'newf5')
    unix_tree(dirpath)
    # modify files
    EDIT = 'edited'
    edit_file(d1f1, EDIT)
    d1f1_md5_new = md5sum(d1f1)
    self.assertTrue(d1f1_md5_new)
    self.assertTrue(d1f1_md5_new != d1f1_md5)
    # change f4 without touching its mtime
    maccess = os.path.getmtime(f4)
    EDIT = 'edited'
    edit_file(f4, EDIT)
    # reset edit time
    os.utime(f4, (maccess, maccess))
    # bug fix: checksum f4 itself (was md5sum(d1f1), a copy-paste error)
    f4_md5_new = md5sum(f4)
    self.assertTrue(f4_md5_new)
    self.assertTrue(f4_md5_new != f4_md5)
    # change d2f2 without touching its mtime
    maccess = os.path.getmtime(d2f2)
    EDIT = 'edited'
    edit_file(d2f2, EDIT)
    # reset edit time
    os.utime(d2f2, (maccess, maccess))
    d2f2_md5_new = md5sum(d2f2)
    self.assertTrue(d2f2_md5_new)
    self.assertTrue(d2f2_md5_new != d2f2_md5)
    # update storage
    cmd_update(args, noder, catalog, top)
    # print catalog
    noder.print_tree(top)
    # explore the top node to find all nodes:
    # one storage child, now with 8 top-level entries
    self.assertTrue(len(top.children) == 1)
    storage = top.children[0]
    self.assertTrue(len(storage.children) == 8)
    # ensure d1f1 md5 sum has changed in catalog
    nods = noder.find_name(top, os.path.basename(d1f1))
    self.assertTrue(len(nods) == 1)
    nod = nods[0]
    self.assertTrue(nod)
    self.assertTrue(nod.md5 != d1f1_md5)
    self.assertTrue(nod.md5 == d1f1_md5_new)
    # ensure f4 md5 sum has changed in catalog (despite same mtime)
    nods = noder.find_name(top, os.path.basename(f4))
    self.assertTrue(len(nods) == 1)
    nod = nods[0]
    self.assertTrue(nod)
    self.assertTrue(nod.md5 != f4_md5)
    self.assertTrue(nod.md5 == f4_md5_new)
    # ensure d2f2 md5 sum has changed in catalog (despite same mtime)
    nods = noder.find_name(top, os.path.basename(d2f2))
    self.assertTrue(len(nods) == 1)
    nod = nods[0]
    self.assertTrue(nod)
    self.assertTrue(nod.md5 != d2f2_md5)
    self.assertTrue(nod.md5 == d2f2_md5_new)
    # ensures files and directories are in
    names = [node.name for node in anytree.PreOrderIter(storage)]
    print(names)
    self.assertTrue(os.path.basename(f1) in names)
    self.assertTrue(os.path.basename(f2) in names)
    self.assertTrue(os.path.basename(f3) in names)
    self.assertTrue(os.path.basename(f4) in names)
    self.assertTrue(os.path.basename(d1) in names)
    self.assertTrue(os.path.basename(d1f1) in names)
    self.assertTrue(os.path.basename(d1f2) in names)
    self.assertTrue(os.path.basename(d2) in names)
    self.assertTrue(os.path.basename(d2f1) in names)
    self.assertTrue(os.path.basename(new1) in names)
    self.assertTrue(os.path.basename(new2) in names)
    self.assertTrue(os.path.basename(new3) in names)
    self.assertTrue(os.path.basename(new4) in names)
    self.assertTrue(os.path.basename(new5) in names)
    for node in storage.children:
        if node.name == os.path.basename(d1):
            self.assertTrue(len(node.children) == 3)
        elif node.name == os.path.basename(d2):
            self.assertTrue(len(node.children) == 3)
        elif node.name == os.path.basename(new3):
            self.assertTrue(len(node.children) == 0)
        elif node.name == os.path.basename(new4):
            self.assertTrue(len(node.children) == 1)
    self.assertTrue(read_from_file(d1f1) == EDIT)
    # remove some files
    clean(d1f1)
    clean(d2)
    clean(new2)
    clean(new4)
    # update storage
    cmd_update(args, noder, catalog, top)
    # ensures files and directories are (not) in
    names = [node.name for node in anytree.PreOrderIter(storage)]
    print(names)
    self.assertTrue(os.path.basename(f1) in names)
    self.assertTrue(os.path.basename(f2) in names)
    self.assertTrue(os.path.basename(f3) in names)
    self.assertTrue(os.path.basename(f4) in names)
    self.assertTrue(os.path.basename(d1) in names)
    self.assertTrue(os.path.basename(d1f1) not in names)
    self.assertTrue(os.path.basename(d1f2) in names)
    self.assertTrue(os.path.basename(d2) not in names)
    self.assertTrue(os.path.basename(d2f1) not in names)
    # bug fix: this assertion duplicated the d2f1 check above;
    # d2f2 (also removed with d2) was never verified
    self.assertTrue(os.path.basename(d2f2) not in names)
    self.assertTrue(os.path.basename(new1) in names)
    self.assertTrue(os.path.basename(new2) not in names)
    self.assertTrue(os.path.basename(new3) in names)
    self.assertTrue(os.path.basename(new4) not in names)
    self.assertTrue(os.path.basename(new5) not in names)
    for node in storage.children:
        if node.name == os.path.basename(d1):
            self.assertTrue(len(node.children) == 2)
        elif node.name == os.path.basename(new3):
            self.assertTrue(len(node.children) == 0)
def test_import(self):
    """Test the import function.

    Imports plain files, directories and symlinked dotfiles, then
    verifies they appear in the config object, in the yaml file and
    in the dotpath; finally exercises update on an imported file.
    """
    # on filesystem
    src = get_tempdir()
    self.assertTrue(os.path.exists(src))
    self.addCleanup(clean, src)
    # in dotdrop
    dotfilespath = get_tempdir()
    self.assertTrue(os.path.exists(dotfilespath))
    self.addCleanup(clean, dotfilespath)
    profile = get_string(10)
    confpath = create_fake_config(dotfilespath,
                                  configname=self.CONFIG_NAME,
                                  dotpath=self.CONFIG_DOTPATH,
                                  backup=self.CONFIG_BACKUP,
                                  create=self.CONFIG_CREATE)
    self.assertTrue(os.path.exists(confpath))
    o = load_options(confpath, profile)
    # create some random dotfiles
    dotfile1, content1 = create_random_file(src)
    self.addCleanup(clean, dotfile1)
    dotfile2, content2 = create_random_file(os.path.expanduser('~'))
    self.addCleanup(clean, dotfile2)
    homeconf = os.path.join(os.path.expanduser('~'), '.config')
    if not os.path.exists(homeconf):
        os.mkdir(homeconf)
        # only clean up ~/.config when this test created it
        self.addCleanup(clean, homeconf)
    dotconfig = os.path.join(homeconf, get_string(5))
    create_dir(dotconfig)
    self.addCleanup(clean, dotconfig)
    dotfile3, content3 = create_random_file(dotconfig)
    dotfile4, content3 = create_random_file(homeconf)
    self.addCleanup(clean, dotfile4)
    # fake a directory containing dotfiles
    dotfile5 = get_tempdir()
    self.assertTrue(os.path.exists(dotfile5))
    self.addCleanup(clean, dotfile5)
    sub1, _ = create_random_file(dotfile5)
    sub2, _ = create_random_file(dotfile5)
    # fake a file for symlink
    dotfile6, content6 = create_random_file(dotconfig)
    self.addCleanup(clean, dotfile6)
    # fake a directory for symlink
    dotfile7 = get_tempdir()
    self.assertTrue(os.path.exists(dotfile7))
    self.addCleanup(clean, dotfile7)
    sub3, _ = create_random_file(dotfile7)
    sub4, _ = create_random_file(dotfile7)
    # import the dotfiles
    dfiles = [dotfile1, dotfile2, dotfile3, dotfile4, dotfile5]
    o.import_path = dfiles
    cmd_importer(o)
    # import symlink
    o.import_link = LinkTypes.LINK
    sfiles = [dotfile6, dotfile7]
    o.import_path = sfiles
    cmd_importer(o)
    o.import_link = LinkTypes.NOLINK
    # reload the config
    o = load_options(confpath, profile)
    # test dotfiles in config class
    self.assertTrue(profile in [p.key for p in o.profiles])
    self.assert_file(dotfile1, o, profile)
    self.assert_file(dotfile2, o, profile)
    self.assert_file(dotfile3, o, profile)
    self.assert_file(dotfile4, o, profile)
    self.assert_file(dotfile5, o, profile)
    self.assert_file(dotfile6, o, profile)
    self.assert_file(dotfile7, o, profile)
    # test dotfiles in yaml file
    y = self.load_yaml(confpath)
    self.assert_in_yaml(dotfile1, y)
    self.assert_in_yaml(dotfile2, y)
    self.assert_in_yaml(dotfile3, y)
    self.assert_in_yaml(dotfile4, y)
    self.assert_in_yaml(dotfile5, y)
    self.assert_in_yaml(dotfile6, y, link=True)
    self.assert_in_yaml(dotfile7, y, link=True)
    # test have been imported in dotdrop dotpath directory
    indt1 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                         get_path_strip_version(dotfile1))
    self.assertTrue(os.path.exists(indt1))
    indt2 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                         get_path_strip_version(dotfile2))
    self.assertTrue(os.path.exists(indt2))
    indt3 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                         get_path_strip_version(dotfile3))
    self.assertTrue(os.path.exists(indt3))
    indt4 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                         get_path_strip_version(dotfile4))
    self.assertTrue(os.path.exists(indt4))
    indt5 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                         get_path_strip_version(dotfile5))
    self.assertTrue(os.path.exists(indt5))
    # NOTE(review): sub1/sub2 are absolute paths, so these joins
    # resolve to sub1/sub2 themselves — confirm the checks are
    # intended to target the dotpath copies instead
    s1 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                      get_path_strip_version(dotfile6), sub1)
    self.assertTrue(os.path.exists(s1))
    s2 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                      get_path_strip_version(dotfile6), sub2)
    self.assertTrue(os.path.exists(s2))
    indt6 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                         get_path_strip_version(dotfile6))
    self.assertTrue(os.path.exists(indt6))
    indt7 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                         get_path_strip_version(dotfile7))
    self.assertTrue(os.path.exists(indt7))
    s3 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                      get_path_strip_version(dotfile7), sub3)
    self.assertTrue(os.path.exists(s3))
    s4 = os.path.join(dotfilespath, self.CONFIG_DOTPATH,
                      get_path_strip_version(dotfile7), sub4)
    self.assertTrue(os.path.exists(s4))
    cmd_list_profiles(o)
    cmd_files(o)
    # fake test update: edit the installed file and update the store
    editcontent = 'edited'
    edit_content(dotfile1, editcontent)
    o.safe = False
    o.update_path = [dotfile1]
    o.debug = True
    cmd_update(o)
    # the dotpath copy must now hold the edited content
    c2 = open(indt1, 'r').read()
    self.assertTrue(editcontent == c2)
def test_include(self):
    """Test profile inclusion via 'include' (legacy config API).

    NOTE(review): another method later in this class is also named
    ``test_include`` and shadows this one at class-definition time,
    so this version never actually runs under unittest discovery;
    consider renaming or removing one of the two.
    """
    tmp = get_tempdir()
    self.assertTrue(os.path.exists(tmp))
    self.addCleanup(clean, tmp)
    # create a base config file
    confpath = create_fake_config(tmp,
                                  configname=self.CONFIG_NAME,
                                  dotpath=self.CONFIG_DOTPATH,
                                  backup=self.CONFIG_BACKUP,
                                  create=self.CONFIG_CREATE)
    # edit the config
    # pass an explicit loader: yaml.load() without one is deprecated
    # since PyYAML 5.1 and the legacy default loader is unsafe; the
    # config is plain data so SafeLoader suffices
    with open(confpath, 'r') as f:
        content = yaml.load(f, Loader=yaml.SafeLoader)
    # adding dotfiles
    df1key = 'f_vimrc'
    df2key = 'f_xinitrc'
    content['dotfiles'] = {
        df1key: {'dst': '~/.vimrc', 'src': 'vimrc'},
        df2key: {'dst': '~/.xinitrc', 'src': 'xinitrc'}
    }
    # adding profiles: host1 includes host2
    pf1key = 'host1'
    pf2key = 'host2'
    content['profiles'] = {
        pf1key: {'dotfiles': [df2key], 'include': ['host2']},
        pf2key: {'dotfiles': [df1key]}
    }
    # save the new config
    with open(confpath, 'w') as f:
        yaml.dump(content, f, default_flow_style=False, indent=2)
    # do the tests
    conf = Cfg(confpath)
    self.assertTrue(conf is not None)
    # test profile
    profiles = conf.get_profiles()
    self.assertTrue(pf1key in profiles)
    self.assertTrue(pf2key in profiles)
    # test dotfiles: host1 gets its own dotfile plus host2's via include
    dotfiles = conf._get_dotfiles(pf1key)
    self.assertTrue(df1key in [x.key for x in dotfiles])
    self.assertTrue(df2key in [x.key for x in dotfiles])
    # host2 only owns its own dotfile
    dotfiles = conf._get_dotfiles(pf2key)
    self.assertTrue(df1key in [x.key for x in dotfiles])
    self.assertFalse(df2key in [x.key for x in dotfiles])
    # test that a not-existing included profile does not break parsing
    # edit the config
    with open(confpath, 'r') as f:
        content = yaml.load(f, Loader=yaml.SafeLoader)
    content['profiles'] = {
        pf1key: {'dotfiles': [df2key], 'include': ['host2']},
        pf2key: {'dotfiles': [df1key], 'include': ['host3']}
    }
    # save the new config
    with open(confpath, 'w') as f:
        yaml.dump(content, f, default_flow_style=False, indent=2)
    # do the tests
    conf = Cfg(confpath)
    self.assertTrue(conf is not None)
def test_include(self):
    """Test profile inclusion via the 'include' config entry."""
    tmp = get_tempdir()
    self.assertTrue(os.path.exists(tmp))
    self.addCleanup(clean, tmp)
    # base config file to mutate
    confpath = create_fake_config(tmp,
                                  configname=self.CONFIG_NAME,
                                  dotpath=self.CONFIG_DOTPATH,
                                  backup=self.CONFIG_BACKUP,
                                  create=self.CONFIG_CREATE)
    # load and edit the config
    cfg_dict = yaml_load(confpath)
    # two dotfiles
    key_vimrc = 'f_vimrc'
    key_xinitrc = 'f_xinitrc'
    cfg_dict['dotfiles'] = {
        key_vimrc: {'dst': '~/.vimrc', 'src': 'vimrc'},
        key_xinitrc: {'dst': '~/.xinitrc', 'src': 'xinitrc'},
    }
    # two profiles, the first one including the second
    prof1 = 'host1'
    prof2 = 'host2'
    cfg_dict['profiles'] = {
        prof1: {'dotfiles': [key_xinitrc], 'include': ['host2']},
        prof2: {'dotfiles': [key_vimrc]},
    }
    # persist and re-parse
    yaml_dump(cfg_dict, confpath)
    conf = Cfg(confpath, debug=True)
    self.assertIsNotNone(conf)
    # both profiles must exist
    self.assertIn(prof1, conf.profiles)
    self.assertIn(prof2, conf.profiles)
    # prof1 pulls in prof2's dotfiles through the include
    linked = conf.profiles[prof1]['dotfiles']
    self.assertIn(key_vimrc, linked)
    self.assertIn(key_xinitrc, linked)
    # prof2 only owns its own dotfile
    linked = conf.profiles[prof2]['dotfiles']
    self.assertIn(key_vimrc, linked)
    self.assertNotIn(key_xinitrc, linked)
    # a non-existing included profile must not break parsing
    cfg_dict = yaml_load(confpath)
    cfg_dict['profiles'] = {
        prof1: {'dotfiles': [key_xinitrc], 'include': ['host2']},
        prof2: {'dotfiles': [key_vimrc], 'include': ['host3']},
    }
    yaml_dump(cfg_dict, confpath)
    conf = Cfg(confpath, debug=True)
    self.assertIsNotNone(conf)
def test_import_configs_merge(self):
    """Test import_configs when all config keys merge.

    Builds an imported ("_ed") and an importing ("_ing") config whose
    keys do not overlap, then checks that every section of the
    imported config ends up as a subset of the importing config.
    """
    tmp = get_tempdir()
    self.assertTrue(os.path.exists(tmp))
    self.addCleanup(clean, tmp)
    # variables/dynvariables files pulled in via import_variables
    vars_ed = {
        'variables': {
            'a_var_ed': '33',
        },
        'dynvariables': {
            'a_dynvar_ed': 'echo 33',
        },
    }
    vars_ing = {
        'variables': {
            'a_var_ing': 'dd',
        },
        'dynvariables': {
            'a_dynvar_ing': 'echo dd',
        },
    }
    vars_ed_file = create_yaml_keyval(vars_ed, tmp)
    vars_ing_file = create_yaml_keyval(vars_ing, tmp)
    # actions files pulled in via import_actions
    actions_ed = {
        'actions': {
            'pre': {
                'a_pre_action_ed': 'echo pre 22',
            },
            'post': {
                'a_post_action_ed': 'echo post 22',
            },
            'a_action_ed': 'echo 22',
        }
    }
    actions_ing = {
        'actions': {
            'pre': {
                'a_pre_action_ing': 'echo pre aa',
            },
            'post': {
                'a_post_action_ing': 'echo post aa',
            },
            'a_action_ing': 'echo aa',
        }
    }
    actions_ed_file = create_yaml_keyval(actions_ed, tmp)
    actions_ing_file = create_yaml_keyval(actions_ing, tmp)
    # NOTE(review): dotpath 'importing' below looks like a copy-paste
    # from the importing config -- confirm 'imported' was intended
    imported = {
        'config': {
            'dotpath': 'importing',
            'import_variables': [vars_ed_file],
            'import_actions': [actions_ed_file],
        },
        'dotfiles': {
            'f_vimrc': {'dst': '~/.vimrc', 'src': 'vimrc'},
        },
        'profiles': {
            'host1': {
                'dotfiles': ['f_vimrc'],
            },
        },
        'actions': {
            'pre': {
                'a_pre_log_ed': 'echo pre 2',
            },
            'post': {
                'a_post_log_ed': 'echo post 2',
            },
            'a_log_ed': 'echo 2',
        },
        'trans': {
            't_log_ed': 'echo 3',
        },
        'trans_write': {
            'tw_log_ed': 'echo 4',
        },
        'variables': {
            'v_log_ed': '42',
        },
        'dynvariables': {
            'dv_log_ed': 'echo 5',
        },
    }
    importing = {
        'config': {
            'dotpath': 'importing',
            'import_variables': [vars_ing_file],
            'import_actions': [actions_ing_file],
        },
        'dotfiles': {
            'f_xinitrc': {'dst': '~/.xinitrc', 'src': 'xinitrc'},
        },
        'profiles': {
            'host2': {
                'dotfiles': ['f_xinitrc'],
                'include': ['host1'],
            },
        },
        'actions': {
            'pre': {
                'a_pre_log_ing': 'echo pre a',
            },
            'post': {
                'a_post_log_ing': 'echo post a',
            },
            'a_log_ing': 'echo a',
        },
        'trans': {
            't_log_ing': 'echo b',
        },
        'trans_write': {
            'tw_log_ing': 'echo c',
        },
        'variables': {
            'v_log_ing': 'd',
        },
        'dynvariables': {
            'dv_log_ing': 'echo e',
        },
    }
    # create the imported base config file
    imported_path = create_fake_config(tmp,
                                       configname=self.CONFIG_NAME_2,
                                       **imported['config'])
    # create the importing base config file, referencing the imported one
    importing_path = create_fake_config(
        tmp,
        configname=self.CONFIG_NAME,
        import_configs=[self.CONFIG_NAME_2],
        **importing['config'])
    # edit the imported config
    populate_fake_config(
        imported_path,
        **{k: v for k, v in imported.items() if k != 'config'})
    # edit the importing config
    populate_fake_config(
        importing_path,
        **{k: v for k, v in importing.items() if k != 'config'})
    # do the tests
    importing_cfg = Cfg(importing_path, debug=True)
    imported_cfg = Cfg(imported_path, debug=True)
    self.assertIsNotNone(importing_cfg)
    self.assertIsNotNone(imported_cfg)
    # test profiles
    self.assertIsSubset(imported_cfg.profiles, importing_cfg.profiles)
    # test dotfiles
    self.assertIsSubset(imported_cfg.dotfiles, importing_cfg.dotfiles)
    # test actions
    # BUGFIX: the original chained assignment
    #   pre_ed = post_ed = pre_ing = post_ing = {}
    # bound all four names to the SAME dict object, which made the
    # subset assertions below trivially true; use four distinct dicts
    pre_ed, post_ed, pre_ing, post_ing = {}, {}, {}, {}
    for k, v in imported_cfg.actions.items():
        kind, _ = v
        if kind == 'pre':
            pre_ed[k] = v
        elif kind == 'post':
            post_ed[k] = v
    for k, v in importing_cfg.actions.items():
        kind, _ = v
        if kind == 'pre':
            pre_ing[k] = v
        elif kind == 'post':
            post_ing[k] = v
    self.assertIsSubset(pre_ed, pre_ing)
    self.assertIsSubset(post_ed, post_ing)
    # test transactions
    self.assertIsSubset(imported_cfg.trans_r, importing_cfg.trans_r)
    self.assertIsSubset(imported_cfg.trans_w, importing_cfg.trans_w)
    # test variables; '_'-prefixed entries are internal and excluded
    imported_vars = {
        k: v
        for k, v in imported_cfg.variables.items()
        if not k.startswith('_')
    }
    importing_vars = {
        k: v
        for k, v in importing_cfg.variables.items()
        if not k.startswith('_')
    }
    self.assertIsSubset(imported_vars, importing_vars)
    # test prodots (duplicate of the profiles check above, kept as-is)
    self.assertIsSubset(imported_cfg.profiles, importing_cfg.profiles)
def test_import_configs_override(self):
    """Test import_configs when some config keys overlap.

    Both configs deliberately define the SAME keys (actions, trans,
    variables, dotfiles 'f_xinitrc', profile 'host2') with different
    values, so that the importing config's values are expected to
    override the imported ones.
    """
    tmp = get_tempdir()
    self.assertTrue(os.path.exists(tmp))
    self.addCleanup(clean, tmp)
    # shared variable keys with different values in each config
    vars_ed = {
        'variables': {
            'a_var': '33',
        },
        'dynvariables': {
            'a_dynvar': 'echo 33',
        },
    }
    vars_ing = {
        'variables': {
            'a_var': 'dd',
        },
        'dynvariables': {
            'a_dynvar': 'echo dd',
        },
    }
    vars_ed_file = create_yaml_keyval(vars_ed, tmp)
    vars_ing_file = create_yaml_keyval(vars_ing, tmp)
    # shared action keys with different commands in each config
    actions_ed = {
        'actions': {
            'pre': {
                'a_pre_action': 'echo pre 22',
            },
            'post': {
                'a_post_action': 'echo post 22',
            },
            'a_action': 'echo 22',
        }
    }
    actions_ing = {
        'actions': {
            'pre': {
                'a_pre_action': 'echo pre aa',
            },
            'post': {
                'a_post_action': 'echo post aa',
            },
            'a_action': 'echo aa',
        }
    }
    actions_ed_file = create_yaml_keyval(actions_ed, tmp)
    actions_ing_file = create_yaml_keyval(actions_ing, tmp)
    # the imported ("ed") config
    imported = {
        'config': {
            'dotpath': 'imported',
            'backup': False,
            'import_variables': [vars_ed_file],
            'import_actions': [actions_ed_file],
        },
        'dotfiles': {
            'f_vimrc': {
                'dst': '~/.vimrc', 'src': 'vimrc'
            },
            'f_xinitrc': {
                'dst': '~/.xinitrc', 'src': 'xinitrc', 'link': 'link'
            },
        },
        'profiles': {
            'host1': {
                'dotfiles': ['f_vimrc'],
            },
            'host2': {
                'dotfiles': ['f_xinitrc'],
            },
        },
        'actions': {
            'pre': {
                'a_pre_log': 'echo pre 2',
            },
            'post': {
                'a_post_log': 'echo post 2',
            },
            'a_log': 'echo 2',
        },
        'trans': {
            't_log': 'echo 3',
        },
        'trans_write': {
            'tw_log': 'echo 4',
        },
        'variables': {
            'v_log': '42',
        },
        'dynvariables': {
            'dv_log': 'echo 5',
        },
    }
    # the importing ("ing") config; overlaps f_xinitrc and host2
    importing = {
        'config': {
            'dotpath': 'importing',
            'backup': True,
            'import_variables': [vars_ing_file],
            'import_actions': [actions_ing_file],
        },
        'dotfiles': {
            'f_xinitrc': {
                'dst': '~/.xinitrc', 'src': 'xinitrc'
            },
        },
        'profiles': {
            'host2': {
                'dotfiles': ['f_xinitrc'],
                'include': ['host1'],
            },
        },
        'actions': {
            'pre': {
                'a_pre_log': 'echo pre a',
            },
            'post': {
                'a_post_log': 'echo post a',
            },
            'a_log': 'echo a',
        },
        'trans': {
            't_log': 'echo b',
        },
        'trans_write': {
            'tw_log': 'echo c',
        },
        'variables': {
            'v_log': 'd',
        },
        'dynvariables': {
            'dv_log': 'echo e',
        },
    }
    # create the imported base config file
    imported_path = create_fake_config(tmp,
                                       configname=self.CONFIG_NAME_2,
                                       **imported['config'])
    # create the importing base config file
    # note: imports by absolute path here (tuple), unlike the
    # by-name import used in the merge test
    importing_path = create_fake_config(tmp,
                                        configname=self.CONFIG_NAME,
                                        import_configs=(imported_path, ),
                                        **importing['config'])
    # edit the imported config
    populate_fake_config(
        imported_path,
        **{k: v for k, v in imported.items() if k != 'config'})
    # edit the importing config
    populate_fake_config(
        importing_path,
        **{k: v for k, v in importing.items() if k != 'config'})
    # do the tests
    importing_cfg = Cfg(importing_path, debug=True)
    imported_cfg = Cfg(imported_path, debug=True)
    self.assertIsNotNone(importing_cfg)
    self.assertIsNotNone(imported_cfg)
    # test profiles
    self.assertIsSubset(imported_cfg.profiles, importing_cfg.profiles)
    # test dotfiles: the non-overlapping one is identical, the
    # overlapping one must have been overridden
    self.assertEqual(importing_cfg.dotfiles['f_vimrc'],
                     imported_cfg.dotfiles['f_vimrc'])
    self.assertNotEqual(importing_cfg.dotfiles['f_xinitrc'],
                        imported_cfg.dotfiles['f_xinitrc'])
    # test actions: every shared key must differ after the override
    self.assertFalse(
        any((imported_cfg.actions[key] == importing_cfg.actions[key])
            for key in imported_cfg.actions))
    # test transactions: same expectation as for actions
    self.assertFalse(
        any(imported_cfg.trans_r[key] == importing_cfg.trans_r[key]
            for key in imported_cfg.trans_r))
    self.assertFalse(
        any(imported_cfg.trans_w[key] == importing_cfg.trans_w[key]
            for key in imported_cfg.trans_w))
    # test variables
    # since variables get merged they are
    # the same in both configs ('_'-prefixed internals excluded)
    imported_vars = imported_cfg.variables
    self.assertFalse(
        any(imported_vars[k] != v
            for k, v in importing_cfg.variables.items()
            if not k.startswith('_')))
    # test profiles dotfiles: host1 untouched, host2 overridden
    self.assertEqual(imported_cfg.profiles['host1']['dotfiles'],
                     importing_cfg.profiles['host1']['dotfiles'])
    self.assertNotEqual(imported_cfg.profiles['host2']['dotfiles'],
                        importing_cfg.profiles['host2']['dotfiles'])
    # host2 includes host1, so host1's dotfiles are a strict subset
    self.assertTrue(
        set(imported_cfg.profiles['host1']['dotfiles']) < set(
            importing_cfg.profiles['host2']['dotfiles']))
def test_index(self):
    """Index a directory tree, then update it after adding, editing
    and removing entries, checking the catalog tree after each step."""
    # init
    workingdir = get_tempdir()
    catalogpath = create_rnd_file(workingdir, 'catalog.json', content='')
    self.addCleanup(clean, workingdir)
    dirpath = get_tempdir()
    self.addCleanup(clean, dirpath)
    # create 3 files
    f1 = create_rnd_file(dirpath, 'file1')
    f2 = create_rnd_file(dirpath, 'file2')
    f3 = create_rnd_file(dirpath, 'file3')
    # create 2 directories
    d1 = create_dir(dirpath, 'dir1')
    d2 = create_dir(dirpath, 'dir2')
    # fill directories with files
    d1f1 = create_rnd_file(d1, 'dir1file1')
    d1f2 = create_rnd_file(d1, 'dir1file2')
    d2f1 = create_rnd_file(d2, 'dir2file1')
    noder = Noder()
    top = noder.new_top_node()
    catalog = Catalog(catalogpath, force=True, verbose=False)
    # create fake args (docopt-style keys)
    tmpdirname = 'tmpdir'
    args = {
        '<path>': dirpath,
        '<name>': tmpdirname,
        '--hash': True,
        '--meta': ['some meta'],
        '--no-subsize': False,
        '--verbose': True
    }
    # index the directory
    unix_tree(dirpath)
    cmd_index(args, noder, catalog, top, debug=True)
    # catalog file must have been written
    self.assertTrue(os.stat(catalogpath).st_size != 0)
    # print catalog
    noder.print_tree(top)
    # add some files and directories
    new1 = create_rnd_file(d1, 'newf1')
    new2 = create_rnd_file(dirpath, 'newf2')
    new3 = create_dir(dirpath, 'newd3')
    new4 = create_dir(d2, 'newd4')
    new5 = create_rnd_file(new4, 'newf5')
    unix_tree(dirpath)
    # modify files
    EDIT = 'edited'
    edit_file(d1f1, EDIT)
    # update storage
    cmd_update(args, noder, catalog, top, debug=True)
    # print catalog
    # print(read_from_file(catalogpath))
    noder.print_tree(top)
    # explore the top node to find all nodes
    self.assertTrue(len(top.children) == 1)
    storage = top.children[0]
    # 3 original files + 2 dirs + newf2 + newd3 at storage level
    self.assertTrue(len(storage.children) == 7)
    # ensures files and directories are in
    names = [node.name for node in anytree.PreOrderIter(storage)]
    print(names)
    self.assertTrue(os.path.basename(f1) in names)
    self.assertTrue(os.path.basename(f2) in names)
    self.assertTrue(os.path.basename(f3) in names)
    self.assertTrue(os.path.basename(d1) in names)
    self.assertTrue(os.path.basename(d1f1) in names)
    self.assertTrue(os.path.basename(d1f2) in names)
    self.assertTrue(os.path.basename(d2) in names)
    self.assertTrue(os.path.basename(d2f1) in names)
    self.assertTrue(os.path.basename(new1) in names)
    self.assertTrue(os.path.basename(new2) in names)
    self.assertTrue(os.path.basename(new3) in names)
    self.assertTrue(os.path.basename(new4) in names)
    self.assertTrue(os.path.basename(new5) in names)
    # per-directory child counts after the additions
    for node in storage.children:
        if node.name == os.path.basename(d1):
            self.assertTrue(len(node.children) == 3)
        elif node.name == os.path.basename(d2):
            self.assertTrue(len(node.children) == 2)
        elif node.name == os.path.basename(new3):
            self.assertTrue(len(node.children) == 0)
        elif node.name == os.path.basename(new4):
            self.assertTrue(len(node.children) == 1)
    # edited content must have been written to disk
    self.assertTrue(read_from_file(d1f1) == EDIT)
    # remove some files
    clean(d1f1)
    clean(d2)
    clean(new2)
    clean(new4)
    # update storage
    cmd_update(args, noder, catalog, top, debug=True)
    # ensures files and directories are (not) in
    names = [node.name for node in anytree.PreOrderIter(storage)]
    print(names)
    self.assertTrue(os.path.basename(f1) in names)
    self.assertTrue(os.path.basename(f2) in names)
    self.assertTrue(os.path.basename(f3) in names)
    self.assertTrue(os.path.basename(d1) in names)
    self.assertTrue(os.path.basename(d1f1) not in names)
    self.assertTrue(os.path.basename(d1f2) in names)
    self.assertTrue(os.path.basename(d2) not in names)
    self.assertTrue(os.path.basename(d2f1) not in names)
    self.assertTrue(os.path.basename(new1) in names)
    self.assertTrue(os.path.basename(new2) not in names)
    self.assertTrue(os.path.basename(new3) in names)
    self.assertTrue(os.path.basename(new4) not in names)
    self.assertTrue(os.path.basename(new5) not in names)
    # per-directory child counts after the removals
    for node in storage.children:
        if node.name == os.path.basename(d1):
            self.assertTrue(len(node.children) == 2)
        elif node.name == os.path.basename(new3):
            self.assertTrue(len(node.children) == 0)
def test_ext_config_yaml_not_mix(self):
    """Test whether the import_configs mixes yaml files upon importing.

    Every key in the imported config ends with '_ed' and every key in
    the importing config ends with '_ing'; after importing dotfiles
    into each, the suffix checks below verify that no entry leaked
    from one yaml file into the other.
    """
    # dotfiles on filesystem
    src = get_tempdir()
    self.assertTrue(os.path.exists(src))
    self.addCleanup(clean, src)
    # create some random dotfiles
    dotfiles = []
    for _ in range(3):
        dotfile, _ = create_random_file(src)
        dotfiles.append(dotfile)
        self.addCleanup(clean, dotfile)
    self.assertTrue(all(map(os.path.exists, dotfiles)))
    # create dotdrop home
    dotdrop_home = get_tempdir()
    self.assertTrue(os.path.exists(dotdrop_home))
    self.addCleanup(clean, dotdrop_home)
    # the imported ("ed") config; all keys suffixed '_ed'
    dotpath_ed = 'imported'
    imported = {
        'config': {
            'dotpath': dotpath_ed,
        },
        'dotfiles': {},
        'profiles': {
            'host1': {
                'dotfiles': [],
            },
        },
        'actions': {
            'pre': {
                'a_pre_log_ed': 'echo pre 2',
            },
            'post': {
                'a_post_log_ed': 'echo post 2',
            },
            'a_log_ed': 'echo 2',
        },
        'trans': {
            't_log_ed': 'echo 3',
        },
        'trans_write': {
            'tw_log_ed': 'echo 4',
        },
        'variables': {
            'v_log_ed': '42',
        },
        'dynvariables': {
            'dv_log_ed': 'echo 5',
        },
    }
    # the importing ("ing") config; all keys suffixed '_ing'
    dotpath_ing = 'importing'
    importing = {
        'config': {
            'dotpath': dotpath_ing,
        },
        'dotfiles': {},
        'profiles': {
            'host2': {
                'dotfiles': [],
                'include': ['host1'],
            },
        },
        'actions': {
            'pre': {
                'a_pre_log_ing': 'echo pre a',
            },
            'post': {
                'a_post_log_ing': 'echo post a',
            },
            'a_log_ing': 'echo a',
        },
        'trans': {
            't_log_ing': 'echo b',
        },
        'trans_write': {
            'tw_log_ing': 'echo c',
        },
        'variables': {
            'v_log_ing': 'd',
        },
        'dynvariables': {
            'dv_log_ing': 'echo e',
        },
    }
    # first two dotfiles go to the importing config, last one to the
    # imported config
    dotfiles_ing, dotfiles_ed = dotfiles[:-1], dotfiles[-1:]
    # create the imported base config file
    imported_path = create_fake_config(dotdrop_home,
                                       configname='config-2.yaml',
                                       **imported['config'])
    # create the importing base config file
    importing_path = create_fake_config(dotdrop_home,
                                        configname='config.yaml',
                                        import_configs=['config-2.yaml'],
                                        **importing['config'])
    # edit the imported config
    populate_fake_config(
        imported_path,
        **{k: v for k, v in imported.items() if k != 'config'})
    # edit the importing config
    populate_fake_config(
        importing_path,
        **{k: v for k, v in importing.items() if k != 'config'})
    # import the dotfiles
    o = load_options(imported_path, 'host1')
    o.import_path = dotfiles_ed
    cmd_importer(o)
    o = load_options(importing_path, 'host2')
    o.import_path = dotfiles_ing
    cmd_importer(o)
    # reload the config
    # NOTE(review): 'o' does not appear to be used past this point
    o = load_options(importing_path, 'host2')
    # test imported config
    y = self.load_yaml(imported_path)
    # testing dotfiles: only the '_ed' ones must be present
    self.assertTrue(all(file_in_yaml(y, df) for df in dotfiles_ed))
    self.assertFalse(any(file_in_yaml(y, df) for df in dotfiles_ing))
    # testing profiles
    profiles = y['profiles'].keys()
    self.assertTrue('host1' in profiles)
    self.assertFalse('host2' in profiles)
    # testing actions: flatten pre/post/plain into one key set
    actions = y['actions']['pre']
    actions.update(y['actions']['post'])
    actions.update({
        k: v
        for k, v in y['actions'].items() if k not in ('pre', 'post')
    })
    actions = actions.keys()
    self.assertTrue(all(a.endswith('ed') for a in actions))
    self.assertFalse(any(a.endswith('ing') for a in actions))
    # testing transformations
    transformations = y['trans_read'].keys()
    self.assertTrue(all(t.endswith('ed') for t in transformations))
    self.assertFalse(any(t.endswith('ing') for t in transformations))
    transformations = y['trans_write'].keys()
    self.assertTrue(all(t.endswith('ed') for t in transformations))
    self.assertFalse(any(t.endswith('ing') for t in transformations))
    # testing variables (internal ones stripped by the helper)
    variables = self._remove_priv_vars(y['variables'].keys())
    self.assertTrue(all(v.endswith('ed') for v in variables))
    self.assertFalse(any(v.endswith('ing') for v in variables))
    dyn_variables = y['dynvariables'].keys()
    self.assertTrue(all(dv.endswith('ed') for dv in dyn_variables))
    self.assertFalse(any(dv.endswith('ing') for dv in dyn_variables))
    # test importing config
    y = self.load_yaml(importing_path)
    # testing dotfiles: only the '_ing' ones must be present
    self.assertTrue(all(file_in_yaml(y, df) for df in dotfiles_ing))
    self.assertFalse(any(file_in_yaml(y, df) for df in dotfiles_ed))
    # testing profiles
    profiles = y['profiles'].keys()
    self.assertTrue('host2' in profiles)
    self.assertFalse('host1' in profiles)
    # testing actions: flatten pre/post/plain into one key set
    actions = y['actions']['pre']
    actions.update(y['actions']['post'])
    actions.update({
        k: v
        for k, v in y['actions'].items() if k not in ('pre', 'post')
    })
    actions = actions.keys()
    self.assertTrue(all(action.endswith('ing') for action in actions))
    self.assertFalse(any(action.endswith('ed') for action in actions))
    # testing transformations
    transformations = y['trans_read'].keys()
    self.assertTrue(all(t.endswith('ing') for t in transformations))
    self.assertFalse(any(t.endswith('ed') for t in transformations))
    transformations = y['trans_write'].keys()
    self.assertTrue(all(t.endswith('ing') for t in transformations))
    self.assertFalse(any(t.endswith('ed') for t in transformations))
    # testing variables (internal ones stripped by the helper)
    variables = self._remove_priv_vars(y['variables'].keys())
    self.assertTrue(all(v.endswith('ing') for v in variables))
    self.assertFalse(any(v.endswith('ed') for v in variables))
    dyn_variables = y['dynvariables'].keys()
    self.assertTrue(all(dv.endswith('ing') for dv in dyn_variables))
    self.assertFalse(any(dv.endswith('ed') for dv in dyn_variables))
def test_install(self):
    """Test the install function.

    Builds a fake dotpath covering every install flavour -- plain
    copy, template, binary file, backup of an existing destination,
    symlink, directory, symlinked directory, post-action,
    read-transformation and template function -- installs them and
    verifies each result.
    """
    # dotpath location
    tmp = get_tempdir()
    self.assertTrue(os.path.exists(tmp))
    self.addCleanup(clean, tmp)
    # where dotfiles will be installed
    dst = get_tempdir()
    self.assertTrue(os.path.exists(dst))
    self.addCleanup(clean, dst)
    # create the dotfile in dotdrop (plain copy)
    f1, _ = create_random_file(tmp)
    dst1 = os.path.join(dst, get_string(6))
    d1 = Dotfile(get_string(5), dst1, os.path.basename(f1))
    # fake a print
    self.assertTrue(str(d1) != '')
    # templated dotfile
    f2, _ = create_random_file(tmp)
    dst2 = os.path.join(dst, get_string(6))
    d2 = Dotfile(get_string(5), dst2, os.path.basename(f2))
    with open(f2, 'w') as f:
        f.write(self.TEMPLATE)
    # binary dotfile
    f3, _ = create_random_file(tmp, binary=True)
    dst3 = os.path.join(dst, get_string(6))
    d3 = Dotfile(get_string(5), dst3, os.path.basename(f3))
    # create a directory dotfile
    dir1 = os.path.join(tmp, 'somedir')
    create_dir(dir1)
    fd, _ = create_random_file(dir1)
    dstd = os.path.join(dst, get_string(6))
    ddot = Dotfile(get_string(5), dstd, os.path.basename(dir1))
    # to test backup: pre-create the destination with other content
    f4, _ = create_random_file(tmp)
    dst4 = os.path.join(dst, get_string(6))
    d4 = Dotfile(key=get_string(6), dst=dst4, src=os.path.basename(f4))
    with open(dst4, 'w') as f:
        f.write(get_string(16))
    # to test link
    f5, _ = create_random_file(tmp)
    dst5 = os.path.join(dst, get_string(6))
    self.addCleanup(clean, dst5)
    d5 = Dotfile(get_string(6), dst5, os.path.basename(f5), link=True)
    # create the dotfile directories in dotdrop
    # (renamed from 'dir1', which shadowed the directory created above)
    dir6 = create_dir(os.path.join(tmp, get_string(6)))
    self.assertTrue(os.path.exists(dir6))
    self.addCleanup(clean, dir6)
    dst6 = os.path.join(dst, get_string(6))
    # fill with files
    sub1, _ = create_random_file(dir6, template=True)
    self.assertTrue(os.path.exists(sub1))
    sub2, _ = create_random_file(dir6)
    self.assertTrue(os.path.exists(sub2))
    # make up the dotfile
    d6 = Dotfile(get_string(6), dst6, os.path.basename(dir6))
    # to test symlink directories
    dir2 = create_dir(os.path.join(tmp, get_string(6)))
    self.assertTrue(os.path.exists(dir2))
    self.addCleanup(clean, dir2)
    dst7 = os.path.join(dst, get_string(6))
    # fill with files
    sub3, _ = create_random_file(dir2)
    self.assertTrue(os.path.exists(sub3))
    sub4, _ = create_random_file(dir2)
    self.assertTrue(os.path.exists(sub4))
    # make up the dotfile
    d7 = Dotfile(get_string(6), dst7, os.path.basename(dir2), link=True)
    # to test actions
    value = get_string(12)
    fact = '/tmp/action'
    self.addCleanup(clean, fact)
    act1 = Action('testaction', 'post',
                  'echo "{}" > {}'.format(value, fact))
    f8, _ = create_random_file(tmp)
    dst8 = os.path.join(dst, get_string(6))
    d8 = Dotfile(get_string(6), dst8, os.path.basename(f8),
                 actions=[act1])
    # to test transformations
    trans1 = 'trans1'
    trans2 = 'trans2'
    cmd = 'cat {0} | sed \'s/%s/%s/g\' > {1}' % (trans1, trans2)
    tr = Action('testtrans', 'post', cmd)
    f9, _ = create_random_file(tmp, content=trans1)
    dst9 = os.path.join(dst, get_string(6))
    d9 = Dotfile(get_string(6), dst9, os.path.basename(f9), trans_r=tr)
    # to test template
    f10, _ = create_random_file(tmp, content='{{@@ header() @@}}')
    dst10 = os.path.join(dst, get_string(6))
    d10 = Dotfile(get_string(6), dst10, os.path.basename(f10))
    # generate the config and stuff
    profile = get_string(5)
    confpath = os.path.join(tmp, self.CONFIG_NAME)
    dotfiles = [d1, d2, d3, d4, d5, d6, d7, d8, d9, d10, ddot]
    self.fake_config(confpath, dotfiles, profile, tmp, [act1], [tr])
    # the generated config must at least parse
    conf = Cfg(confpath)
    self.assertTrue(conf is not None)
    # install them
    conf, opts = load_config(confpath, profile)
    opts['safe'] = False
    opts['debug'] = True
    opts['showdiff'] = True
    opts['variables'] = {}
    cmd_install(opts, conf)
    # now compare the generated files
    self.assertTrue(os.path.exists(dst1))
    self.assertTrue(os.path.exists(dst2))
    self.assertTrue(os.path.exists(dst3))
    self.assertTrue(os.path.exists(dst5))
    self.assertTrue(os.path.exists(dst6))
    self.assertTrue(os.path.exists(dst7))
    self.assertTrue(os.path.exists(dst8))
    self.assertTrue(os.path.exists(dst10))
    self.assertTrue(os.path.exists(fd))
    # check if 'dst5' is a link whose target is 'f5'
    self.assertTrue(os.path.islink(dst5))
    self.assertTrue(os.path.realpath(dst5) == os.path.realpath(f5))
    # check if 'dst7' is a link whose target is 'dir2'
    self.assertTrue(os.path.islink(dst7))
    self.assertTrue(os.path.realpath(dst7) == os.path.realpath(dir2))
    # make sure backup is there
    b = dst4 + Installer.BACKUP_SUFFIX
    self.assertTrue(os.path.exists(b))
    # compare file contents rather than just stat() metadata
    # (shallow=True could report equality on metadata alone)
    self.assertTrue(filecmp.cmp(f1, dst1, shallow=False))
    with open(dst2, 'r') as fh:
        f2content = fh.read()
    self.assertTrue(f2content == self.RESULT)
    self.assertTrue(filecmp.cmp(f3, dst3, shallow=False))
    # test action has been executed
    self.assertTrue(os.path.exists(fact))
    self.assertTrue(str(act1) != '')
    with open(fact, 'r') as fh:
        actcontent = fh.read().rstrip()
    self.assertTrue(actcontent == value)
    # test transformation has been done
    self.assertTrue(os.path.exists(dst9))
    with open(dst9, 'r') as fh:
        transcontent = fh.read().rstrip()
    self.assertTrue(transcontent == trans2)
    # test template has been replaced
    self.assertTrue(os.path.exists(dst10))
    with open(dst10, 'r') as fh:
        tempcontent = fh.read().rstrip()
    self.assertTrue(tempcontent == header())
def test_remove(self):
    """Test removing dotfiles, first by key then by destination path."""
    # dotfiles in dotpath
    home = get_tempdir()
    self.assertTrue(os.path.exists(home))
    self.addCleanup(clean, home)
    dotpath = os.path.join(home, 'dotfiles')
    confpath = os.path.join(home, 'config.yaml')
    create_dir(dotpath)
    file1, _ = create_random_file(dotpath)
    file2, _ = create_random_file(dotpath)
    file3, _ = create_random_file(dotpath)
    # three dotfiles spread over three profiles
    config_dict = {
        'config': {
            'dotpath': 'dotfiles',
        },
        'dotfiles': {
            'f_test1': {'src': file1, 'dst': '/dev/null'},
            'f_test2': {'src': file2, 'dst': '/dev/null'},
            'f_test3': {'src': file3, 'dst': '/tmp/some-fake-path'},
        },
        'profiles': {
            'host1': {
                'dotfiles': ['f_test1', 'f_test2', 'f_test3'],
            },
            'host2': {
                'dotfiles': ['f_test1'],
            },
            'host3': {
                'dotfiles': ['f_test2'],
            },
        },
    }
    yaml_dump(config_dict, confpath)
    # remove by key
    opt = load_options(confpath, 'host1')
    opt.remove_path = ['f_test1']
    opt.remove_iskey = True
    opt.debug = True
    opt.safe = False
    cmd_remove(opt)
    # the backing file is gone, the others untouched
    self.assertFalse(os.path.exists(file1))
    self.assertTrue(os.path.exists(file2))
    self.assertTrue(os.path.exists(file3))
    # reload the config dict
    content = yaml_load(confpath)
    # the dotfile and all references to it are gone
    self.assertNotIn('f_test1', content['dotfiles'])
    self.assertNotIn('f_test1', content['profiles']['host1']['dotfiles'])
    self.assertNotIn('host2', content['profiles'])
    # everything else stays intact
    self.assertIn('f_test2', content['dotfiles'].keys())
    self.assertIn('f_test3', content['dotfiles'].keys())
    self.assertIn('f_test2', content['profiles']['host1']['dotfiles'])
    self.assertIn('f_test3', content['profiles']['host1']['dotfiles'])
    self.assertEqual(content['profiles']['host3']['dotfiles'],
                     ['f_test2'])
    # remove by destination path
    opt = load_options(confpath, 'host1')
    opt.remove_path = ['/tmp/some-fake-path']
    opt.remove_iskey = False
    opt.debug = True
    opt.safe = False
    cmd_remove(opt)
    # the backing file is gone, the remaining one untouched
    self.assertTrue(os.path.exists(file2))
    self.assertFalse(os.path.exists(file3))
    # reload the config dict
    content = yaml_load(confpath)
    # the dotfile and its profile reference are gone
    self.assertNotIn('f_test3', content['dotfiles'])
    self.assertNotIn('f_test3', content['profiles']['host1']['dotfiles'])
    # everything else stays intact
    self.assertIn('host1', content['profiles'].keys())
    self.assertNotIn('host2', content['profiles'].keys())
    self.assertIn('host3', content['profiles'].keys())
    self.assertEqual(content['profiles']['host1']['dotfiles'],
                     ['f_test2'])
    self.assertEqual(content['profiles']['host3']['dotfiles'],
                     ['f_test2'])