def gen(basepath, destpath, profile=None):
    """Generate the website described by ``conf.yaml`` at *basepath* into *destpath*.

    *profile*, when given and present as a top-level key of ``conf.yaml``,
    overrides values from the 'base' section. Renders the changelog, loads the
    page list, copies the skeleton on first run, then renders every page.
    """
    basepath = Path(basepath)
    destpath = Path(destpath)
    configpath = basepath + 'conf.yaml'
    # Close every handle deterministically instead of relying on refcounting.
    with io.open(configpath, 'rt', encoding='utf-8') as fp:
        confall = yaml.load(fp)
    conf = confall['base']
    if profile and profile in confall:
        conf.update(confall[profile])
    tixurl = conf['tixurl']
    changelogdata = read_changelog_file(str(basepath + conf['changelog']))
    changelog = render_changelog(changelogdata, tixurl)
    if 'env' in conf:
        envpath = basepath + conf['env']
        with io.open(envpath, 'rt', encoding='utf-8') as fp:
            env = yaml.load(fp)
    else:
        env = {}
    env['changelog'] = changelog
    pagespath = basepath + conf['pages']
    if 'basepages' in conf:
        fallbackpath = basepath + conf['basepages']
    else:
        fallbackpath = None
    with io.open(pagespath, 'rt', encoding='utf-8') as fp:
        pagedatas = yaml.load(fp)
    # pagespath[:-1] is the containing directory (Path slicing drops the last
    # component) — TODO confirm against hscommon.path semantics.
    pages = [MainPage(pagedata, pagespath=pagespath[:-1], fallbackpath=fallbackpath)
             for pagedata in pagedatas]
    skelpath = basepath + Path(conf['skeleton'])
    if not io.exists(destpath):
        print("Copying skeleton")
        io.copytree(skelpath, destpath)
    pages[0].meta = conf.get('firstpage_meta', '')
    for page in pages:  # unused index dropped
        print("Rendering {0}".format(page.name))
        page.render(destpath, pages, env)
def check(str_date, expected_date):
    """Import a minimal QIF containing *str_date* and verify the guessed date.

    QIF parsing uses the date-format guessing code, which is what we exercise.
    """
    app = TestApp()
    contents = "!Type:Bank\nD{str_date}\nT42.32\n^".format(str_date=str_date)
    fp = hsio.open(filepath, 'wt', encoding='utf-8')
    fp.write(contents)
    app.doc.parse_file_for_import(filepath)
    eq_(app.itable[0].date_import, expected_date)
def gen(basepath, destpath, profile=None):
    """Generate the site at *destpath* from the sources under *basepath*.

    Configuration is read from ``conf.yaml``; if *profile* names a section of
    that file, its values override the 'base' section.
    """
    basepath = Path(basepath)
    destpath = Path(destpath)
    confall = yaml.load(io.open(basepath + 'conf.yaml', 'rt', encoding='utf-8'))
    conf = confall['base']
    if profile and profile in confall:
        conf.update(confall[profile])
    changelogdata = read_changelog_file(str(basepath + conf['changelog']))
    changelog = render_changelog(changelogdata, conf['tixurl'])
    if 'env' in conf:
        env = yaml.load(io.open(basepath + conf['env'], 'rt', encoding='utf-8'))
    else:
        env = {}
    env['changelog'] = changelog
    pagespath = basepath + conf['pages']
    fallbackpath = None
    if 'basepages' in conf:
        fallbackpath = basepath + conf['basepages']
    pagedatas = yaml.load(io.open(pagespath, 'rt', encoding='utf-8'))
    pages = [
        MainPage(pagedata, pagespath=pagespath[:-1], fallbackpath=fallbackpath)
        for pagedata in pagedatas
    ]
    skelpath = basepath + Path(conf['skeleton'])
    if not io.exists(destpath):
        print("Copying skeleton")
        io.copytree(skelpath, destpath)
    pages[0].meta = conf.get('firstpage_meta', '')
    for page in pages:
        print("Rendering {0}".format(page.name))
        page.render(destpath, pages, env)
def _read_info(self, field):
    """Lazily fill in *field* (size/ctime/mtime/md5/md5partial) from disk."""
    super(File, self)._read_info(field)
    if field in ('size', 'ctime', 'mtime'):
        stats = io.stat(self.path)
        self.size = nonone(stats.st_size, 0)
        self.ctime = nonone(stats.st_ctime, 0)
        self.mtime = nonone(stats.st_mtime, 0)
    elif field == 'md5partial':
        try:
            fp = io.open(self.path, 'rb')
            try:
                # Only hash a slice of the file; a cheap first-pass
                # discriminator before full-content hashing.
                fp.seek(self._md5partial_offset)
                partialdata = fp.read(self._md5partial_size)
                self.md5partial = hashlib.md5(partialdata).digest()
            finally:
                # Was leaked if seek/read raised; close unconditionally.
                fp.close()
        except Exception:
            # Best-effort: an unreadable file simply gets no md5partial.
            pass
    elif field == 'md5':
        try:
            fp = io.open(self.path, 'rb')
            try:
                filedata = fp.read()
                self.md5 = hashlib.md5(filedata).digest()
            finally:
                fp.close()
        except Exception:
            # Best-effort: an unreadable file simply gets no md5.
            pass
def test_ignore_hardlink_matches(self, tmpdir):
    # With ignore_hardlink_matches set, two directory entries pointing to the
    # same inode must not be reported as duplicates.
    basepath = Path(str(tmpdir))
    io.open(basepath + 'myfile', 'w').write('foo')
    os.link(str(basepath + 'myfile'), str(basepath + 'hardlink'))
    app = TestApp().app
    app.directories.add_path(basepath)
    app.scanner.scan_type = ScanType.Contents
    app.options['ignore_hardlink_matches'] = True
    app.start_scanning()
    eq_(len(app.results.groups), 0)
def render(self, destpath, menu, env):
    """Render this page's markdown source to ``<destpath>/<basepath>/<basename>.htm``.

    *env* supplies str.format() substitutions applied to the markdown source
    before conversion; *menu* is interpolated into the MAIN_CONTENTS template.
    """
    dest = destpath + self.basepath + '{0}.htm'.format(self.basename)
    # dest[:-1] is the destination's parent directory (Path slicing drops the
    # last component) — create it on demand.
    if not io.exists(dest[:-1]):
        io.makedirs(dest[:-1])
    # Close handles deterministically instead of relying on refcounting.
    with io.open(self.path, 'rt', encoding='utf-8') as fp:
        mdcontents = fp.read()
    mdcontents = mdcontents.format(**env)
    main_contents = markdown.markdown(mdcontents)
    rendered = MAIN_CONTENTS.format(
        meta=self.meta, title=self.title, relpath=self.relpath, menu=menu,
        contents=main_contents)
    with io.open(dest, 'wt', encoding='utf-8') as fp:
        fp.write(rendered)
def test_copy_or_move_clean_empty_dirs(self, tmpdir, monkeypatch):
    """copy_or_move() must call clean_empty_dirs() with the source's directory."""
    tmppath = Path(str(tmpdir))
    sourcepath = tmppath + 'source'
    io.mkdir(sourcepath)
    # Close the handle right away: we only need the file to exist, and a
    # dangling open handle can make the subsequent move fail on some platforms.
    io.open(sourcepath + 'myfile', 'w').close()
    app = TestApp().app
    app.directories.add_path(tmppath)
    [myfile] = app.directories.get_files()
    monkeypatch.setattr(app, 'clean_empty_dirs', log_calls(lambda path: None))
    app.copy_or_move(myfile, False, tmppath + 'dest', 0)
    calls = app.clean_empty_dirs.calls
    eq_(1, len(calls))
    eq_(sourcepath, calls[0]['path'])
def test_dont_group_files_that_dont_exist(tmpdir):
    # Files may be moved by the user while a scan is in progress, so group
    # creation must re-check that each file still exists. We simulate that by
    # deleting one file between the get_matches() phase and the get_groups()
    # phase.
    s = Scanner()
    s.scan_type = ScanType.Contents
    p = Path(str(tmpdir))
    for name in ('file1', 'file2'):
        io.open(p + name, 'w').write('foo')
    file1, file2 = fs.get_files(p)

    def getmatches(*args, **kw):
        io.remove(file2.path)
        return [Match(file1, file2, 100)]

    s._getmatches = getmatches
    assert not s.get_dupe_groups([file1, file2])
def test_copy_or_move(self, tmpdir, monkeypatch):
    # Regression test for a past blowup caused by a missing import. Coverage
    # for this unit is thin; tests are added with every change made to it.
    p = Path(str(tmpdir))
    io.open(p + 'foo', 'w').close()
    monkeypatch.setattr(
        hscommon.conflict, 'smart_copy',
        log_calls(lambda source_path, dest_path: None))
    # XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
    monkeypatch.setattr(app, 'smart_copy', hscommon.conflict.smart_copy)
    # We don't want the test to create that fake directory.
    monkeypatch.setattr(os, 'makedirs', lambda path: None)
    dgapp = TestApp().app
    dgapp.directories.add_path(p)
    [f] = dgapp.directories.get_files()
    dgapp.copy_or_move(f, True, 'some_destination', 0)
    eq_(1, len(hscommon.conflict.smart_copy.calls))
    call = hscommon.conflict.smart_copy.calls[0]
    eq_(call['source_path'], f.path)
    eq_(call['dest_path'], op.join('some_destination', 'foo'))
def setup_module(module):
    # Tests in this unit rely on two directory layouts: 'onefile', holding a
    # single file, and the richer tree built by create_fake_fs().
    testpath = Path(tempfile.mkdtemp())
    module.testpath = testpath
    onefilepath = testpath + 'onefile'
    io.mkdir(onefilepath)
    fp = io.open(onefilepath + 'test.txt', 'w')
    fp.write('test_data')
    fp.close()
    create_fake_fs(testpath)
def test_default_path_state_override(tmpdir):
    # A Directories subclass may supply its own default state for a path.
    class MyDirectories(Directories):
        def _default_state_for_path(self, path):
            if 'foobar' in path:
                return DirectoryState.Excluded

    d = MyDirectories()
    p1 = Path(str(tmpdir))
    io.mkdir(p1 + 'foobar')
    io.open(p1 + 'foobar/somefile', 'w').close()
    io.mkdir(p1 + 'foobaz')
    io.open(p1 + 'foobaz/somefile', 'w').close()
    d.add_path(p1)
    eq_(d.get_state(p1 + 'foobar'), DirectoryState.Excluded)
    eq_(d.get_state(p1 + 'foobaz'), DirectoryState.Normal)
    # Only the 'foobaz' file is picked up.
    eq_(len(list(d.get_files())), 1)
    # The overridden default can still be changed through set_state().
    d.set_state(p1 + 'foobar', DirectoryState.Normal)
    eq_(d.get_state(p1 + 'foobar'), DirectoryState.Normal)
    eq_(len(list(d.get_files())), 2)
def create_fake_fs(rootpath):
    """Build the shared test directory tree under ``rootpath + 'fs'`` and return its root.

    Kept as a standalone function because other test units reuse it.
    """
    def write_file(path, contents):
        # Helper: create *path* holding *contents* and close the handle.
        fp = io.open(path, 'w')
        fp.write(contents)
        fp.close()

    rootpath = rootpath + 'fs'
    io.mkdir(rootpath)
    for dirname in ('dir1', 'dir2', 'dir3'):
        io.mkdir(rootpath + dirname)
    write_file(rootpath + 'file1.test', '1')
    write_file(rootpath + 'file2.test', '12')
    write_file(rootpath + 'file3.test', '123')
    write_file(rootpath + ('dir1', 'file1.test'), '1')
    write_file(rootpath + ('dir2', 'file2.test'), '12')
    write_file(rootpath + ('dir3', 'file3.test'), '123')
    return rootpath
def get_iphoto_or_aperture_pictures(plistpath, photo_class):
    """Return a list of *photo_class* instances for images listed in *plistpath*.

    The structure of iPhoto and Aperture libraries for the base photo list is
    exactly the same, so both go through this function. Returns [] when the
    plist doesn't exist.
    """
    if not io.exists(plistpath):
        return []
    with io.open(plistpath, 'rt', encoding='utf-8') as fp:
        s = fp.read()
    # There was a case where a guy had 0x10 chars in his plist, causing expat
    # errors on loading.
    s = remove_invalid_xml(s, replace_with='')
    # It seems that iPhoto sometimes doesn't properly escape & chars. The
    # regexp below finds any & that doesn't start a &-based entity (&amp;,
    # &quot;, etc.). Based on TextMate's XML bundle's regexp. The non-capturing
    # group makes the trailing ';' required for *every* alternative — the old
    # ungrouped form only required it on the last one, letting unterminated
    # entities like '&foo ' through to break expat.
    s, count = re.subn(r'&(?!(?:[a-zA-Z0-9_-]+|#[0-9]+|#x[0-9a-fA-F]+);)', '', s)
    if count:
        logging.warning("%d invalid XML entities replacement made", count)
    # readPlistFromBytes() is deprecated and removed in Python 3.9;
    # plistlib.loads() is the supported equivalent.
    plist = plistlib.loads(s.encode('utf-8'))
    result = []
    for key, photo_data in plist['Master Image List'].items():
        if photo_data['MediaType'] != 'Image':
            continue
        photo_path = Path(photo_data['ImagePath'])
        result.append(photo_class(photo_path, key))
    return result
def get_iphoto_or_aperture_pictures(plistpath, photo_class):
    """Build *photo_class* objects from the master image list in *plistpath*.

    The base photo list has exactly the same structure in iPhoto and Aperture
    libraries, so a single implementation serves both.
    """
    if not io.exists(plistpath):
        return []
    s = io.open(plistpath, "rt", encoding="utf-8").read()
    # There was a case where a guy had 0x10 chars in his plist, causing expat
    # errors on loading.
    s = remove_invalid_xml(s, replace_with="")
    # iPhoto sometimes leaves & chars unescaped: strip any & that doesn't look
    # like the start of a &-based entity (&amp;, &quot;, etc.). Regexp based on
    # TextMate's XML bundle.
    s, count = re.subn(r"&(?![a-zA-Z0-9_-]+|#[0-9]+|#x[0-9a-fA-F]+;)", "", s)
    if count:
        logging.warning("%d invalid XML entities replacement made", count)
    plist = plistlib.readPlistFromBytes(s.encode("utf-8"))
    return [
        photo_class(Path(photo_data["ImagePath"]), key)
        for key, photo_data in plist["Master Image List"].items()
        if photo_data["MediaType"] == "Image"
    ]
def create_unicode_test_dir(rootpath):
    # Build a small tree with accented ('\xe9') names: a directory holding a
    # file, plus a file at the top level.
    dirpath = rootpath + '\xe9_dir'
    io.mkdir(dirpath)
    io.open(rootpath + '\xe9_file', 'w').close()
    io.open(dirpath + '\xe9_file', 'w').close()