def test_option_on(self, do_setup):
    # With the clean_empty_dirs option enabled, cleaning delegates to
    # hscommon.util.delete_if_empty with '.DS_Store' as a deletable file.
    self.app.options['clean_empty_dirs'] = True
    self.app.clean_empty_dirs(Path('/foo/bar'))
    recorded = hscommon.util.delete_if_empty.calls
    eq_(1, len(recorded))
    call = recorded[0]
    eq_(Path('/foo/bar'), call['path'])
    eq_(['.DS_Store'], call['files_to_delete'])
def gen(basepath, destpath, profile=None):
    """Generate the static site rooted at *basepath* into *destpath*.

    Reads ``conf.yaml`` under *basepath* (overlaying the *profile* section on
    top of the ``base`` section when given), renders the changelog and every
    page, and copies the skeleton into *destpath* on the first run.
    """
    basepath = Path(basepath)
    destpath = Path(destpath)
    configpath = basepath + 'conf.yaml'
    # Context managers close the handles deterministically (the originals
    # leaked them), and safe_load is used because the config is plain data —
    # PyYAML >= 6 requires an explicit Loader for bare yaml.load() anyway.
    with io.open(configpath, 'rt', encoding='utf-8') as fp:
        confall = yaml.safe_load(fp)
    conf = confall['base']
    if profile and profile in confall:
        conf.update(confall[profile])
    tixurl = conf['tixurl']
    changelogdata = read_changelog_file(str(basepath + conf['changelog']))
    changelog = render_changelog(changelogdata, tixurl)
    if 'env' in conf:
        envpath = basepath + conf['env']
        with io.open(envpath, 'rt', encoding='utf-8') as fp:
            env = yaml.safe_load(fp)
    else:
        env = {}
    env['changelog'] = changelog
    pagespath = basepath + conf['pages']
    if 'basepages' in conf:
        fallbackpath = basepath + conf['basepages']
    else:
        fallbackpath = None
    with io.open(pagespath, 'rt', encoding='utf-8') as fp:
        pagedatas = yaml.safe_load(fp)
    pages = [
        MainPage(pagedata, pagespath=pagespath[:-1], fallbackpath=fallbackpath)
        for pagedata in pagedatas
    ]
    skelpath = basepath + Path(conf['skeleton'])
    if not io.exists(destpath):
        print("Copying skeleton")
        io.copytree(skelpath, destpath)
    pages[0].meta = conf.get('firstpage_meta', '')
    # The index was unused; iterate pages directly.
    for page in pages:
        print("Rendering {0}".format(page.name))
        page.render(destpath, pages, env)
def load_from_file(self, infile):
    """Load folder selection from ``infile``.

    :param file infile: path or file pointer to XML generated through
                        :meth:`save_to_file`
    """
    try:
        root = ET.parse(infile).getroot()
    except Exception:
        # A missing or corrupt selection file leaves the selection untouched.
        return
    # Element.getiterator() was removed in Python 3.9; Element.iter() is the
    # long-standing drop-in replacement with identical semantics.
    for rdn in root.iter('root_directory'):
        attrib = rdn.attrib
        if 'path' not in attrib:
            continue
        path = attrib['path']
        try:
            self.add_path(Path(path))
        except (AlreadyThereError, InvalidPathError):
            # Saved roots that are duplicates or no longer valid are skipped.
            pass
    for sn in root.iter('state'):
        attrib = sn.attrib
        if not ('path' in attrib and 'value' in attrib):
            continue
        path = attrib['path']
        state = attrib['value']
        self.set_state(Path(path), int(state))
def test_option_on(self, do_setup):
    # Enabling clean_empty_dirs routes the call through delete_if_empty.
    self.app.options["clean_empty_dirs"] = True
    self.app.clean_empty_dirs(Path("/foo/bar"))
    logged = hscommon.util.delete_if_empty.calls
    eq_(1, len(logged))
    eq_(Path("/foo/bar"), logged[0]["path"])
    eq_([".DS_Store"], logged[0]["files_to_delete"])
def load_from_file(self, infile):
    """Load folder selection from ``infile``.

    :param file infile: path or file pointer to XML generated through
                        :meth:`save_to_file`
    """
    try:
        root = ET.parse(infile).getroot()
    except Exception:
        # A missing or corrupt selection file leaves the selection untouched.
        return
    # Element.getiterator() was removed in Python 3.9; Element.iter() is the
    # drop-in replacement with identical semantics.
    for rdn in root.iter("root_directory"):
        attrib = rdn.attrib
        if "path" not in attrib:
            continue
        path = attrib["path"]
        try:
            self.add_path(Path(path))
        except (AlreadyThereError, InvalidPathError):
            # Saved roots that are duplicates or no longer valid are skipped.
            pass
    for sn in root.iter("state"):
        attrib = sn.attrib
        if not ("path" in attrib and "value" in attrib):
            continue
        path = attrib["path"]
        state = attrib["value"]
        # State is restored directly into the mapping, bypassing set_state.
        self.states[Path(path)] = int(state)
def get_iphoto_or_aperture_pictures(plistpath: Path, photo_class):
    """Return a list of *photo_class* instances parsed from an iPhoto/Aperture plist.

    The structure of iPhoto and Aperture libraries for the base photo list is
    exactly the same, hence the shared implementation. Returns ``[]`` when the
    plist doesn't exist.
    """
    if not plistpath.exists():
        return []
    # Read through a context manager so the handle is closed promptly
    # (the original leaked it).
    with plistpath.open("rt", encoding="utf-8") as fp:
        s = fp.read()
    # There was a case where a guy had 0x10 chars in his plist, causing expat
    # errors on loading.
    s = remove_invalid_xml(s, replace_with="")
    # It seems that iPhoto sometimes doesn't properly escape & chars. The regexp
    # below is to find any & char that is not a &-based entity (&amp;, &quot;, etc.),
    # based on TextMate's XML bundle's regexp.
    # NOTE(review): only the hex alternative requires the trailing ';' —
    # presumably deliberate leniency; confirm before tightening.
    s, count = re.subn(r"&(?![a-zA-Z0-9_-]+|#[0-9]+|#x[0-9a-fA-F]+;)", "", s)
    if count:
        logging.warning("%d invalid XML entities replacement made", count)
    parser = IPhotoPlistParser()
    try:
        plist = parser.parse(io.BytesIO(s.encode("utf-8")))
    except Exception:
        # IPhotoPlistParser keeps lastdata around for exactly this diagnostic.
        logging.warning("iPhoto plist parsing choked on data: %r", parser.lastdata)
        raise
    result = []
    for key, photo_data in plist["Master Image List"].items():
        if photo_data["MediaType"] != "Image":  # skip movies and other media
            continue
        photo_path = Path(photo_data["ImagePath"])
        photo = photo_class(photo_path, key)
        result.append(photo)
    return result
def get_iphoto_or_aperture_pictures(plistpath: Path, photo_class):
    """Return a list of *photo_class* instances parsed from an iPhoto/Aperture plist.

    The structure of iPhoto and Aperture libraries for the base photo list is
    exactly the same, hence the shared implementation. Returns ``[]`` when the
    plist doesn't exist.
    """
    if not plistpath.exists():
        return []
    # Read through a context manager so the handle is closed promptly
    # (the original leaked it).
    with plistpath.open('rt', encoding='utf-8') as fp:
        s = fp.read()
    # There was a case where a guy had 0x10 chars in his plist, causing expat
    # errors on loading.
    s = remove_invalid_xml(s, replace_with='')
    # It seems that iPhoto sometimes doesn't properly escape & chars. The regexp
    # below is to find any & char that is not a &-based entity (&amp;, &quot;, etc.),
    # based on TextMate's XML bundle's regexp.
    # NOTE(review): only the hex alternative requires the trailing ';' —
    # presumably deliberate leniency; confirm before tightening.
    s, count = re.subn(r'&(?![a-zA-Z0-9_-]+|#[0-9]+|#x[0-9a-fA-F]+;)', '', s)
    if count:
        logging.warning("%d invalid XML entities replacement made", count)
    parser = IPhotoPlistParser()
    try:
        plist = parser.parse(io.BytesIO(s.encode('utf-8')))
    except Exception:
        # IPhotoPlistParser keeps lastdata around for exactly this diagnostic.
        logging.warning("iPhoto plist parsing choked on data: %r", parser.lastdata)
        raise
    result = []
    for key, photo_data in plist['Master Image List'].items():
        if photo_data['MediaType'] != 'Image':  # skip movies and other media
            continue
        photo_path = Path(photo_data['ImagePath'])
        photo = photo_class(photo_path, key)
        result.append(photo)
    return result
def test_find_path(self):
    # find_path() resolves a relative Path against the node tree; an empty
    # Path resolves to the node itself, unknown names to None.
    root = Node(None, '')
    foo = Node(root, 'foo')
    bar = Node(foo, 'bar')
    assert root.find_path(Path('foo')) is foo
    assert root.find_path(Path('bar')) is None
    assert root.find_path(Path(())) is root
    assert root.find_path(Path(('foo', 'bar'))) is bar
def test_tie_breaker_path_deepness(fake_fileexists):
    # On a prioritization tie, the dupe with the deeper path wins the ref spot.
    scanner = Scanner()
    shallow, deep = no('foo'), no('foo')
    shallow.path = Path('foo')
    deep.path = Path('foo/bar')
    [group] = scanner.get_dupe_groups([shallow, deep])
    assert group.ref is deep
def test_tie_breaker_copy(fake_fileexists):
    # If "copy" appears in the words, that file becomes the dupe even when it
    # has the deeper path.
    scanner = Scanner()
    the_copy, original = no('foo bar Copy'), no('foo bar')
    the_copy.path = Path('deeper/path')
    original.path = Path('foo')
    [group] = scanner.get_dupe_groups([the_copy, original])
    assert group.ref is original
def __init__(self, name="foobar", size=1, path=None):
    # Without an explicit path, the file sits at the root under its own name;
    # otherwise the name is appended to the supplied path.
    path = Path(name) if path is None else Path(path)[name]
    self.name = name
    self.size = size
    self.path = path
    self.words = getwords(name)
def test_invalidate_cache_on_set(self):
    # Re-assigning initial_path must not serve a stale cached value.
    root = Root(threaded=False)
    volume = root.new_directory('foo')
    first = Path(('foo', 'bar'))
    volume.initial_path = first
    eq_(first, volume.initial_path)
    second = Path(('foo', 'baz'))
    volume.initial_path = second
    eq_(second, volume.initial_path)
def test_source_same_as_dest(self, dosetup):
    # When destination == source, only the renamed file produces a rename pair.
    ref = TestDir(None, 'reference')
    ref.AddDir('dir').AddFile('subfile')
    ref.AddFile('file')
    copydir = TestDir(None, 'copy')
    copydir.copy(ref)
    copydir.files[0].rename('renamed')
    batch = BatchOperation(copydir, Path('reference'))
    expected = [(Path(('reference', 'file')), Path(('reference', 'renamed')))]
    eq_(expected, batch.name_list)
def test_removable(self, tmpdir):
    # A removable volume is available only while the disc is in place.
    self.v.vol_type = VOLTYPE_CDROM
    rootpath = str(tmpdir)
    self.root.buffer_path = Path(rootpath)
    # Emulate the CD insertion: a directory named after the volume appears
    # under the buffer path (as it would under /Volumes).
    os.mkdir(op.join(rootpath, self.v.name))
    # physical_path should be read instead of this bogus initial path.
    self.v.initial_path = Path('/does/not/exist')
    assert self.v.is_available
def __init__(self, pagedata, pagespath, fallbackpath):
    # A page's name may contain subfolders; split it into base path + basename.
    self.name = pagedata['name']
    name_as_path = Path(self.name)
    self.basename = name_as_path[-1]
    self.basepath = name_as_path[:-1]
    filename = '{}.md'.format(self.basename)
    self.path = pagespath + self.basepath + filename
    if not io.exists(self.path):
        # Fall back to the shared base pages when there's no local override.
        self.path = fallbackpath + self.basepath + filename
    self.title = pagedata['title']
    # Relative prefix to get back to the site root from this page's depth.
    self.relpath = '../' * len(self.basepath)
    self.meta = ''
def test_renamed_as_list(self, dosetup):
    # Passing a flat file list (instead of a directory) still produces rename
    # pairs rooted at the destination.
    ref = TestDir(None, 'reference')
    ref.AddDir('dir').AddFile('subfile')
    ref.AddFile('file')
    copydir = TestDir(None, 'copy')
    copydir.copy(ref)
    batch = BatchOperation(copydir.allfiles, Path('destination'))
    expected = [
        (Path(('reference', 'file')), Path(('destination', 'file'))),
        (Path(('reference', 'dir', 'subfile')), Path(('destination', 'dir', 'subfile'))),
    ]
    eq_(expected, batch.name_list)
def pytest_funcarg__dosetup(self, request):
    # Build a unicode-named source tree and a manual copy of it, with the
    # filesystem encoding forced to ascii so any non-ascii char fails loudly.
    monkeypatch = request.getfuncargvalue('monkeypatch')
    tmpdir = request.getfuncargvalue('tmpdir')
    monkeypatch.setattr(sys, 'getfilesystemencoding', lambda: 'ascii')
    testpath = Path(str(tmpdir))
    create_unicode_test_dir(testpath)
    sourcedir = phys.Directory(None, str(testpath))
    copydir = manualfs.Directory(None, '')
    copydir.copy(sourcedir)
    self.sourcedircopy = copydir
    self.destpath = Path(str(tmpdir))
def test_unicode_save(tmpdir):
    # Saving directories with non-ascii names must not trip encoding errors.
    dirs = Directories()
    root = Path(str(tmpdir))["hello\xe9"]
    root.mkdir()
    root["foo\xe9"].mkdir()
    dirs.add_path(root)
    dirs.set_state(root["foo\xe9"], DirectoryState.Excluded)
    tmpxml = str(tmpdir.join("directories_testunit.xml"))
    try:
        dirs.save_to_file(tmpxml)
    except UnicodeDecodeError:
        assert False
def test_unicode_save(tmpdir):
    # Saving directories with non-ascii names must not trip encoding errors.
    dirs = Directories()
    root = Path(str(tmpdir))['hello\xe9']
    root.mkdir()
    root['foo\xe9'].mkdir()
    dirs.add_path(root)
    dirs.set_state(root['foo\xe9'], DirectoryState.Excluded)
    tmpxml = str(tmpdir.join('directories_testunit.xml'))
    try:
        dirs.save_to_file(tmpxml)
    except UnicodeDecodeError:
        assert False
def testadd_path():
    # add_path() creates intermediate directories and returns the leaf;
    # lookups are case-sensitive; adding through an existing *file* is invalid.
    root = TestDir(None, 'root')
    added = root.add_path(Path('foo/bar'))
    eq_(added.name, 'bar')
    eq_(added.path, ('root', 'foo', 'bar'))
    eq_(added.parent.name, 'foo')
    assert root.find_sub_dir('foo') is not None
    eq_(root.find_sub_dir('FOO'), None)
    root.AddFile('bar')
    try:
        root.add_path(Path('bar/foo'))
    except fs.InvalidPath:
        pass
    else:
        raise AssertionError()
def test_load_from_file_with_invalid_path(tmpdir):
    # A saved path that no longer exists raises InvalidPath during loading;
    # the other directories must still be loaded.
    d1 = Directories()
    d1.add_path(testpath['onefile'])
    # This one is removed before loading, so it triggers InvalidPath.
    gone = Path(str(tmpdir.join('toremove')))
    gone.mkdir()
    d1.add_path(gone)
    gone.rmdir()
    tmpxml = str(tmpdir.join('directories_testunit.xml'))
    d1.save_to_file(tmpxml)
    d2 = Directories()
    d2.load_from_file(tmpxml)
    eq_(1, len(d2))
def test_recurse_up(self, do_setup, monkeypatch):
    # delete_if_empty must be recursively called up in the path until it returns False
    @log_calls
    def mock_delete_if_empty(path, files_to_delete=None):
        # None instead of the original mutable [] default (anti-pattern);
        # the argument's value is never inspected by this test.
        return len(path) > 1

    monkeypatch.setattr(hscommon.util, 'delete_if_empty', mock_delete_if_empty)
    # XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
    monkeypatch.setattr(app, 'delete_if_empty', mock_delete_if_empty)
    self.app.options['clean_empty_dirs'] = True
    self.app.clean_empty_dirs(Path('not-empty/empty/empty'))
    calls = hscommon.util.delete_if_empty.calls
    eq_(3, len(calls))
    eq_(Path('not-empty/empty/empty'), calls[0]['path'])
    eq_(Path('not-empty/empty'), calls[1]['path'])
    eq_(Path('not-empty'), calls[2]['path'])
def test_partial_md5_aggregate_subfile_sorted(tmpdir):
    # A folder's hash is md5(subfolder-digests + direct-file-hashes): each
    # subfolder contributes md5(its file's hash).digest(), files contribute
    # their hash directly. Check both md5partial and md5samples.
    p = create_fake_fs_with_random_data(Path(str(tmpdir)))
    folder = fs.Folder(p)

    def expected_digest(attrname):
        subfolder_hashes = [
            getattr(fs.File(p[d][f]), attrname)
            for d, f in (("dir1", "file1.test"), ("dir2", "file2.test"), ("dir3", "file3.test"))
        ]
        file_hashes = [
            getattr(fs.File(p[f]), attrname)
            for f in ("file1.test", "file2.test", "file3.test")
        ]
        combined = b"".join(hashlib.md5(h).digest() for h in subfolder_hashes)
        combined += b"".join(file_hashes)
        return hashlib.md5(combined).digest()

    eq_(folder.md5partial, expected_digest("md5partial"))
    eq_(folder.md5samples, expected_digest("md5samples"))
def _get_files(self, from_path, fileclasses, j):
    # Walk from_path and yield an fs file object for every file that isn't
    # under an excluded directory; files under "Reference" dirs get is_ref set.
    # j is a job object used only for cancellation checks here.
    for root, dirs, files in os.walk(str(from_path)):
        j.check_if_cancelled()
        root = Path(root)
        state = self.get_state(root)
        if state == DirectoryState.Excluded:
            # Recursively get files from folders with lots of subfolder is expensive. However, there
            # might be a subfolder in this path that is not excluded. What we want to do is to skim
            # through self.states and see if we must continue, or we can stop right here to save time
            if not any(p[:len(root)] == root for p in self.states):
                # Pruning dirs in place stops os.walk from descending further.
                del dirs[:]
        try:
            if state != DirectoryState.Excluded:
                found_files = [fs.get_file(root + f, fileclasses=fileclasses) for f in files]
                found_files = [f for f in found_files if f is not None]
                # In some cases, directories can be considered as files by dupeGuru, which is
                # why we have this line below. In fact, there only one case: Bundle files under
                # OS X... In other situations, this forloop will do nothing.
                for d in dirs[:]:
                    f = fs.get_file(root + d, fileclasses=fileclasses)
                    if f is not None:
                        found_files.append(f)
                        # Treated as a file — don't also walk into it.
                        dirs.remove(d)
                logging.debug("Collected %d files in folder %s", len(found_files), str(from_path))
                for file in found_files:
                    file.is_ref = state == DirectoryState.Reference
                    yield file
        except (EnvironmentError, fs.InvalidPath):
            # Best-effort scan: unreadable entries are skipped, not fatal.
            pass
def test_cd_copy_with_job(self, dosetup, tmpdir):
    # Copying from a CDROM volume prompts for the CD, then reports progress.
    def on_need_cd(location):
        assert location is volume
        return Path(self.rootpath)

    def update(progress, description=''):
        self.log.append(progress)
        return True

    root = Root(threaded=False)
    volume = root.new_directory('volume')
    volume.vol_type = VOLTYPE_CDROM
    fsdir = volume.new_directory('fs')
    for filename in ('file1.test', 'file2.test', 'file3.test'):
        fsdir.new_file(filename)
    renamed = manualfs.Directory(None, '')
    renamed.copy(volume)
    batch = BatchOperation(renamed, Path(str(tmpdir)))
    batch.OnNeedCD = on_need_cd
    self.log = []
    job = Job(1, update)
    assert batch.copy(job)
    eq_([0, 0, 33, 66, 100], self.log)
def test_addDirectory_already_there(self, do_setup):
    # Adding the same directory twice warns instead of duplicating it.
    duplicate = Path(op.dirname(__file__))
    self.app.add_directory(duplicate)
    self.app.add_directory(duplicate)
    eq_(len(self.app.view.messages), 1)
    assert "already" in self.app.view.messages[0]
def test_addDirectory_simple(self, do_setup):
    # self.app already holds one directory, so adding another makes two.
    # Any path that isn't a parent or child of the existing one will do.
    extra = Path(op.dirname(__file__))
    self.app.add_directory(extra)
    eq_(len(self.app.directories), 2)
def test_japanese_unicode(self, tmpdir):
    # Exclusion regexes must match non-ascii (Japanese) path components, and a
    # $-anchored regex must exclude only the directory itself, not its files.
    p1 = Path(str(tmpdir))
    p1["$Recycle.Bin"].mkdir()
    p1["$Recycle.Bin"]["somerecycledfile.png"].open("w").close()
    p1["$Recycle.Bin"]["some_unwanted_file.jpg"].open("w").close()
    p1["$Recycle.Bin"]["subdir"].mkdir()
    p1["$Recycle.Bin"]["subdir"]["過去白濁物語~]_カラー.jpg"].open("w").close()
    p1["$Recycle.Bin"]["思叫物語"].mkdir()
    p1["$Recycle.Bin"]["思叫物語"]["なししろ会う前"].open("w").close()
    p1["$Recycle.Bin"]["思叫物語"]["堂~ロ"].open("w").close()
    self.d.add_path(p1["$Recycle.Bin"])
    # Unanchored regex: excludes the directory and everything matching inside.
    regex3 = r".*物語.*"
    self.d._exclude_list.add(regex3)
    self.d._exclude_list.mark(regex3)
    # print(f"get_folders(): {[x for x in self.d.get_folders()]}")
    eq_(self.d.get_state(p1["$Recycle.Bin"]["思叫物語"]), DirectoryState.Excluded)
    files = self.get_files_and_expect_num_result(2)
    assert "過去白濁物語~]_カラー.jpg" not in files
    assert "なししろ会う前" not in files
    assert "堂~ロ" not in files
    # using end of line character should only filter that directory, not affecting its files
    regex4 = r".*物語$"
    self.d._exclude_list.rename(regex3, regex4)
    assert self.d._exclude_list.error(regex4) is None
    self.d.set_state(p1["$Recycle.Bin"]["思叫物語"], DirectoryState.Normal)
    files = self.get_files_and_expect_num_result(5)
    assert "過去白濁物語~]_カラー.jpg" in files
    assert "なししろ会う前" in files
    assert "堂~ロ" in files
def dropMimeData(self, mimeData, action, row, column, parentIndex):
    """Qt drop handler: move the dragged board items under the drop target."""
    # In the test I have made, the row and column args always seem to be -1/-1 except when
    # parentIndex is invalid (which means that the drop destination is the root node).
    def find_path(path):
        # Maps a serialized item path back to its node. Implicitly returns
        # None when the path's root is neither known board name.
        if path[0] == DESIGN_BOARD_NAME:
            return self.app.board.find_path(path[1:])
        elif path[0] == IGNORE_BOX_NAME:
            return self.app.board.ignore_box.find_path(path[1:])

    if not mimeData.hasFormat(MIME_PATHS):
        return False
    if parentIndex.isValid():
        destNode = parentIndex.internalPointer()
    else:
        destNode = self
    paths = str(mimeData.data(MIME_PATHS), 'utf-8').split('\n')
    sourceItems = set(find_path(Path(path)) for path in paths)
    # Keep only top-level selections: an item whose parent is also being moved
    # (or is already the destination) travels with its parent.
    sourceItems = set(item for item in sourceItems if item.parent not in sourceItems | set([destNode.ref]))
    if not sourceItems:
        return False
    smart_move(sourceItems, destNode.ref, allow_merge=True)
    destNode.invalidate()
    # InsertRow calls have to be made at correct indexes or else the subsequent removeRows call
    # will be made at incorrect indexes. To do so, we just go through every subitem of destNode.ref
    # and if it's in sourceItems, we call insertRow.
    # destNode.subnodes
    for index, node in enumerate(destNode.subnodes):
        if node.ref in sourceItems:
            self.insertRow(index, parentIndex)
    return True
class Directories(DirectoriesBase):
    """OS X-specific Directories: default exclusions and bundle awareness."""

    # System locations never scanned by default.
    ROOT_PATH_TO_EXCLUDE = list(map(Path, ['/Library', '/Volumes', '/System', '/bin', '/sbin', '/opt', '/private', '/dev']))
    # Home-relative locations excluded by default.
    HOME_PATH_TO_EXCLUDE = [Path('Library')]

    def _default_state_for_path(self, path):
        # Defer to the base class; only fill in when it abstains (None).
        result = DirectoriesBase._default_state_for_path(self, path)
        if result is not None:
            return result
        if path in self.ROOT_PATH_TO_EXCLUDE:
            return DirectoryState.Excluded
        # path[:2] compares against ('/Users'); path[3:] is presumably the
        # home-relative part past the username — TODO confirm hscommon Path
        # slicing semantics. Falls through to an implicit None otherwise.
        if path[:2] == Path('/Users') and path[3:] in self.HOME_PATH_TO_EXCLUDE:
            return DirectoryState.Excluded

    def _get_folders(self, from_folder, j):
        # We don't want to scan bundle's subfolder even in Folders mode. Bundle's integrity has to
        # stay intact.
        if is_bundle(str(from_folder.path)):
            # just yield the current folder and bail
            state = self.get_state(from_folder.path)
            if state != DirectoryState.Excluded:
                from_folder.is_ref = state == DirectoryState.Reference
                yield from_folder
            return
        else:
            yield from DirectoriesBase._get_folders(self, from_folder, j)

    @staticmethod
    def get_subfolders(path):
        # Bundles are treated as files, not as folders to recurse into.
        result = DirectoriesBase.get_subfolders(path)
        return [p for p in result if not is_bundle(str(p))]
def get_itunes_database_path():
    """Return the Path to the most recently used iTunes database plist.

    Raises InvalidPathError when no recent database is registered.
    """
    plisturls = proxy.prefValue_inDomain_('iTunesRecentDatabases', 'com.apple.iApps')
    if not plisturls:
        raise directories.InvalidPathError()
    return Path(proxy.url2path_(plisturls[0]))
def test_with_ioerror_cd(self, dosetup, tmpdir):
    # When copying from CDs, operations throwing IOError should call OnNeedCD
    # until the file is found or the whole operation is cancelled.
    def on_need_cd(location):
        self.need_cd_calls += 1
        if self.need_cd_calls == 3:
            # Returning None cancels the operation on the third prompt.
            return
        assert location is volume
        return self.rootpath

    root = Root(threaded=False)
    volume = root.new_directory('volume')
    volume.vol_type = VOLTYPE_CDROM
    fsdir = volume.new_directory('fs')
    for filename in ('file1.test', 'file2.test', 'file3.test', 'fake'):
        fsdir.new_file(filename)
    renamed = manualfs.Directory(None, '')
    renamed.copy(volume)
    batch = BatchOperation(renamed, Path(str(tmpdir)))
    self.need_cd_calls = 0
    batch.OnNeedCD = on_need_cd
    assert not batch.copy()
    eq_(3, self.need_cd_calls)
class Root(sql.Root):
    """SQL-backed root node managing music volumes."""

    # Node classes the sql.Root machinery instantiates for dirs and files.
    cls_dir_class = Volume
    cls_file_class = File

    def __init__(self, dbname=':memory:', dirname='', threaded=True):
        super(Root, self).__init__(dbname, dirname, threaded=threaded)
        # Attributes read back from the database for each file node.
        self._attrs_to_read = ['audiosize', 'size', 'ctime', 'mtime', 'duration', 'bitrate', 'samplerate', 'title', 'artist', 'album', 'genre', 'year', 'track', 'comment']

    def add_volume(self, ref, volume_name, volume_type, job=nulljob):
        """Create a new volume named *volume_name* populated from *ref*."""
        result = self.new_directory(volume_name)
        try:
            result.update(ref, job)
        except JobCancelled:
            # If add_volume is cancelled, we don't want a half updated volume added.
            # We want nothing added.
            result.delete()
            raise
        result.vol_type = volume_type
        result.initial_path = ref.path
        return result

    def update_volumes(self, job=nulljob):
        """Re-scan every fixed (non-removable) volume, tracking progress in *job*."""
        updatable = [volume for volume in self if volume.vol_type == VOLTYPE_FIXED]
        job = job.start_subjob(len(updatable))
        for volume in updatable:
            volume.update(job=job)

    # Class-level default: an empty Path until a buffer location is assigned.
    buffer_path = Path(())
def copy_or_move(self, dupe, copy: bool, destination: str, dest_type: DestType):
    """Copy or move *dupe*'s file/folder to *destination*.

    :param dupe: duplicate whose path is the source of the operation.
    :param copy: True to copy, False to move.
    :param destination: destination root directory as a string.
    :param dest_type: how much of the source path to recreate under the
        destination (Relative/Absolute recreate intermediate folders).
    Raises an EnvironmentError if there's a problem.
    """
    source_path = dupe.path
    # The registered top-level directory that contains this dupe.
    location_path = first(p for p in self.directories if dupe.path in p)
    dest_path = Path(destination)
    if dest_type in {DestType.Relative, DestType.Absolute}:
        # no filename, no windows drive letter
        source_base = source_path.remove_drive_letter()[:-1]
        if dest_type == DestType.Relative:
            # Keep only the part below the registered location.
            source_base = source_base[location_path:]
        dest_path = dest_path + source_base
        if not dest_path.exists():
            dest_path.makedirs()
    # Add filename to dest_path. For file move/copy, it's not required, but for folders, yes.
    dest_path = dest_path + source_path[-1]
    logging.debug("Copy/Move operation from '%s' to '%s'", source_path, dest_path)
    # Raises an EnvironmentError if there's a problem
    if copy:
        smart_copy(source_path, dest_path)
    else:
        smart_move(source_path, dest_path)
    # After a move, the now possibly-empty source folder gets cleaned up.
    self.clean_empty_dirs(source_path[:-1])
def get_iphoto_or_aperture_pictures(plistpath: Path, photo_class):
    """Return a list of *photo_class* instances parsed from an iPhoto/Aperture plist.

    The structure of iPhoto and Aperture libraries for the base photo list is
    exactly the same, hence the shared implementation. Returns ``[]`` when the
    plist doesn't exist.
    """
    if not plistpath.exists():
        return []
    # Read through a context manager so the handle is closed promptly
    # (the original leaked it).
    with plistpath.open('rt', encoding='utf-8') as fp:
        s = fp.read()
    # There was a case where a guy had 0x10 chars in his plist, causing expat
    # errors on loading.
    s = remove_invalid_xml(s, replace_with='')
    # It seems that iPhoto sometimes doesn't properly escape & chars. The regexp
    # below is to find any & char that is not a &-based entity (&amp;, &quot;, etc.),
    # based on TextMate's XML bundle's regexp.
    # NOTE(review): only the hex alternative requires the trailing ';' —
    # presumably deliberate leniency; confirm before tightening.
    s, count = re.subn(r'&(?![a-zA-Z0-9_-]+|#[0-9]+|#x[0-9a-fA-F]+;)', '', s)
    if count:
        logging.warning("%d invalid XML entities replacement made", count)
    # plistlib.readPlistFromBytes was deprecated in 3.4 and removed in 3.9;
    # plistlib.loads is the drop-in replacement for XML plists.
    plist = plistlib.loads(s.encode('utf-8'), fmt=plistlib.FMT_XML)
    result = []
    for key, photo_data in plist['Master Image List'].items():
        if photo_data['MediaType'] != 'Image':  # skip movies and other media
            continue
        photo_path = Path(photo_data['ImagePath'])
        photo = photo_class(photo_path, key)
        result.append(photo)
    return result
def test_save_and_load(tmpdir):
    # Directory states survive a save/load round-trip, including states set on
    # subpaths of an added directory.
    saved = Directories()
    loaded = Directories()
    p1 = Path(str(tmpdir.join('p1')))
    p1.mkdir()
    p2 = Path(str(tmpdir.join('p2')))
    p2.mkdir()
    saved.add_path(p1)
    saved.add_path(p2)
    saved.set_state(p1, DirectoryState.Reference)
    saved.set_state(p1['dir1'], DirectoryState.Excluded)
    tmpxml = str(tmpdir.join('directories_testunit.xml'))
    saved.save_to_file(tmpxml)
    loaded.load_from_file(tmpxml)
    eq_(2, len(loaded))
    eq_(DirectoryState.Reference, loaded.get_state(p1))
    eq_(DirectoryState.Excluded, loaded.get_state(p1['dir1']))
def test_get_files_does_not_choke_on_non_existing_directories(tmpdir):
    # A registered directory deleted from disk yields no files, not an error.
    dirs = Directories()
    root = Path(str(tmpdir))
    dirs.add_path(root)
    root.rmtree()
    eq_([], list(dirs.get_files()))
def can_handle(cls, path: Path):
    """Return True when *path* is a real (non-symlink) directory that is a bundle."""
    if path.islink():
        return False
    return path.isdir() and is_bundle(str(path))