Example #1
    def test_safety_wrapper(self):
        rawfs = MemoryFS()
        safefs = dokan.Win32SafetyFS(rawfs)
        rawfs.setcontents("autoRun.inf", b("evilcodeevilcode"))
        self.assertTrue(safefs.exists("_autoRun.inf"))
        self.assertTrue("autoRun.inf" not in safefs.listdir("/"))
        safefs.setcontents("file:stream", b("test"))
        self.assertFalse(rawfs.exists("file:stream"))
        self.assertTrue(rawfs.exists("file__colon__stream"))
        self.assertTrue("file:stream" in safefs.listdir("/"))
Example #2
    def test_safety_wrapper(self):
        rawfs = MemoryFS()
        safefs = dokan.Win32SafetyFS(rawfs)
        rawfs.setcontents("autoRun.inf", b("evilcodeevilcode"))
        self.assertTrue(safefs.exists("_autoRun.inf"))
        self.assertTrue("autoRun.inf" not in safefs.listdir("/"))
        safefs.setcontents("file:stream", "test")
        self.assertFalse(rawfs.exists("file:stream"))
        self.assertTrue(rawfs.exists("file__colon__stream"))
        self.assertTrue("file:stream" in safefs.listdir("/"))
Example #3
def test__when_deleting_with_glob_pattern__it_deletes_matching_files():
    mem_fs = MemoryFS()
    mem_fs.create("hello.txt")
    mem_fs.create("world.txt")
    mem_fs.create("nope.gif")

    sut = _TestFilesystemImpl(mem_fs)

    sut.delete("*.txt")

    assert not mem_fs.exists("hello.txt")
    assert not mem_fs.exists("world.txt")
    assert mem_fs.exists("nope.gif")
Example #4
def test__when_copying_files_with_glob_pattern__it_copies_matching_files():
    mem_fs = MemoryFS()
    mem_fs.create("hello.txt")
    mem_fs.create("world.txt")
    mem_fs.create("nope.gif")

    sut = _TestFilesystemImpl(mem_fs)

    sut.copy("*.txt", "newdir/")

    assert mem_fs.exists("newdir/hello.txt")
    assert mem_fs.exists("newdir/world.txt")
    assert not mem_fs.exists("newdir/nope.gif")
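Similarly, a glob copy into a target directory can be sketched with the stock fs.copy and fs.path helpers (copy_matching_files is a hypothetical stand-in, not the project's copy method):

from fs.copy import copy_file
from fs.path import basename, join

def copy_matching_files(mem_fs, pattern, dst_dir):
    mem_fs.makedirs(dst_dir, recreate=True)   # ensure the target directory exists
    for match in list(mem_fs.glob(pattern)):
        copy_file(mem_fs, match.path, mem_fs, join(dst_dir, basename(match.path)))

# copy_matching_files(mem_fs, "*.txt", "newdir/") reproduces the behaviour asserted above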
Example #5
def test__when_copying_dirs_with_glob_patterns__it_copies_matching_dirs_with_content(
):
    mem_fs = MemoryFS()
    sub_fs = mem_fs.makedirs("sub/first")
    sub_fs.create("file.txt")

    sub_fs = mem_fs.makedirs("sub/second")
    sub_fs.create("another.txt")

    sut = _TestFilesystemImpl(mem_fs)

    sut.copy("sub/*", "otherdir/")

    assert mem_fs.exists("otherdir/first/file.txt")
    assert mem_fs.exists("otherdir/second/another.txt")
Example #6
    def test_movedir_indir(self):
        """Test movedir in a directory"""        
        fs1 = MemoryFS()
        fs2 = MemoryFS()
        fs1sub = fs1.makeopendir("from")
        self._make_fs(fs1sub)            
        utils.movedir((fs1, "from"), (fs2, "copy"))        
        self.assert_(not fs1.exists("from"))     
        self._check_fs(fs2.opendir("copy"))

        fs1 = TempFS()
        fs2 = TempFS()
        fs1sub = fs1.makeopendir("from")
        self._make_fs(fs1sub)            
        utils.movedir((fs1, "from"), (fs2, "copy"))
        self.assert_(not fs1.exists("from"))      
        self._check_fs(fs2.opendir("copy"))
Example #7
    def test_movedir_root(self):
        """Test movedir to root dir"""        
        fs1 = MemoryFS()
        fs2 = MemoryFS()
        fs1sub = fs1.makeopendir("from")
        self._make_fs(fs1sub)            
        utils.movedir((fs1, "from"), fs2)
        self.assert_(not fs1.exists("from"))     
        self._check_fs(fs2)

        fs1 = TempFS()
        fs2 = TempFS()
        fs1sub = fs1.makeopendir("from")
        self._make_fs(fs1sub)            
        utils.movedir((fs1, "from"), fs2)
        self.assert_(not fs1.exists("from"))        
        self._check_fs(fs2)
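Both movedir tests rely on the (filesystem, subdirectory) tuple form accepted by fs.utils.movedir in PyFilesystem 0.5.x. A self-contained sketch of that call pattern, with illustrative file names only:

from fs.memoryfs import MemoryFS
from fs import utils

src = MemoryFS()
src.makeopendir("from").setcontents("hello.txt", b"hi")
dst = MemoryFS()

# Move the "from" subtree of src into a new "copy" directory on dst.
utils.movedir((src, "from"), (dst, "copy"))

assert not src.exists("from")
assert dst.exists("copy/hello.txt")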
Example #8
    def test_makedir(self):
        mfs = MemoryFS()
        mfs.makedir('test')
        d = Directory('test', mfs)
        result = d.makedir('sub')
        self.assertTrue(mfs.exists('test/sub'))
        self.assertIsInstance(result, Directory)
        self.assertEqual(result.path.s, 'test/sub')
Example #9
    def test_apply(self):
        mfs = MemoryFS()
        d = Directory('.', mfs)
        o = ini.OpSubdirectoriesMaker(3)
        r = RoutineOnDirectory(d, [o], dryrun=True)
        result = o.apply(r)
        dirs = ['sub.{}'.format(i) for i in range(3)]
        for d in dirs:
            self.assertTrue(mfs.exists(d))
Example #10
def test__when_copying_file__but_parent_dir_missing__should_create_missing_dirs(
):
    complete_path = "another/folder/" + TARGET

    mem_fs = MemoryFS()
    write_file_with_content(mem_fs, SOURCE)

    sut = _TestFilesystemImpl(mem_fs)
    sut.copy(SOURCE, complete_path)

    assert mem_fs.exists(complete_path)
Example #11
def test__when_copying_file__should_copy_to_target_path():
    mem_fs = MemoryFS()
    content = "content"
    write_file_with_content(mem_fs, SOURCE, content)

    sut = _TestFilesystemImpl(mem_fs)

    sut.copy(SOURCE, TARGET)

    assert mem_fs.exists(TARGET)
    assert_file_content_equals(mem_fs, TARGET, content)
Example #12
def test__when_copying_file_to_other_filesystem__should_call_copy_file():
    target_fs = MemoryFS()
    origin_fs = MemoryFS()
    write_file_with_content(origin_fs, SOURCE, "content")

    sut = _TestFilesystemImpl(origin_fs)

    sut.copy(SOURCE, TARGET, filesystem=_TestFilesystemImpl(target_fs))

    assert target_fs.exists(TARGET)
    assert_file_content_equals(target_fs, TARGET, "content")
Example #13
def test__when_copying_directory__but_directory_exists__should_copy_into_existing_directory(
):
    origin_fs = MemoryFS()
    sub_fs = origin_fs.makedir("sourcedir")
    origin_fs.makedir("targetdir")
    write_file_with_content(sub_fs, SOURCE, "content")

    sut = _TestFilesystemImpl(origin_fs)
    sut.copy("sourcedir", "targetdir")

    complete_path = f"targetdir/{SOURCE}"
    assert origin_fs.exists(complete_path)
Example #14
def test__a__when_copying_directory__should_copy_entire_directory():
    src_dir = "mydir"
    copy_dir = "copydir"
    mem_fs = MemoryFS()
    sub_fs = mem_fs.makedir(src_dir)
    write_file_with_content(sub_fs, SOURCE)

    sut = _TestFilesystemImpl(mem_fs)

    sut.copy(src_dir, copy_dir)

    assert mem_fs.exists(f"{copy_dir}/{SOURCE}")
Example #15
def test__when_copying_file_to_other_filesystem__but_parent_dir_missing__should_create_missing_dirs(
):
    complete_path = "another/folder/" + TARGET

    target_fs = MemoryFS()
    origin_fs = MemoryFS()
    write_file_with_content(origin_fs, SOURCE)

    sut = _TestFilesystemImpl(origin_fs)

    sut.copy(SOURCE, complete_path, filesystem=_TestFilesystemImpl(target_fs))

    assert target_fs.exists(complete_path)
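The behaviours asserted in Examples #10, #12 and #15 (creating missing parent directories, copying to another filesystem) can be reproduced with PyFilesystem2's stock helpers. A rough sketch, where copy_to is a hypothetical stand-in for the _TestFilesystemImpl.copy method under test:

from fs.copy import copy_file
from fs.memoryfs import MemoryFS
from fs.path import dirname

def copy_to(origin_fs, src, dst, target_fs=None):
    target_fs = target_fs or origin_fs
    parent = dirname(dst)
    if parent and parent != "/":
        # create any missing parent directories on the destination side
        target_fs.makedirs(parent, recreate=True)
    copy_file(origin_fs, src, target_fs, dst)

origin, target = MemoryFS(), MemoryFS()
origin.writetext("source.txt", "content")
copy_to(origin, "source.txt", "another/folder/target.txt", target_fs=target)
assert target.exists("another/folder/target.txt")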
Example #16
    def test_copy_files(self):
        mfs = MemoryFS()
        mfs.touch('txt1.txt')
        mfs.touch('txt2.txt')
        mfs.makedir('sub1')
        mfs.makedir('sub2')
        new_files = [
            'sub1/txt1.txt', 'sub1/txt2.txt', 'sub2/txt1.txt', 'sub2/txt2.txt'
        ]
        for n in new_files:
            self.assertFalse(mfs.exists(n))
        d = Directory('.', mfs)
        targets = d.listdir_as_observable().filter(match_directory(['sub*']))
        sources = d.listdir_as_observable().filter(match_file(['txt*']))
        sources.subscribe(lambda f: print(f.path.s))
        sources_list = []
        sources.subscribe(sources_list.append)
        results = (targets.flat_map(
            lambda d: d.sync(sources)).to_list().to_blocking().first())
        self.assertEqual(len(results), 4)
        for n in new_files:
            self.assertTrue(mfs.exists(n))
Example #17
def test__when_copying_directory_to_other_filesystem__should_copy_dir():
    source_dir = "mydir"
    target_dir = "copydir"

    origin_fs = MemoryFS()
    sub_fs = origin_fs.makedir(source_dir)
    write_file_with_content(sub_fs, SOURCE, "content")

    sut = _TestFilesystemImpl(origin_fs)

    target_fs = MemoryFS()
    sut.copy(source_dir, target_dir, filesystem=_TestFilesystemImpl(target_fs))

    complete_path = f"{target_dir}/{SOURCE}"
    assert target_fs.exists(complete_path)
    assert_file_content_equals(target_fs, complete_path, "content")
Example #18
    def test_garbage_collection(self):

        log_file = self.get_resource('crashplan_backup_files.log')

        new_file = u'foo.txt'
        newer_file = u'/my/crashplan/backups/vms/finn/finn-2018-08-15_00-09-00/finn-5-s004.vmdk'
        older_file = u'/my/crashplan/backups/vms/gabarolas/gabarolas-2018-08-02_16-07-17/gabarolas-s067.vmdk'

        with CrashPlanFS(log_file=log_file.strpath) as fs:
            assert not fs.exists(new_file)
            assert fs.exists(newer_file)
            assert fs.exists(older_file)
            older_file_remote_mtime = fs.getdetails(older_file).modified

        # Create a mock transfer area
        from fs.memoryfs import MemoryFS
        transfer_area = MemoryFS()

        # Populate the transfer area
        transfer_area.appendtext(new_file, u'This file is new')
        transfer_area.makedirs(os.path.split(newer_file)[0])
        transfer_area.appendtext(newer_file,
                                 u'This file has been modified locally')
        transfer_area.makedirs(os.path.split(older_file)[0])
        transfer_area.appendtext(older_file, u'This file is up-to-date')
        transfer_area.settimes(older_file, modified=older_file_remote_mtime)

        assert transfer_area.getdetails(
            older_file).modified <= older_file_remote_mtime

        # Pass the transfer area to crashplanfs
        fs = CrashPlanFS(log_file=log_file.strpath,
                         transfer_area=transfer_area)

        # The new file should not be listed, as it only exists in the transfer area
        assert not fs.exists(new_file)

        # The newer file should be listed, with the remote modification time
        assert fs.exists(newer_file)
        assert transfer_area.getdetails(new_file).modified > fs.getdetails(
            newer_file).modified

        # The older file should be deleted from the transfer area
        assert not transfer_area.exists(older_file)
Example #19
    def test_apply(self):
        mfs = MemoryFS()
        subs = ['sub.{}'.format(i) for i in range(3)]
        for d in subs:
            mfs.makedir(d)
        d = Directory('.', mfs)
        mfs.touch('test1.txt')
        mfs.touch('test2.txt')
        o0 = ini.OpAddToBroadcastFile('test1.txt')
        o1 = ini.OpAddToBroadcastFile('test2.txt')
        obc = ini.OpBroadcastFile(['sub*'])
        r = RoutineOnDirectory(d, [o0, o1, obc])
        r.work()
        target_files = []
        for d in subs:
            for f in ['test1.txt', 'test2.txt']:
                target_files.append(d + '/' + f)
        for i, t in enumerate(target_files):
            with self.subTest(i):
                self.assertTrue(mfs.exists(t))
Example #20
class VirtualFilesystem(AbstractedFS):
    """Represents a virtual filesystem (currently only memory and s3 are supported)
    """
    
    def __init__(self, root, cmd_channel):
        AbstractedFS.__init__(self, root, cmd_channel)
        self.cwd = root
        self.type = cmd_channel.type
        self.s3_bucket = cmd_channel.s3_bucket
        self.aws_access_key = cmd_channel.aws_access_key
        self.aws_secret_key = cmd_channel.aws_secret_key
        self.seperator = cmd_channel.seperator
        self.thread_synchronize = cmd_channel.thread_synchronize
        self.key_sync_timeout = cmd_channel.key_sync_timeout
        if not self.cmd_channel.fs_obj:
            if self.type == "memory":
                self.fs_obj = MemoryFS()
            elif self.type == "s3":
                self.fs_obj = S3FS(bucket=self.s3_bucket, prefix=self.prefix, aws_access_key=self.aws_access_key, aws_secret_key=self.aws_secret_key, separator=self.seperator, thread_synchronize=self.thread_synchronize, key_sync_timeout=self.key_sync_timeout)
            self.cmd_channel.fs_obj = self.fs_obj
        else:
            self.fs_obj = self.cmd_channel.fs_obj
            

    def ftp2fs(self, ftppath):
        return self.ftpnorm(ftppath)

    def fs2ftp(self, fspath):
        return fspath

    def validpath(self, path):
        # validpath was used to check symlinks escaping user home
        # directory; this is no longer necessary.
        return True
    
    def open(self, filename, mode):
        f = self.fs_obj.open(filename, mode)
        f.name = filename
        return f
    
    def mkdir(self, path):
        return self.fs_obj.makedir(path)
        
    def chdir(self, path):
        return self.fs_obj.opendir(path)
    
    def listdir(self, path):
        return self.fs_obj.listdir(path)
    
    def rmdir(self, path):
        return self.fs_obj.removedir(path)
    
    def remove(self, path):
        return self.fs_obj.remove(path)
    
    def rename(self, src, dst):
        return self.fs_obj.rename(src, dst)
    
    def chmod(self, path, mode):
        return True
    
    def readlink(self, path):
        return self.ftp2fs(path)
    
    def isfile(self, path):
        return self.fs_obj.isfile(path)
    
    def islink(self, path):
        return False
    
    def getsize(self, path):
        return self.fs_obj.getsize(path)
    
    def getmtime(self, path):
        return self.fs_obj.getinfo(path)['modified_time']
    
    def realpath(self, path):
        return path
    
    def lexists(self, path):
        return self.fs_obj.exists(path)
    
    def mkstemp(self, suffix='', prefix='', mode='wb'):
        from tempfile import _RandomNameSequence as RandomName
        name = RandomName()
        if suffix != '':
            suffix = 'tmp'
        fname = suffix + name.next()
        return self.fs_obj.open(fname, mode)
Example #21
class BigFS(FS):

    """A FileSystem that represents a BIG file."""
    
    _meta = { 'virtual' : False,
              'read_only' : True,
              'unicode_paths' : True,
              'case_insensitive_paths' : False,
              'network' : False,                        
             }

    def __init__(self, filename, mode="r", thread_synchronize=True):
        """Create a FS that maps on to a big file.

        :param filename: A (system) path, or a file-like object
        :param mode: Mode to open file: 'r' for reading, 'w' and 'a' not supported
        :param thread_synchronize: Set to True (default) to enable thread-safety

        """
        super(BigFS, self).__init__(thread_synchronize=thread_synchronize)

        if len(mode) > 1 or mode not in "r":
            raise ValueError("mode must be 'r'")
        self.file_mode = mode
        self.big_path = str(filename)

        self.entries = {}
        try:
            self.bf = open(filename, "rb")
        except IOError:
            raise ResourceNotFoundError(str(filename), msg="BIG file does not exist: %(path)s")

        self._path_fs = MemoryFS()
        if mode in 'ra':
            self._parse_resource_list(self.bf)

    def __str__(self):
        return "<BigFS: %s>" % self.big_path

    def __unicode__(self):
        return unicode(self.__str__())


    def _parse_resource_list(self, g):
        magicWord = g.read(4)
        if magicWord != "BIGF" and magicWord != "BIG4":
            raise ValueError("Magic word of BIG file invalid: " + filename + " " + repr(magicWord))
        header = g.read(12)
        header = unpack(">III", header)
        BIGSize = header[0]
        fileCount = header[1]
        bodyOffset = header[2]
        for i in range(fileCount):
            fileHeader = g.read(8)
            fileHeader = unpack(">II", fileHeader)

            pos = g.tell()
            buf = g.read(4096)
            marker = buf.find("\0")
            if marker == -1:
                raise ValueError("Could not parse filename in BIG file: Too long or invalid file")
            name = buf[:marker]
            # TODO: decode the encoding of name (or normalize the path?)
            isCompressed, uncompressedSize = self.__isCompressed(g, fileHeader[0], fileHeader[1])
            be = BIGEntry(name, fileHeader[0], fileHeader[1], isCompressed, uncompressedSize)
            name = normpath(name)
            self.entries[name] = be
            self._add_resource(name)
            g.seek(pos + marker + 1)

    def __isCompressed(self, g, offset, size):
        g.seek(offset)
        buf = g.read(2)
        magic = unpack(">H", buf)[0]
        if (magic & 0x3EFF) == 0x10FB:
            # it is compressed
            if magic & 0x8000:
                # decompressed size is uint32
                return True, unpack(">I", g.read(4))[0]
            else:
                # use only 3 bytes
                return True, unpack(">I", "\0" + g.read(3))[0]
        return False, size

    def _add_resource(self, path):
        if path.endswith('/'):
            path = path[:-1]
            if path:
                self._path_fs.makedir(path, recursive=True, allow_recreate=True)
        else:
            dirpath, filename = pathsplit(path)
            if dirpath:
                self._path_fs.makedir(dirpath, recursive=True, allow_recreate=True)
            f = self._path_fs.open(path, 'w')
            f.close()


    def close(self):
        """Finalizes the zip file so that it can be read.
        No further operations will work after this method is called."""

        if hasattr(self, 'bf') and self.bf:
            self.bf.close()
            self.bf = _ExceptionProxy()

    @synchronize
    def open(self, path, mode="r", **kwargs):
        path = normpath(relpath(path))        

        if 'r' in mode:
            if self.file_mode not in 'ra':
                raise OperationFailedError("open file", path=path, msg="Big file must be opened for reading ('r') or appending ('a')")
            try:
                return self.entries[path].getfile(self.bf)
            except KeyError:
                raise ResourceNotFoundError(path)

        if 'w' in mode:
            raise OperationFailedError("open file", path=path, msg="Big file cannot be edited ATM")

        raise ValueError("Mode must contain be 'r' or 'w'")

    @synchronize
    def getcontents(self, path):
        if not self.exists(path):
            raise ResourceNotFoundError(path)
        path = normpath(path)
        try:
            contents = self.entries[path].getcontents(self.bf)
        except KeyError:
            raise ResourceNotFoundError(path)
        except RuntimeError:
            raise OperationFailedError("read file", path=path, msg="Big file must be oppened with 'r' or 'a' to read")
        return contents

    def desc(self, path):
        if self.isdir(path):
            return "Dir in big file: %s" % self.big_path
        else:
            return "File in big file: %s" % self.big_path

    def isdir(self, path):
        return self._path_fs.isdir(path)

    def isfile(self, path):
        return self._path_fs.isfile(path)

    def exists(self, path):
        return self._path_fs.exists(path)

    @synchronize
    def makedir(self, dirname, recursive=False, allow_recreate=False):
        dirname = normpath(dirname)
        if self.file_mode not in "wa":
            raise OperationFailedError("create directory", path=dirname, msg="Big file must be opened for writing ('w') or appending ('a')")
        if not dirname.endswith('/'):
            dirname += '/'
        self._add_resource(dirname)

    def listdir(self, path="/", wildcard=None, full=False, absolute=False, dirs_only=False, files_only=False):
        return self._path_fs.listdir(path, wildcard, full, absolute, dirs_only, files_only)

    @synchronize
    def getinfo(self, path):
        if not self.exists(path):
            raise ResourceNotFoundError(path)
        path = normpath(path).lstrip('/')
        info = {'size': 0}
        if path in self.entries:
            be = self.entries[path]
            info['size'] = be.realSize
            info['file_size'] = be.realSize
            info['stored_size'] = be.storedSize
            info['is_compressed'] = be.isCompressed
            info['offset'] = be.offset
            info['internal_filename'] = be.filename
            info['filename'] = path
        return info
Example #22
def make_temp_fs(fff):
    # make virtual filesystem in ram with the final
    # organization of the filesystem
    ff = cache_directory(read_only(fff))
    ram = MemoryFS()

    # for path, dirs, files in ff.walk():
    posprocsub = []
    fils = set()
    files = ff.scandir('/')
    path = '/'
    folds = set([i.name for i in files if i.is_dir])
    files = ff.scandir('/')
    for j in files:
        if not j.is_file:
            continue
        if splitext(j.name)[1] in subs_formats:
            posprocsub.append(j.name)
            continue
        pp = rename(j.name)
        if pp.error:
            pp = parse(j.name)
        try:
            if pp.is_video:
                fold = transform(pp.title)
                if not (fold in folds):
                    fold = best_ed(fold, folds)
                    folds.add(fold)
                pth = join('/', fold)
                if not ram.exists(pth):
                    ram.makedir(fold)
                fils.add(pp.title)
                if pp.episode:
                    if pp.season:
                        fill = pp.title + ' - ' + \
                            str(pp.season) + 'X' + str(pp.episode)
                    else:
                        fill = pp.title + ' - ' + str(pp.episode)
                else:
                    fill = pp.title
                if pp.episode_title:
                    fill += ' - ' + str(pp.episode_title)
                fill += pp.ext
                ram.settext(join(pth, fill), join(path, j.name))
        except KeyError:
            continue

        for j in posprocsub:
            pp = rename(j)
            if pp.error:
                pp = parse(j.name)
            fold = transform(pp.title)
            pth = join('/', fold)
            if pp.episode:
                if pp.season:
                    fill = pp.title + ' - ' + \
                        str(pp.season) + 'X' + str(pp.episode)
                else:
                    fill = fold + ' - ' + str(pp.episode)
            else:
                fill = fold
            if pp.episode_title:
                fill = fill + ' - ' + str(pp.episode_title)
            fill += pp.ext
            if ram.exists(pth):
                ram.settext(join(pth, fill), join(path, j))
            elif len(fils) == 1:
                pth = join('/', list(fils)[0])
                ram.settext(join(pth, fill), join(path, j))
            elif len(fils) > 1:
                best = None
                gap = 3
                for i in fils:
                    n = editDistance(i, fold)
                    if n < 3 and n < gap:
                        best = i
                        gap = n
                    elif n == 0:
                        best = i
                        break
                if best:
                    pth = join('/', best)
                    ram.settext(join(pth, fill), join(path, j))
                else:
                    if not (ram.exists('/subs')):
                        ram.makedir('/subs')
                    ram.settext(join('/subs', j), join(path, j))
            else:
                if not (ram.exists('/subs')):
                    ram.makedir('/subs')
                ram.settext(join('/subs', j), join(path, j))
    return ram
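make_temp_fs only plans the layout: every file it creates in the in-memory filesystem stores, as its text content, the path of the real source file it was derived from. A sketch (not part of the original code) of how that mapping could be read back later:

def iter_planned_moves(ram):
    # each file in the planned layout holds the original source path as text;
    # gettext mirrors the settext calls used in make_temp_fs above
    for dst_path in ram.walk.files():
        src_path = ram.gettext(dst_path)
        yield src_path, dst_path

# e.g.: for src, dst in iter_planned_moves(make_temp_fs(source_fs)): print(src, "->", dst)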
Example #23
class BigFS(FS):
    """A FileSystem that represents a BIG file."""

    _meta = {
        'virtual': False,
        'read_only': True,
        'unicode_paths': True,
        'case_insensitive_paths': False,
        'network': False,
    }

    def __init__(self, filename, mode="r", thread_synchronize=True):
        """Create a FS that maps on to a big file.

        :param filename: A (system) path, or a file-like object
        :param mode: Mode to open file: 'r' for reading, 'w' and 'a' not supported
        :param thread_synchronize: Set to True (default) to enable thread-safety

        """
        super(BigFS, self).__init__(thread_synchronize=thread_synchronize)

        if len(mode) > 1 or mode not in "r":
            raise ValueError("mode must be 'r'")
        self.file_mode = mode
        self.big_path = str(filename)

        self.entries = {}
        try:
            self.bf = open(filename, "rb")
        except IOError:
            raise ResourceNotFoundError(
                str(filename), msg="BIG file does not exist: %(path)s")

        self._path_fs = MemoryFS()
        if mode in 'ra':
            self._parse_resource_list(self.bf)

    def __str__(self):
        return "<BigFS: %s>" % self.big_path

    def __unicode__(self):
        return unicode(self.__str__())

    def _parse_resource_list(self, g):
        magicWord = g.read(4)
        if magicWord != "BIGF" and magicWord != "BIG4":
            raise ValueError("Magic word of BIG file invalid: " + filename +
                             " " + repr(magicWord))
        header = g.read(12)
        header = unpack(">III", header)
        BIGSize = header[0]
        fileCount = header[1]
        bodyOffset = header[2]
        for i in range(fileCount):
            fileHeader = g.read(8)
            fileHeader = unpack(">II", fileHeader)

            pos = g.tell()
            buf = g.read(4096)
            marker = buf.find("\0")
            if marker == -1:
                raise ValueError(
                    "Could not parse filename in BIG file: Too long or invalid file"
                )
            name = buf[:marker]
            # TODO: decode the encoding of name (or normalize the path?)
            isCompressed, uncompressedSize = self.__isCompressed(
                g, fileHeader[0], fileHeader[1])
            be = BIGEntry(name, fileHeader[0], fileHeader[1], isCompressed,
                          uncompressedSize)
            name = normpath(name)
            self.entries[name] = be
            self._add_resource(name)
            g.seek(pos + marker + 1)

    def __isCompressed(self, g, offset, size):
        g.seek(offset)
        buf = g.read(2)
        magic = unpack(">H", buf)[0]
        if (magic & 0x3EFF) == 0x10FB:
            # it is compressed
            if magic & 0x8000:
                # decompressed size is uint32
                return True, unpack(">I", g.read(4))[0]
            else:
                # use only 3 bytes
                return True, unpack(">I", "\0" + g.read(3))[0]
        return False, size

    def _add_resource(self, path):
        if path.endswith('/'):
            path = path[:-1]
            if path:
                self._path_fs.makedir(path,
                                      recursive=True,
                                      allow_recreate=True)
        else:
            dirpath, filename = pathsplit(path)
            if dirpath:
                self._path_fs.makedir(dirpath,
                                      recursive=True,
                                      allow_recreate=True)
            f = self._path_fs.open(path, 'w')
            f.close()

    def close(self):
        """Finalizes the zip file so that it can be read.
        No further operations will work after this method is called."""

        if hasattr(self, 'bf') and self.bf:
            self.bf.close()
            self.bf = _ExceptionProxy()

    @synchronize
    def open(self, path, mode="r", **kwargs):
        path = normpath(relpath(path))

        if 'r' in mode:
            if self.file_mode not in 'ra':
                raise OperationFailedError(
                    "open file",
                    path=path,
                    msg=
                    "Big file must be opened for reading ('r') or appending ('a')"
                )
            try:
                return self.entries[path].getfile(self.bf)
            except KeyError:
                raise ResourceNotFoundError(path)

        if 'w' in mode:
            raise OperationFailedError("open file",
                                       path=path,
                                       msg="Big file cannot be edited ATM")

        raise ValueError("Mode must contain be 'r' or 'w'")

    @synchronize
    def getcontents(self, path):
        if not self.exists(path):
            raise ResourceNotFoundError(path)
        path = normpath(path)
        try:
            contents = self.entries[path].getcontents(self.bf)
        except KeyError:
            raise ResourceNotFoundError(path)
        except RuntimeError:
            raise OperationFailedError(
                "read file",
                path=path,
                msg="Big file must be oppened with 'r' or 'a' to read")
        return contents

    def desc(self, path):
        if self.isdir(path):
            return "Dir in big file: %s" % self.big_path
        else:
            return "File in big file: %s" % self.big_path

    def isdir(self, path):
        return self._path_fs.isdir(path)

    def isfile(self, path):
        return self._path_fs.isfile(path)

    def exists(self, path):
        return self._path_fs.exists(path)

    @synchronize
    def makedir(self, dirname, recursive=False, allow_recreate=False):
        dirname = normpath(dirname)
        if self.file_mode not in "wa":
            raise OperationFailedError(
                "create directory",
                path=dirname,
                msg=
                "Big file must be opened for writing ('w') or appending ('a')")
        if not dirname.endswith('/'):
            dirname += '/'
        self._add_resource(dirname)

    def listdir(self,
                path="/",
                wildcard=None,
                full=False,
                absolute=False,
                dirs_only=False,
                files_only=False):
        return self._path_fs.listdir(path, wildcard, full, absolute, dirs_only,
                                     files_only)

    @synchronize
    def getinfo(self, path):
        if not self.exists(path):
            raise ResourceNotFoundError(path)
        path = normpath(path).lstrip('/')
        info = {'size': 0}
        if path in self.entries:
            be = self.entries[path]
            info['size'] = be.realSize
            info['file_size'] = be.realSize
            info['stored_size'] = be.storedSize
            info['is_compressed'] = be.isCompressed
            info['offset'] = be.offset
            info['internal_filename'] = be.filename
            info['filename'] = path
        return info
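A hypothetical usage sketch for the BigFS class above (the archive path and entry name are made up); it only calls methods defined in the listing:

bigfs = BigFS("textures.big")            # read-only; parses the BIG index into a MemoryFS
print(bigfs.listdir("/"))                # entry names recovered from the archive header
if bigfs.isfile("art/logo.tga"):         # illustrative entry name
    data = bigfs.getcontents("art/logo.tga")
bigfs.close()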