def test_broken_metadata_with_backup(self):
        """test that each time a metadata file is updated a .old is kept"""
        self.shelf['bad_file'] = {'value': 'old'}
        path = self.shelf.key_file('bad_file')
        self.assertFalse(path_exists(path+'.old'))
        self.assertEqual({'value': 'old'}, self.shelf['bad_file'])
        # force the creation of the .old file
        self.shelf['bad_file'] = {'value': 'new'}
        self.assertTrue(path_exists(path+'.old'))
        # check that the new value is there
        self.assertEqual({'value': 'new'}, self.shelf['bad_file'])
        # write the current md file with 0 bytes
        open_file(path, 'w').close()
        # test that the old value is retrieved
        self.assertEqual({'value': 'old'}, self.shelf['bad_file'])

        self.shelf['broken_pickle'] = {'value': 'old'}
        path = self.shelf.key_file('broken_pickle')
        # check that .old doesn't exist
        self.assertFalse(path_exists(path+'.old'))
        # force the creation of the .old file
        self.shelf['broken_pickle'] = {'value': 'new'}
        # check that .old exists
        self.assertTrue(path_exists(path+'.old'))
        # check that the new value is there
        self.assertEqual({'value': 'new'}, self.shelf['broken_pickle'])
        # write random bytes to the md file
        with open_file(path, 'w') as f:
            f.write(BROKEN_PICKLE)
        # check that the old value is retrieved
        self.assertEqual({'value': 'old'}, self.shelf['broken_pickle'])
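
(For context: the fallback exercised above lives in the shelf's read path, which retries with the .old backup when the current file is empty or unpicklable. A minimal sketch of that behaviour, assuming a key_file() helper like the one used in these tests; this is an illustration, not the project's actual implementation.)

import os
import pickle

def load_with_backup(shelf, key):
    """Sketch: load a value, falling back to the .old backup on corruption."""
    path = shelf.key_file(key)
    for candidate in (path, path + '.old'):
        try:
            if os.path.exists(candidate) and os.path.getsize(candidate) > 0:
                with open(candidate, 'rb') as fh:
                    return pickle.load(fh)
        except (pickle.UnpicklingError, EOFError, ValueError):
            continue  # corrupted file, try the backup next
    raise KeyError(key)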
Example #2
 def test_remove_dir(self):
     """Test the remove dir."""
     testdir = os.path.join(self.basedir, 'foodir')
     os.mkdir(testdir)
     assert path_exists(testdir)
     remove_dir(testdir)
     self.assertFalse(path_exists(testdir))
    def test_broken_metadata_with_backup(self):
        """overrides parent test as we have the value in the cache."""
        self.shelf['bad_file'] = {'value': 'old'}
        path = self.shelf.key_file('bad_file')
        self.assertFalse(path_exists(path+'.old'))
        self.assertEqual({'value': 'old'}, self.shelf['bad_file'])
        # force the creation of the .old file
        self.shelf['bad_file'] = {'value': 'new'}
        self.assertTrue(path_exists(path+'.old'))
        # check that the new value is there
        self.assertEqual({'value': 'new'}, self.shelf['bad_file'])
        # write the current md file with 0 bytes
        open_file(path, 'w').close()
        # HERE IS THE DIFFERENCE with the parent tests
        # test that the new value is retrieved from the cache!
        self.assertEqual({'value': 'new'}, self.shelf['bad_file'])

        self.shelf['broken_pickle'] = {'value': 'old'}
        path = self.shelf.key_file('broken_pickle')
        # check that .old doesn't exist
        self.assertFalse(path_exists(path+'.old'))
        # force the creation of the .old file
        self.shelf['broken_pickle'] = {'value': 'new'}
        # check that .old exists
        self.assertTrue(path_exists(path+'.old'))
        # check that the new value is there
        self.assertEqual({'value': 'new'}, self.shelf['broken_pickle'])
        # write random bytes to the md file
        with open_file(path, 'w') as f:
            f.write(BROKEN_PICKLE)
        # HERE IS THE DIFFERENCE with the parent tests
        # test that the new value is retrieved from the cache!
        self.assertEqual({'value': 'new'}, self.shelf['broken_pickle'])
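
(The difference with the parent test comes from an in-memory cache sitting in front of the on-disk pickles, so a hot key never touches the corrupted file. A rough sketch of that lookup order, assumed for illustration; the real cached shelf may differ.)

class CachedShelfSketch:
    """Sketch: consult an in-memory cache before the disk-backed shelf."""

    def __init__(self, shelf):
        self._shelf = shelf   # disk-backed shelf
        self._cache = {}      # key -> last value written or read

    def __setitem__(self, key, value):
        self._shelf[key] = value
        self._cache[key] = value

    def __getitem__(self, key):
        if key in self._cache:
            # cache hit: the broken on-disk file is never read
            return self._cache[key]
        value = self._shelf[key]
        self._cache[key] = value
        return value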
Example #4
def get_config_files():
    """ return the path to the config files or and empty list.
    The search path is based on the paths returned by load_config_paths
    but it's returned in reverse order (e.g: /etc/xdg first).
    """
    config_files = []
    for xdg_config_dir in load_config_paths('ubuntuone'):
        config_file = os.path.join(xdg_config_dir, CONFIG_FILE)
        if path_exists(config_file):
            config_files.append(config_file)

        config_logs = os.path.join(xdg_config_dir, CONFIG_LOGS)
        if path_exists(config_logs):
            config_files.append(config_logs)

    # reverse the list as load_config_paths returns the user dir first
    config_files.reverse()
    # if we are running from a branch, get the config files from it too
    config_file = os.path.join(os.path.dirname(__file__), os.path.pardir,
                               os.path.pardir, 'data', CONFIG_FILE)
    if path_exists(config_file):
        config_files.append(config_file)

    config_logs = os.path.join('data', CONFIG_LOGS)
    if path_exists(config_logs):
        config_files.append(config_logs)

    return config_files
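
(The ordering matters because ConfigParser.read() applies files in sequence, with later files overriding earlier ones: system-wide defaults such as /etc/xdg come first and the user's own config last. A small usage sketch with the standard library parser.)

from configparser import ConfigParser

parser = ConfigParser()
# read() applies the files in order, so values from the user's config
# (and from the in-tree 'data' dir when running from a branch) override
# the system-wide defaults returned first.
parser.read(get_config_files())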
Example #5
 def test_create_dirs_already_exists_symlink_too(self):
     """test that creating a Main instance works as expected."""
     link = os.path.join(self.root, 'Shared With Me')
     make_link(self.shares, link)
     self.assertTrue(is_link(link))
     self.assertTrue(path_exists(self.shares))
     self.assertTrue(path_exists(self.root))
     main = self.build_main()
     # check that the shares link is actually a link
     self.assertTrue(is_link(main.shares_dir_link))
Example #6
 def test_create_dirs_none_exists(self):
     """test that creating a Main instance works as expected."""
     # remove the existing dirs
     remove_dir(self.root)
     remove_dir(self.shares)
     main = self.build_main()
     # check that the shares link is actually a link
     self.assertTrue(is_link(main.shares_dir_link))
     self.assertTrue(path_exists(self.shares))
     self.assertTrue(path_exists(self.root))
Example #7
    def test_rename_file(self, target=None):
        """Rename a file."""
        if target is None:
            target = os.path.join(self.basedir, 'target')

        assert path_exists(self.testfile)
        rename(self.testfile, target)

        self.assertFalse(path_exists(self.testfile),
            'Path %r should not exist after rename.' % self.testfile)
        self.assertTrue(path_exists(target),
            'Path %r should exist after rename.' % target)
 def test_delete_backups_too(self):
     """test that delitem also deletes the .old/.new files left around"""
     self.shelf["foo"] = "bar"
     # create the .old backup
     self.shelf["foo"] = "bar1"
     path = self.shelf.key_file('foo')
     # create a .new file (a hard reboot during the rename dance)
     open_file(path+'.new', 'w').close()
     # delete the key; the backups should be removed too
     del self.shelf['foo']
     self.assertFalse(path_exists(path))
     self.assertFalse(path_exists(path+'.old'), 'there is a .old file!')
     self.assertFalse(path_exists(path+'.new'), 'there is a .new file!')
Example #9
    def test_rename_dir(self, source=None, target=None):
        """Rename a dir."""
        if source is None:
            source = os.path.join(self.basedir, 'source')
            os.mkdir(source)
        if target is None:
            target = os.path.join(self.basedir, 'target')

        rename(source, target)

        self.assertFalse(path_exists(source),
            'Path %r should not exist after rename.' % source)
        self.assertTrue(path_exists(target),
            'Path %r should exist after rename.' % target)
    def is_ignored(self, path):
        """should we ignore this path?"""
        # check first if the platform code knows what to do with it
        if not self.platform_is_ignored(path):
            # check if we can read
            if path_exists(path) and not access(path):
                self.log.warning("Ignoring path as we don't have enough "
                                 "permissions to track it: %r", path)
                return True

            is_conflict = self.conflict_RE.search
            dirname, filename = os.path.split(path)
            # ignore conflicts
            if is_conflict(filename):
                return True
            # ignore partial downloads
            if filename == '.u1partial' or filename.startswith('.u1partial.'):
                return True

            # and ignore paths that are inside conflicts (why are we even
            # getting the event?)
            if any(part.endswith('.u1partial') or is_conflict(part)
                   for part in dirname.split(os.path.sep)):
                return True

            if self.ignore_RE is not None and self.ignore_RE.match(filename):
                return True

            return False
        return True
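
(A standalone sketch of the filename-level checks above. The conflict suffix is defined by conflict_RE elsewhere in the class; the '.u1conflict' spelling used here is an assumption for the example.)

import os
import re

conflict_re = re.compile(r'.*\.u1conflict(\.\d+)?$')  # assumed pattern

def looks_ignorable(path):
    """Sketch: flag conflict copies, partial downloads, and their children."""
    dirname, filename = os.path.split(path)
    if conflict_re.search(filename):
        return True
    if filename == '.u1partial' or filename.startswith('.u1partial.'):
        return True
    return any(part.endswith('.u1partial') or conflict_re.search(part)
               for part in dirname.split(os.path.sep))

assert looks_ignorable('/tmp/report.odt.u1conflict')
assert looks_ignorable('/tmp/.u1partial.report.odt')
assert not looks_ignorable('/tmp/notes.txt')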
Example #11
 def test_allow_writes(self):
     """Test for allow_writes."""
     set_dir_readonly(self.basedir)
     with allow_writes(self.basedir):
         foo_dir = os.path.join(self.basedir, 'foo')
         os.mkdir(foo_dir)
         self.assertTrue(path_exists(foo_dir))
Example #12
 def test_write_extra(self):
     """Writing the throttling back to the file, with extra sections."""
     conf_file = os.path.join(
         self.test_root, 'test_write_extra_config.conf')
     # write some throttling values to the config file
     with open_file(conf_file, 'w') as fp:
         fp.write('[__main__]\n')
         fp.write('log_level = INFO\n')
         fp.write('disable_ssl_verify = True\n')
         fp.write('\n')
         fp.write('[bandwidth_throttling]\n')
         fp.write('on = False\n')
         fp.write('read_limit = 2000\n')
         fp.write('write_limit = 200\n')
     self.assertTrue(path_exists(conf_file))
     conf = config._Config(conf_file)
     conf.set_throttling(True)
     conf.set_throttling_read_limit(3000)
     conf.set_throttling_write_limit(300)
     conf.save()
     # load the config in a barebone ConfigParser and check
     conf_1 = ConfigParser()
     conf_1.read(conf_file)
     self.assertThrottlingSection(conf_1, conf, True, 3000, 300)
     self.assertEqual(conf_1.get('__main__', 'log_level'),
                      conf.get('__main__', 'log_level'))
     self.assertEqual(conf_1.getboolean('__main__', 'disable_ssl_verify'),
                      conf.getboolean('__main__', 'disable_ssl_verify'))
Example #13
    def start(self):
        """Start the comparison."""
        log_info("start scan all volumes")
        to_scan = self._get_volumes(all_volumes=False)
        for vol in to_scan:
            # check that the path exists in disk
            if not path_exists(vol.path):
                log_warning('Volume disappeared: %r - %r', vol.volume_id, vol.path)
                if isinstance(vol, volume_manager.Share):
                    log_debug('Removing %r metadata', vol.volume_id)
                    self.vm.share_deleted(vol.volume_id)
                elif isinstance(vol, volume_manager.UDF):
                    log_debug('Unsubscribing %r', vol.volume_id)
                    self.vm.unsubscribe_udf(vol.volume_id)
                # if root is missing, we should crash and burn as it's
                # created on each startup!
                continue
            try:
                mdobj = self.fsm.get_by_path(vol.path)
            except KeyError:
                # this could happen in a strange corruption situation where FSM
                # lost the share information, so we remove it, because VM will
                # download it again
                self.vm.share_deleted(vol.volume_id)
            else:
                self._queue.appendleft((vol, vol.path, mdobj.mdid, False))

        # first of all, remove old partials and clean trash
        self._remove_partials()
        self._process_limbo()
        self._process_ro_shares()

        yield self._queue_scan()
        self._show_broken_nodes()
Example #14
    def __init__(self, path, auto_merge=True, dead_bytes_threshold=0.5, max_immutable_files=20):
        """Initialize the instance.

        @param auto_merge: enable or disable automatic merge/compaction.
        @param dead_bytes_threshold: the limit factor of dead vs live bytes to
            trigger a merge and/or live file rotation.
        @param max_immutable_files: the max number of inactive files to use,
            once this value is reached a merge is triggered.
        """
        logger.info("Initializing Tritcask on: %s", path)
        self._keydir = Keydir()
        self.base_path = path
        self.dead_bytes_threshold = dead_bytes_threshold
        self.max_immutable_files = max_immutable_files
        self.auto_merge = auto_merge
        if not path_exists(self.base_path):
            make_dir(self.base_path, recursive=True)
        elif not is_dir(self.base_path):
            raise ValueError("path must be a directory.")
        self.live_file = None
        self._immutable = {}
        self._find_data_files()
        self._build_keydir()
        # now check if we should rotate the live file
        # and merge immutable ones
        self._rotate_and_merge()
        # check if we found a live data file
        # if not, define one (it will be created later)
        if self.live_file is None:
            # it's a clean start, let's create the first file
            self.live_file = DataFile(self.base_path)
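
(A rough sketch of the kind of decision _rotate_and_merge() makes from the two tuning knobs documented above; the real method works over keydir statistics, so treat this as an illustration of the thresholds, not the actual algorithm.)

def needs_compaction(dead_bytes, live_bytes, immutable_count,
                     dead_bytes_threshold=0.5, max_immutable_files=20):
    """Sketch: decide whether to rotate the live file and merge immutables."""
    if immutable_count >= max_immutable_files:
        return True  # too many inactive data files on disk
    if live_bytes == 0:
        return dead_bytes > 0
    # merge once dead data reaches the configured fraction of live data
    return (dead_bytes / float(live_bytes)) >= dead_bytes_threshold

# e.g. 600 dead bytes against 1000 live bytes trips the default 0.5 threshold
assert needs_compaction(600, 1000, immutable_count=3)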
    def test_broken_metadata_items(self):
        """Test that broken metadata is ignored during iteritems."""
        self.shelf['ok_key'] = {'status': 'this is valid metadata'}
        self.shelf['bad_file'] = {}
        path = self.shelf.key_file('bad_file')
        open_file(path, 'w').close()
        self.assertRaises(KeyError, self.shelf.__getitem__, 'bad_file')
        self.assertEqual(1, len(list(self.shelf.items())))
        self.assertFalse(path_exists(path))

        self.shelf['broken_pickle'] = {}
        path = self.shelf.key_file('broken_pickle')
        with open_file(path, 'w') as f:
            f.write(BROKEN_PICKLE)
        self.assertRaises(KeyError, self.shelf.__getitem__, 'broken_pickle')
        self.assertEqual(1, len(list(self.shelf.items())))
        self.assertFalse(path_exists(path))
Example #16
 def _check_move_file(self, src, dst, real_dst):
     """Check that a file was indeed moved."""
     with open_file(src, "rb") as f:
         contents = f.read()
     recursive_move(src, dst)
     with open_file(real_dst, "rb") as f:
         self.assertEqual(contents, f.read())
     self.assertFalse(path_exists(src))
Example #17
    def test_movetotrash_file_ok(self):
        """Move a file to trash ok.

        Just check it was removed, because we can't monkeypatch the trash
        to see that it was actually called.
        """
        move_to_trash(self.testfile)
        self.assertFalse(path_exists(self.testfile))
Example #18
 def mktemp(self, name="temp"):
     """ Customized mktemp that accepts an optional name argument. """
     tempdir = os.path.join(self.tmpdir, name)
     if path_exists(tempdir):
         self.rmtree(tempdir)
     self.makedirs(tempdir)
     self.addCleanup(self.rmtree, tempdir)
     assert isinstance(tempdir, str)
     return tempdir
Example #19
 def add_share(self, share):
     """Add share to the shares dict."""
     self.shares[share.id] = share
     # if the share doesn't exist, create it
     if not path_exists(share.path):
         make_dir(share.path)
     # if it's a ro share, change the perms
     if not share.can_write():
         set_dir_readonly(share.path)
Example #20
 def test_ignore_one(self):
     """Test ignore files config, one regex."""
     conf_file = os.path.join(self.test_root, 'test_new_config.conf')
     with open_file(conf_file, 'w') as fp:
         fp.write('[__main__]\n')
         fp.write('ignore = .*\\.pyc\n')  # all .pyc files
     self.assertTrue(path_exists(conf_file))
     self.cp.read([conf_file])
     self.cp.parse_all()
     self.assertEqual(self.cp.get('__main__', 'ignore').value, [r'.*\.pyc'])
Example #21
    def test_movetotrash_dir_ok(self):
        """Move a dir to trash ok.

        Just check it was removed, because we can't monkeypatch the trash
        to see that it was actually called.
        """
        path = os.path.join(self.basedir, 'foo')
        make_dir(path)
        move_to_trash(path)
        self.assertFalse(path_exists(path))
Example #22
    def test_set_dir_readwrite(self):
        """Test for set_dir_readwrite."""
        set_dir_readonly(self.basedir)
        # do not queue up any cleanup function since we're restoring perms in
        # the next call

        set_dir_readwrite(self.basedir)
        foo_dir = os.path.join(self.basedir, 'foo')
        os.mkdir(foo_dir)
        self.assertTrue(path_exists(foo_dir))
Example #23
 def save(self):
     """Save the config object to disk"""
     # cleanup empty sections
     for section in [MAIN, THROTTLING, NOTIFICATIONS]:
         if self.has_section(section) and not self.options(section):
             self.remove_section(section)
     with open(self.config_file + '.new', 'w') as fp:
         self.write(fp)
     if path_exists(self.config_file):
         rename(self.config_file, self.config_file + '.old')
     rename(self.config_file + '.new', self.config_file)
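
(The rename dance in save() is ordered so that a crash at any point leaves the new file, the old file, or both on disk. A hedged sketch of the matching load-time recovery; illustrative only, the real config class may handle this differently.)

import os

def pick_config_file(config_file):
    """Sketch: choose which file to load after a possibly interrupted save()."""
    if os.path.exists(config_file):
        return config_file              # normal case
    if os.path.exists(config_file + '.new'):
        return config_file + '.new'     # crashed between the two renames
    if os.path.exists(config_file + '.old'):
        return config_file + '.old'     # fall back to the previous version
    return None                         # nothing saved yet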
Example #24
 def test_log_level_new_config(self):
     """Test log_level upgrade hook with new config."""
     conf_file = os.path.join(self.test_root, 'test_new_config.conf')
     # write some throttling values to the config file
     with open_file(conf_file, 'w') as fp:
         fp.write('[logging]\n')
         fp.write('level = DEBUG\n')
     self.assertTrue(path_exists(conf_file))
     self.cp.read([conf_file])
     self.cp.parse_all()
     self.assertEqual(self.cp.get('logging', 'level').value, 10)
 def test_different_depth_sizes(self):
     """Test the basic operations (delitem, getitem, setitem) with
     depths between 0 and len(hashlib.sha1().hexdigest())
     """
     base_path = os.path.join(self.path, 'shelf_depth-')
     sha1 = hashlib.sha1()
     for idx in range(0, len(sha1.hexdigest())):
         path = base_path + str(idx)
         shelf = self.fileshelf_class(path, depth=idx)
         key = sha1.hexdigest()
         # test __setitem__
         shelf[key] = 'foo'
         key_path = os.path.join(path, *[key[i] for i in range(0, idx)])
         self.assertTrue(path_exists(os.path.join(key_path, key)))
         # test __getitem__
         self.assertEqual('foo', shelf[key])
         # test __delitem__
         del shelf[key]
         self.assertRaises(KeyError, shelf.__getitem__, key)
         self.assertFalse(path_exists(os.path.join(key_path, key)))
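
(What the depth parameter does, briefly: the first depth characters of the key become nested one-character directories, which keeps any single directory from growing too large. A minimal sketch of that layout; key_file here is a stand-in for the shelf's own method.)

import os

def key_file(base_path, key, depth):
    """Sketch: shard a key into `depth` one-character subdirectories."""
    return os.path.join(base_path, *list(key[:depth]), key)

# e.g. depth=3, key='abc123' -> <base_path>/a/b/c/abc123 (POSIX separators)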
Example #26
 def __init__(self, path):
     """Create the instance."""
     self.path = path
     self.tempfile = None
     if path_exists(self.path) and stat_path(self.path).st_size > 0:
         # if it's there and size > 0, open only for read
         self.fd = open_file(self.path, "rb")
     else:
         # this is a new hint file, let's create it as a tempfile.
         self.tempfile = tempfile.mktemp(dir=os.path.dirname(self.path))
         self.fd = open_file(self.tempfile, "w+b")
Example #27
 def __setitem__(self, key, value):
     """ setitem backed by the file storage """
     path = self.key_file(key)
     new_path = path + ".new"
     old_path = path + ".old"
     self._check_and_create_dirs(os.path.dirname(path))
     with open_file(new_path, "wb") as fh:
         self._pickle(value, fh, protocol=2)
         fh.flush()
     if path_exists(path):
         rename(path, old_path)
     rename(new_path, path)
Example #28
 def test_fs_monitor_not_default(self):
     """Test get monitor."""
     monitor_id = 'my_monitor'
     conf_file = os.path.join(self.test_root, 'test_new_config.conf')
     with open_file(conf_file, 'w') as fd:
         fd.write('[__main__]\n')
         fd.write('fs_monitor = %s\n' % monitor_id)
     self.assertTrue(path_exists(conf_file))
     self.cp.read([conf_file])
     self.cp.parse_all()
     self.assertEqual(
         self.cp.get('__main__', 'fs_monitor').value, monitor_id)
Example #29
def create_shares_link(source, dest):
    """Create the shares symlink."""
    result = False
    if not path_exists(dest):
        # remove the symlink if it's broken
        if is_link(dest) and read_link(dest) != source:
            remove_link(dest)

        if not is_link(dest):
            # only create the link if it does not exist
            make_link(source, dest)
            result = True
    return result
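
(Note the nesting above: path_exists() follows symlinks, so a dangling 'Shared With Me' link makes the outer check pass, and that is exactly when the broken link gets removed and recreated. A quick check with the standard library, which behaves the same way here.)

import os
import tempfile

d = tempfile.mkdtemp()
link = os.path.join(d, 'dangling')
os.symlink(os.path.join(d, 'missing-target'), link)  # broken symlink
print(os.path.exists(link))   # False: exists() follows the (dead) target
print(os.path.islink(link))   # True: the link entry itself is still there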
Example #30
 def test_write_new(self):
     """Test writing the throttling section to a new config file."""
     conf_file = os.path.join(self.test_root, 'test_write_new_config.conf')
     self.assertFalse(path_exists(conf_file))
     conf = config._Config(conf_file)
     conf.set_throttling(True)
     conf.set_throttling_read_limit(1000)
     conf.set_throttling_write_limit(100)
     conf.save()
     # load the config in a barebone ConfigParser and check
     conf_1 = ConfigParser()
     conf_1.read(conf_file)
     self.assertThrottlingSection(conf_1, conf, True, 1000, 100)