def _create_paths(self):
    """Create the paths for the tests.

    The following structure will be created:

        self.basedir/
        |-> self.testfile
        |-> dir0/
        |   |-> file0
        |   |-> link
        |-> dir1/
        |   |-> file1
        |   |-> dir11/
        |-> dir2/
        |   |-> file2
        |-> dir3/
    """
    open_file(self.testfile, 'w').close()
    for i in xrange(3):
        dir_name = 'dir%i' % i
        dir_path = os.path.join(self.basedir, dir_name)
        make_dir(dir_path, recursive=True)
        file_name = 'file%i' % i
        file_path = os.path.join(dir_path, file_name)
        open_file(file_path, "w").close()
    make_link(os.path.devnull, os.path.join(self.basedir, 'dir0', 'link'))
    make_dir(os.path.join(self.basedir, 'dir1', 'dir11'))
    make_dir(os.path.join(self.basedir, 'dir3'), recursive=True)
def test_broken_metadata_with_backup(self):
    """Test that each time a metadata file is updated a .old is kept."""
    self.shelf['bad_file'] = {'value': 'old'}
    path = self.shelf.key_file('bad_file')
    self.assertFalse(path_exists(path + '.old'))
    self.assertEqual({'value': 'old'}, self.shelf['bad_file'])

    # force the creation of the .old file
    self.shelf['bad_file'] = {'value': 'new'}
    self.assertTrue(path_exists(path + '.old'))
    # check that the new value is there
    self.assertEqual({'value': 'new'}, self.shelf['bad_file'])
    # write the current md file with 0 bytes
    open_file(path, 'w').close()
    # test that the old value is retrieved
    self.assertEqual({'value': 'old'}, self.shelf['bad_file'])

    self.shelf['broken_pickle'] = {'value': 'old'}
    path = self.shelf.key_file('broken_pickle')
    # check that .old doesn't exist
    self.assertFalse(path_exists(path + '.old'))
    # force the creation of the .old file
    self.shelf['broken_pickle'] = {'value': 'new'}
    # check that .old exists
    self.assertTrue(path_exists(path + '.old'))
    # check that the new value is there
    self.assertEqual({'value': 'new'}, self.shelf['broken_pickle'])
    # write random bytes to the md file
    with open_file(path, 'w') as f:
        f.write(BROKEN_PICKLE)
    # check that the old value is retrieved
    self.assertEqual({'value': 'old'}, self.shelf['broken_pickle'])
def test_broken_metadata_with_backup(self):
    """Overrides parent test as we have the value in the cache."""
    self.shelf['bad_file'] = {'value': 'old'}
    path = self.shelf.key_file('bad_file')
    self.assertFalse(path_exists(path + '.old'))
    self.assertEqual({'value': 'old'}, self.shelf['bad_file'])

    # force the creation of the .old file
    self.shelf['bad_file'] = {'value': 'new'}
    self.assertTrue(path_exists(path + '.old'))
    # check that the new value is there
    self.assertEqual({'value': 'new'}, self.shelf['bad_file'])
    # write the current md file with 0 bytes
    open_file(path, 'w').close()
    # HERE IS THE DIFFERENCE with the parent tests
    # test that the new value is retrieved from the cache!
    self.assertEqual({'value': 'new'}, self.shelf['bad_file'])

    self.shelf['broken_pickle'] = {'value': 'old'}
    path = self.shelf.key_file('broken_pickle')
    # check that .old doesn't exist
    self.assertFalse(path_exists(path + '.old'))
    # force the creation of the .old file
    self.shelf['broken_pickle'] = {'value': 'new'}
    # check that .old exists
    self.assertTrue(path_exists(path + '.old'))
    # check that the new value is there
    self.assertEqual({'value': 'new'}, self.shelf['broken_pickle'])
    # write random bytes to the md file
    with open_file(path, 'w') as f:
        f.write(BROKEN_PICKLE)
    # HERE IS THE DIFFERENCE with the parent tests
    # test that the new value is retrieved from the cache!
    self.assertEqual({'value': 'new'}, self.shelf['broken_pickle'])
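
# Note: a minimal sketch (not part of the test suite) of why the override
# above behaves differently from the parent test: a shelf that keeps a
# write-through in-memory cache serves reads from memory, so corrupting
# the on-disk file after a write is never noticed. The class below is an
# assumption for illustration only, not the project's real cached shelf.
class CachedShelf(object):
    """Wrap a file-backed shelf with a write-through in-memory cache."""

    def __init__(self, backend):
        self._backend = backend
        self._cache = {}

    def __setitem__(self, key, value):
        self._backend[key] = value   # still persisted to disk
        self._cache[key] = value     # and remembered in memory

    def __getitem__(self, key):
        try:
            return self._cache[key]  # disk corruption is not seen here
        except KeyError:
            value = self._backend[key]
            self._cache[key] = value
            return value
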
def test_open_file_write(self):
    """Open a file, and write."""
    fh = open_file(self.testfile, 'w')
    fh.write("foo")
    fh.close()
    f = open_file(self.testfile)
    self.assertEqual(f.read(), "foo")
    f.close()
def test_set_file_readwrite(self):
    """Test for set_file_readwrite."""
    set_file_readonly(self.testfile)
    self.addCleanup(set_dir_readwrite, self.testfile)
    set_file_readwrite(self.testfile)
    open_file(self.testfile, 'w').close()
    self.assertTrue(can_write(self.testfile))
def setUp(self, test_dir_name=None, test_file_name=None,
          valid_file_path_builder=None):
    """Setup for the tests."""
    yield super(OSWrapperTests, self).setUp(
        test_dir_name=test_dir_name, test_file_name=test_file_name,
        valid_file_path_builder=valid_file_path_builder)
    # make sure the file exists
    open_file(self.testfile, 'w').close()
def test_bad_path(self):
    """Test that the shelf removes the previous shelve file and creates
    a directory for the new file based shelf at creation time.
    """
    path = os.path.join(self.path, 'shelf_file')
    open_file(path, 'w').close()
    self.fileshelf_class(path)
    self.assertTrue(os.path.isdir(path))
def _check_move_file(self, src, dst, real_dst):
    """Check that a file was indeed moved."""
    with open_file(src, "rb") as f:
        contents = f.read()
    recursive_move(src, dst)
    with open_file(real_dst, "rb") as f:
        self.assertEqual(contents, f.read())
    self.assertFalse(path_exists(src))
def test_movetotrash_file_bad(self):
    """Something bad happens when moving to trash; remove anyway."""
    path = os.path.join(self.basedir, 'foo')
    open_file(path, 'w').close()
    move_to_trash(path)
    self.assertFalse(os.path.exists(path))
    self.assertTrue(self.handler.check_warning(
        "Problems moving to trash!", "Removing anyway", "foo"))
def test_corrupted_backup(self):
    """Test __getitem__ when the .old backup file is also corrupted."""
    self.shelf["foo"] = "bar"
    # create the .old backup
    self.shelf["foo"] = "bar1"
    # write 0 bytes to both
    open_file(self.shelf.key_file('foo') + '.old', 'w').close()
    open_file(self.shelf.key_file('foo'), 'w').close()
    self.assertRaises(KeyError, self.shelf.__getitem__, 'foo')
def test_endless_broken_backups(self):
    """Test __getitem__ with a lot of files named .old.old.....old."""
    self.shelf["foo"] = "bar"
    path = self.shelf.key_file('foo')
    open_file(self.shelf.key_file('foo'), 'w').close()
    for _ in xrange(20):
        open_file(path + '.old', 'w').close()
        path += '.old'
    self.assertRaises(KeyError, self.shelf.__getitem__, 'foo')
def test_movetotrash_file_systemnotcapable(self):
    """The system is not capable of moving into trash."""
    FakeGIOFile._bad_trash_call = GIO_NOT_SUPPORTED
    self.patch(gio, "File", FakeGIOFile)
    path = os.path.join(self.basedir, 'foo')
    open_file(path, 'w').close()
    move_to_trash(path)
    self.assertFalse(os.path.exists(path))
    self.assertTrue(self.handler.check_warning(
        "Problems moving to trash!", "Removing anyway", "foo",
        "ERROR_NOT_SUPPORTED"))
def test_movetotrash_dir_bad(self):
    """Something bad happens when moving to trash; remove anyway."""
    FakeGIOFile._bad_trash_call = False  # error
    self.patch(gio, "File", FakeGIOFile)
    path = os.path.join(self.basedir, 'foo')
    os.mkdir(path)
    open_file(os.path.join(path, 'file inside directory'), 'w').close()
    move_to_trash(path)
    self.assertFalse(os.path.exists(path))
    self.assertTrue(self.handler.check_warning(
        "Problems moving to trash!", "Removing anyway", "foo"))
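
# Note: a minimal sketch (not part of the module under test) of the
# fallback behaviour these trash tests exercise: try to move the path to
# the trash, and if that fails for any reason, log the warning the tests
# look for and remove the path outright. The function name and signature
# are assumptions for illustration only.
import logging
import os
import shutil


def move_to_trash_or_remove(path, trash_call):
    """Try trash_call(path); on failure warn and remove the path anyway."""
    try:
        trash_call(path)
    except Exception as error:
        logging.getLogger(__name__).warning(
            "Problems moving to trash! (%s) Removing anyway: %r",
            error, path)
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.remove(path)
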
def __init__(self, path):
    """Create the instance."""
    self.path = path
    self.tempfile = None
    if path_exists(self.path) and stat_path(self.path).st_size > 0:
        # if it's there and size > 0, open only for read
        self.fd = open_file(self.path, "rb")
    else:
        # this is a new hint file, let's create it as a tempfile
        self.tempfile = tempfile.mktemp(dir=os.path.dirname(self.path))
        self.fd = open_file(self.tempfile, "w+b")
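
# Note: a minimal, self-contained sketch (not part of the original module)
# of the write-to-tempfile-then-rename pattern that the tempfile above
# suggests. The helper name `commit_hint_file` and the use of plain
# os.rename are assumptions for illustration; the real class may finalize
# its tempfile differently.
import os
import tempfile


def commit_hint_file(path, data):
    """Write data to a sibling temp file, then rename it over `path`.

    Writing to a tempfile in the same directory and renaming it into place
    keeps readers from ever seeing a half-written file at `path`.
    """
    fd, temp_path = tempfile.mkstemp(dir=os.path.dirname(path))
    try:
        with os.fdopen(fd, "wb") as fh:
            fh.write(data)
        os.rename(temp_path, path)  # atomic on POSIX filesystems
    except Exception:
        os.remove(temp_path)
        raise
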
def setUp(self, test_dir_name=None, test_file_name=None,
          valid_file_path_builder=None):
    """Setup for the tests."""
    yield super(RecursiveMoveTests, self).setUp(
        test_dir_name=test_dir_name, test_file_name=test_file_name,
        valid_file_path_builder=valid_file_path_builder)
    # make sure the file exists
    open_file(self.testfile, 'w').close()
    self._create_recursive_move_dirs()
    self.addCleanup(self._clean_recursive_move_dirs)
def test_delete_backups_too(self):
    """Test that __delitem__ also deletes the .old/.new files left around."""
    self.shelf["foo"] = "bar"
    # create the .old backup
    self.shelf["foo"] = "bar1"
    path = self.shelf.key_file('foo')
    # create a .new file (a hard reboot during the rename dance)
    open_file(path + '.new', 'w').close()
    del self.shelf['foo']
    self.assertFalse(path_exists(path))
    self.assertFalse(path_exists(path + '.old'), 'there is a .old file!')
    self.assertFalse(path_exists(path + '.new'), 'there is a .new file!')
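
# Note: a minimal sketch (not part of the test suite) of the ".old/.new
# rename dance" the comment above refers to, as these tests suggest it:
# the new value is pickled into `key_file + '.new'`, the current file is
# kept as `key_file + '.old'`, and the .new file is renamed into place.
# The function name and the exact ordering are assumptions for
# illustration only.
import os
import cPickle as pickle


def save_with_backup(key_file, value):
    """Persist value, keeping the previous file as a .old backup."""
    new_path = key_file + '.new'
    old_path = key_file + '.old'
    with open(new_path, 'wb') as fh:
        pickle.dump(value, fh, protocol=2)
    if os.path.exists(key_file):
        # keep the previous metadata around in case the new file is broken
        if os.path.exists(old_path):
            os.remove(old_path)
        os.rename(key_file, old_path)
    os.rename(new_path, key_file)
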
def test_broken_metadata_without_backup(self):
    """Test the shelf behavior when it hits a broken metadata file
    without backup.
    """
    self.shelf['bad_file'] = {}
    path = self.shelf.key_file('bad_file')
    open_file(path, 'w').close()
    self.assertRaises(KeyError, self.shelf.__getitem__, 'bad_file')

    self.shelf['broken_pickle'] = {}
    path = self.shelf.key_file('broken_pickle')
    with open_file(path, 'w') as f:
        f.write(BROKEN_PICKLE)
    self.assertRaises(KeyError, self.shelf.__getitem__, 'broken_pickle')
def test_listdir(self, expected_result=None):
    """Return a list of the files in a dir."""
    if expected_result is None:
        _, valid_path_name = os.path.split(self.testfile)
        expected_result = [valid_path_name]
    for extra in ('foo', 'bar'):
        open_file(os.path.join(self.basedir, extra), 'w').close()
        expected_result.append(extra)
    l = listdir(self.basedir)
    self.assertEqual(sorted(l), sorted(expected_result))
    for path in l:
        self.assertIsInstance(path, type(self.basedir))
def test_open_file_with_rb(self):
    """Check that the file to hash is opened with 'rb' mode."""
    called = []
    orig = hash_queue.open_file

    def faked_open_file(*a):
        called.append(a)
        return orig(*a)

    self.patch(hash_queue, 'open_file', faked_open_file)
    queue = hash_queue.UniqueQueue()
    testfile = os.path.join(self.test_dir, "testfile")
    with open_file(testfile, "wb") as fh:
        fh.write("foobar")
    item = ((testfile, "mdid"), FAKE_TIMESTAMP)
    queue.put(item)
    d = defer.Deferred()
    eq = FakeEventQueue(d)
    hasher = hash_queue._Hasher(queue=queue, end_mark='end-mark',
                                event_queue=eq)
    # start the hasher after putting the work items
    hasher.start()
    yield d
    hasher.stop()
    self.assertEqual(called, [(testfile, 'rb')])
def test_shutdown_while_hashing(self):
    """Test that the HashQueue is shut down ASAP while it's hashing."""
    # create large data in order to test
    testinfo = os.urandom(500000)
    hasher = content_hash_factory()
    hasher.hash_object.update(testinfo)
    testfile = os.path.join(self.test_dir, "testfile")
    # send what to hash
    with open_file(testfile, "wb") as fh:
        fh.write(testinfo)

    class Helper(object):
        """Helper class."""

        def push(self, event, **kwargs):
            """Callback."""

    receiver = Helper()
    hq = hash_queue.HashQueue(receiver)
    # read in small chunks, so we have more iterations
    hq.hasher.chunk_size = 2**10
    hq.insert(testfile, "mdid")
    time.sleep(0.1)
    hq.shutdown()
    # block until the hash is stopped and the queue is empty;
    # a shutdown clears the queue
    hq._queue.join()
    self.assertFalse(hq.hasher.hashing)
    self.assertTrue(hq.hasher._stopped)
    #self.assertFalse(hq.hasher.isAlive())
    self.assertTrue(hq._queue.empty())
def test_write_extra(self):
    """Writing the throttling back to the file, with extra sections."""
    conf_file = os.path.join(
        self.test_root, 'test_write_extra_config.conf')
    # write some throttling values to the config file
    with open_file(conf_file, 'w') as fp:
        fp.write('[__main__]\n')
        fp.write('log_level = INFO\n')
        fp.write('disable_ssl_verify = True\n')
        fp.write('\n')
        fp.write('[bandwidth_throttling]\n')
        fp.write('on = False\n')
        fp.write('read_limit = 2000\n')
        fp.write('write_limit = 200\n')
    self.assertTrue(path_exists(conf_file))

    conf = config._Config(conf_file)
    conf.set_throttling(True)
    conf.set_throttling_read_limit(3000)
    conf.set_throttling_write_limit(300)
    conf.save()

    # load the config in a barebone ConfigParser and check
    conf_1 = ConfigParser()
    conf_1.read(conf_file)
    self.assertThrottlingSection(conf_1, conf, True, 3000, 300)
    self.assertEqual(conf_1.get('__main__', 'log_level'),
                     conf.get('__main__', 'log_level'))
    self.assertEqual(conf_1.getboolean('__main__', 'disable_ssl_verify'),
                     conf.getboolean('__main__', 'disable_ssl_verify'))
def _hash(self, path):
    """Actually hashes a file."""
    hasher = content_hash_factory()
    crc = 0
    size = 0
    try:
        initial_stat = stat_path(path)
        with open_file(path, 'rb') as fh:
            while True:
                # stop hashing if path_to_cancel == path or _stopped is True
                with self.mutex:
                    path_to_cancel = self._should_cancel
                if path_to_cancel == path or self._stopped:
                    raise StopHashing('hashing of %r was cancelled' % path)
                cont = fh.read(self.chunk_size)
                if not cont:
                    break
                hasher.update(cont)
                crc = crc32(cont, crc)
                size += len(cont)
    finally:
        with self.mutex:
            self._should_cancel = None
    return hasher.content_hash(), crc, size, initial_stat
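
# Note: a minimal, self-contained sketch (not part of the original module)
# of the same chunked hash/crc32/size computation, with plain hashlib.sha1
# standing in for content_hash_factory(); the helper name and the sha1
# choice are assumptions for illustration only.
import hashlib
import os
from zlib import crc32


def hash_file(path, chunk_size=2 ** 16):
    """Return (hexdigest, crc32, size, stat) computed in fixed-size chunks."""
    hasher = hashlib.sha1()
    crc = 0
    size = 0
    initial_stat = os.stat(path)
    with open(path, 'rb') as fh:
        while True:
            chunk = fh.read(chunk_size)
            if not chunk:
                break
            hasher.update(chunk)
            crc = crc32(chunk, crc)
            size += len(chunk)
    return hasher.hexdigest(), crc, size, initial_stat
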
def test_keys_with_old_and_new(self):
    """Test keys() with .old and .new files around."""
    self.shelf["foo"] = "bar"
    self.shelf["foo1"] = "bar1"
    open_file(self.shelf.key_file('foo') + '.old', 'w').close()
    open_file(self.shelf.key_file('foo1') + '.old', 'w').close()
    open_file(self.shelf.key_file('foo') + '.new', 'w').close()
    open_file(self.shelf.key_file('foo1') + '.new', 'w').close()
    self.assertEqual(set(['foo', 'foo1']), set(self.shelf.keys()))
def test_broken_metadata_items(self):
    """Test that broken metadata is ignored during iteritems."""
    self.shelf['ok_key'] = {'status': 'this is valid metadata'}

    self.shelf['bad_file'] = {}
    path = self.shelf.key_file('bad_file')
    open_file(path, 'w').close()
    self.assertRaises(KeyError, self.shelf.__getitem__, 'bad_file')
    self.assertEqual(1, len(list(self.shelf.items())))
    self.assertFalse(path_exists(path))

    self.shelf['broken_pickle'] = {}
    path = self.shelf.key_file('broken_pickle')
    with open_file(path, 'w') as f:
        f.write(BROKEN_PICKLE)
    self.assertRaises(KeyError, self.shelf.__getitem__, 'broken_pickle')
    self.assertEqual(1, len(list(self.shelf.items())))
    self.assertFalse(path_exists(path))
def test_get_simult_transfers(self):
    """Get simult transfers."""
    conf_file = os.path.join(self.test_root, 'test_load_config.conf')
    with open_file(conf_file, 'w') as fh:
        fh.write('[__main__]\n')
        fh.write('simult_transfers = 12345\n')
    conf = config._Config(conf_file)
    self.assertEqual(conf.get_simult_transfers(), 12345)
def test_get_max_payload_size(self):
    """Get the maximum payload size."""
    conf_file = os.path.join(self.test_root, 'test_load_config.conf')
    with open_file(conf_file, 'w') as fh:
        fh.write('[__main__]\n')
        fh.write('max_payload_size = 12345\n')
    conf = config._Config(conf_file)
    self.assertEqual(conf.get_max_payload_size(), 12345)
def test_get_memory_pool_limit(self):
    """Get the memory pool limit."""
    conf_file = os.path.join(self.test_root, 'test_load_config.conf')
    with open_file(conf_file, 'w') as fh:
        fh.write('[__main__]\n')
        fh.write('memory_pool_limit = 12345\n')
    conf = config._Config(conf_file)
    self.assertEqual(conf.get_memory_pool_limit(), 12345)
def test_unique(self):
    """The hasher should return in order."""
    # calculate what we should receive
    should_be = []
    for i in range(10):
        hasher = content_hash_factory()
        text = "supercalifragilistico" + str(i)
        hasher.hash_object.update(text)
        tfile = os.path.join(self.test_dir, "tfile" + str(i))
        with open_file(tfile, "wb") as fh:
            fh.write("supercalifragilistico" + str(i))
        d = dict(path=tfile, hash=hasher.content_hash(), crc32=crc32(text),
                 size=len(text), stat=stat_path(tfile))
        should_be.append(("HQ_HASH_NEW", d))

    d = defer.Deferred()

    class Helper(object):
        """Helper class."""
        # class-closure, cannot use self, pylint: disable-msg=E0213

        def __init__(innerself):
            innerself.store = []

        def push(innerself, event, **kwargs):
            """Callback."""
            innerself.store.append((event, kwargs))
            if len(innerself.store) == 10:
                if innerself.store == should_be:
                    d.callback(True)
                else:
                    d.errback(Exception("are different!"))

    receiver = Helper()
    hq = hash_queue.HashQueue(receiver)
    self.addCleanup(hq.shutdown)
    # stop the hasher so we can test the unique items in the queue
    hq.hasher.stop()
    self.log.debug('Hasher stopped (forced)')
    # allow the hasher to fully stop
    time.sleep(0.1)
    # create a new hasher just like the HashQueue creates it
    hq.hasher = hash_queue._Hasher(hq._queue, hq._end_mark, receiver)
    hq.hasher.setDaemon(True)
    # send to hash twice
    for i in range(10):
        tfile = os.path.join(self.test_dir, "tfile" + str(i))
        hq.insert(tfile, "mdid")
        hq.insert(tfile, "mdid")
    # start the hasher
    self.log.debug('Hasher started (forced)')
    hq.hasher.start()
    # insert the last item to check the uniqueness in the queue while
    # the hasher is running
    for i in range(9, 10):
        tfile = os.path.join(self.test_dir, "tfile" + str(i))
        hq.insert(tfile, "mdid")
    return d
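
# Note: a minimal sketch (not part of the test suite) of the deduplication
# behaviour this test exercises: a queue that silently drops an item whose
# key is already waiting, so inserting the same path twice produces a
# single hash event. `hash_queue.UniqueQueue` is assumed to work along
# these lines; the class below is for illustration only.
import threading
from collections import deque


class DedupQueue(object):
    """A FIFO queue that ignores items whose key is already waiting."""

    def __init__(self):
        self._items = deque()
        self._keys = set()
        self._lock = threading.Lock()

    def put(self, key, payload=None):
        with self._lock:
            if key in self._keys:
                return False  # duplicate: already queued, drop it
            self._keys.add(key)
            self._items.append((key, payload))
            return True

    def get(self):
        with self._lock:
            key, payload = self._items.popleft()
            self._keys.discard(key)
            return key, payload
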
def test_ignore_one(self):
    """Test ignore files config, one regex."""
    conf_file = os.path.join(self.test_root, 'test_new_config.conf')
    with open_file(conf_file, 'w') as fp:
        fp.write('[__main__]\n')
        fp.write('ignore = .*\\.pyc\n')  # all .pyc files
    self.assertTrue(path_exists(conf_file))
    self.cp.read([conf_file])
    self.cp.parse_all()
    self.assertEqual(self.cp.get('__main__', 'ignore').value, [r'.*\.pyc'])
def test_log_level_new_config(self):
    """Test log_level upgrade hook with new config."""
    conf_file = os.path.join(self.test_root, 'test_new_config.conf')
    # write the new-style logging values to the config file
    with open_file(conf_file, 'w') as fp:
        fp.write('[logging]\n')
        fp.write('level = DEBUG\n')
    self.assertTrue(path_exists(conf_file))
    self.cp.read([conf_file])
    self.cp.parse_all()
    self.assertEqual(self.cp.get('logging', 'level').value, 10)
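
# Note: a minimal sketch (not part of the test suite) of the kind of parser
# hook the `level = DEBUG` -> 10 assertion above implies: mapping a level
# name from the config file to the stdlib logging constant (DEBUG == 10).
# The function name is an assumption for illustration only.
import logging


def parse_log_level(value):
    """Map a level name like 'DEBUG' to its numeric logging value."""
    try:
        return int(value)
    except ValueError:
        return logging.getLevelName(value.upper())
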