def test_posting_received_raw(self):
    book = p_utils.temporary_log_book()

    with base.connect_close(self.board):
        self.assertTrue(self.board.connected)
        self.assertEqual(0, self.board.job_count)
        posted_job = self.board.post('test', book)

        self.assertEqual(self.board, posted_job.board)
        self.assertEqual(1, self.board.job_count)
        self.assertIn(posted_job.uuid,
                      [j.uuid for j in self.board.iterjobs()])

        # Filter out paths created by the running process itself; we are
        # not interested in those...
        paths = {}
        for (path, data) in six.iteritems(self.client.storage.paths):
            if path in self.bad_paths:
                continue
            paths[path] = data

        # Check the actual data that was posted.
        self.assertEqual(1, len(paths))
        path_key = list(six.iterkeys(paths))[0]
        self.assertTrue(len(paths[path_key]['data']) > 0)
        self.assertDictEqual({
            'uuid': posted_job.uuid,
            'name': posted_job.name,
            'book': {
                'name': book.name,
                'uuid': book.uuid,
            },
            'details': {},
        }, jsonutils.loads(misc.binary_decode(paths[path_key]['data'])))
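
# An illustrative round-trip of the storage format checked above: a posted
# job is persisted as encoded JSON, so decoding the raw node data and
# parsing it should recover the original payload. This is a hedged sketch
# (the function name and literal values are hypothetical, not tied to any
# real job):
def _example_payload_roundtrip():
    payload = {'uuid': 'some-uuid', 'name': 'test', 'details': {}}
    raw = jsonutils.dumps(payload).encode('utf-8')
    assert jsonutils.loads(misc.binary_decode(raw)) == payload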
def test_register_entity(self):
    conductor_name = "conductor-abc@localhost:4123"
    entity_instance = entity.Entity("conductor", conductor_name, {})
    with base.connect_close(self.board):
        self.board.register_entity(entity_instance)

    # Check '.entity' node has been created
    self.assertTrue(self.board.entity_path in self.client.storage.paths)

    conductor_entity_path = k_paths.join(self.board.entity_path,
                                         'conductor',
                                         conductor_name)
    self.assertTrue(conductor_entity_path in self.client.storage.paths)
    conductor_data = (
        self.client.storage.paths[conductor_entity_path]['data'])
    self.assertTrue(len(conductor_data) > 0)
    self.assertDictEqual(
        {
            'name': conductor_name,
            'kind': 'conductor',
            'metadata': {},
        },
        jsonutils.loads(misc.binary_decode(conductor_data)))

    entity_instance_2 = entity.Entity("non-sense", "other_name", {})
    with base.connect_close(self.board):
        self.assertRaises(excp.NotImplementedError,
                          self.board.register_entity,
                          entity_instance_2)
def _read_from(self, filename):
    # This is very similar to the oslo-incubator fileutils module, but
    # tweaked to not depend on a global cache, as well as tweaked to not
    # pull-in the oslo logging module (which is a huge pile of code).
    mtime = os.path.getmtime(filename)
    cache_info = self.backend.file_cache.setdefault(filename, {})
    if not cache_info or mtime > cache_info.get('mtime', 0):
        with open(filename, 'rb') as fp:
            cache_info['data'] = misc.binary_decode(
                fp.read(), encoding=self.backend.encoding)
            cache_info['mtime'] = mtime
    return cache_info['data']
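
# The mtime-based cache pattern used by _read_from above, distilled into a
# minimal standalone sketch. The function name and the dict-shaped ``cache``
# argument are hypothetical, for illustration only; ``os`` is assumed to be
# imported (as ``_read_from`` already requires).
def _cached_read_sketch(path, cache, encoding='utf-8'):
    """Return decoded file contents, re-reading only when the mtime advances."""
    mtime = os.path.getmtime(path)
    entry = cache.setdefault(path, {})
    if not entry or mtime > entry.get('mtime', 0):
        with open(path, 'rb') as fp:
            entry['data'] = fp.read().decode(encoding)
        entry['mtime'] = mtime
    return entry['data']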
def test_unicode_other_encoding(self):
    data = u'mañana'.encode('latin-1')
    result = misc.binary_decode(data, 'latin-1')
    self.assertIsInstance(result, six.text_type)
    self.assertEqual(result, u'mañana')
def _check(self, data, expected_result):
    result = misc.binary_decode(data)
    self.assertIsInstance(result, six.text_type)
    self.assertEqual(result, expected_result)
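
# For context, the ``misc.binary_decode`` helper exercised by the tests
# above can be approximated as follows. This is a hedged sketch of the
# contract the tests rely on (bytes are decoded to text, text passes
# through unchanged), not the actual taskflow implementation:
def _binary_decode_sketch(data, encoding='utf-8'):
    if isinstance(data, six.text_type):
        return data  # Already text; return unchanged.
    if isinstance(data, six.binary_type):
        return data.decode(encoding)
    raise TypeError("Expected binary or text type, not %r" % type(data))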