def test_posting_received_raw(self):
    """Verify a posted job lands in the board and in raw backend storage."""
    book = p_utils.temporary_log_book()
    with base.connect_close(self.board):
        self.assertTrue(self.board.connected)
        self.assertEqual(0, self.board.job_count)
        posted_job = self.board.post('test', book)
        self.assertEqual(self.board, posted_job.board)
        self.assertEqual(1, self.board.job_count)
        self.assertIn(posted_job.uuid,
                      [j.uuid for j in self.board.iterjobs()])
        # Remove paths that got created due to the running process that we
        # are not interested in...
        paths = dict(
            (path, data)
            for (path, data) in six.iteritems(self.client.storage.paths)
            if path not in self.bad_paths)
        # Check the actual data that was posted.
        self.assertEqual(1, len(paths))
        path_key = list(six.iterkeys(paths))[0]
        self.assertTrue(len(paths[path_key]['data']) > 0)
        expected = {
            'uuid': posted_job.uuid,
            'name': posted_job.name,
            'book': {
                'name': book.name,
                'uuid': book.uuid,
            },
            'details': {},
        }
        self.assertDictEqual(
            expected,
            jsonutils.loads(misc.binary_decode(paths[path_key]['data'])))
def _get():
    # Closure: ``self`` and ``uuid`` come from the enclosing scope.
    flow_dir = os.path.join(self._flow_path, uuid)
    metadata = jsonutils.loads(
        self._read_from(os.path.join(flow_dir, 'metadata')))
    flow_detail = p_utils.unformat_flow_detail(uuid, metadata)
    tasks_dir = os.path.join(flow_dir, 'tasks')
    task_uuids = []
    try:
        task_uuids = [entry for entry in os.listdir(tasks_dir)
                      if os.path.islink(os.path.join(tasks_dir, entry))]
    except EnvironmentError as e:
        # A missing tasks directory just means the flow has no tasks yet.
        if e.errno != errno.ENOENT:
            raise
    for task_uuid in task_uuids:
        flow_detail.add(self._get_task_details(task_uuid))
    return flow_detail
def _get():
    # Closure: ``self`` and ``uuid`` come from the enclosing scope.
    flow_dir = os.path.join(self._flow_path, uuid)
    metadata = jsonutils.loads(
        self._read_from(os.path.join(flow_dir, 'metadata')))
    flow_detail = _unformat_flow_detail(uuid, metadata)
    tasks_dir = os.path.join(flow_dir, 'tasks')
    task_uuids = []
    try:
        task_uuids = [entry for entry in os.listdir(tasks_dir)
                      if os.path.islink(os.path.join(tasks_dir, entry))]
    except EnvironmentError as e:
        # A missing tasks directory just means the flow has no tasks yet.
        if e.errno != errno.ENOENT:
            raise
    for task_uuid in task_uuids:
        flow_detail.add(self._get_task_details(task_uuid))
    return flow_detail
def decode_json(raw_data, root_types=(dict,)):
    """Parse raw data to get JSON object.

    Decodes a JSON from a given raw data binary and checks that the root
    type of that decoded object is in the allowed set of types (by
    default a JSON object/dict should be the root type).
    """
    try:
        decoded = jsonutils.loads(binary_decode(raw_data))
    except UnicodeDecodeError as e:
        raise ValueError("Expected UTF-8 decodable data: %s" % e)
    except ValueError as e:
        raise ValueError("Expected JSON decodable data: %s" % e)
    if root_types:
        allowed = tuple(root_types)
        if not isinstance(decoded, allowed):
            ok_types = ", ".join(str(t) for t in allowed)
            raise ValueError("Expected (%s) root types not: %s"
                             % (ok_types, type(decoded)))
    return decoded
def _get_logbook(self, book_uuid):
    """Load a logbook (and its linked flow details) from the filesystem."""
    book_path = os.path.join(self._book_path, book_uuid)
    try:
        raw_meta = self._read_from(os.path.join(book_path, 'metadata'))
    except EnvironmentError as e:
        if e.errno == errno.ENOENT:
            # Translate a missing metadata file into the backend's
            # not-found exception so callers get a uniform error.
            raise exc.NotFound("No logbook found with id: %s" % book_uuid)
        raise
    lb = p_utils.unformat_logbook(book_uuid, jsonutils.loads(raw_meta))
    flows_path = os.path.join(book_path, 'flows')
    flow_uuids = []
    try:
        flow_uuids = [entry for entry in os.listdir(flows_path)
                      if os.path.islink(os.path.join(flows_path, entry))]
    except EnvironmentError as e:
        # No flows directory means the logbook has no flows attached yet.
        if e.errno != errno.ENOENT:
            raise
    for flow_uuid in flow_uuids:
        lb.add(self._get_flow_details(flow_uuid))
    return lb
def _get_logbook(self, book_uuid):
    """Load a logbook (and its linked flow details) from the filesystem."""
    book_path = os.path.join(self._book_path, book_uuid)
    try:
        raw_meta = self._read_from(os.path.join(book_path, 'metadata'))
    except EnvironmentError as e:
        if e.errno == errno.ENOENT:
            # Translate a missing metadata file into the backend's
            # not-found exception so callers get a uniform error.
            raise exc.NotFound("No logbook found with id: %s" % book_uuid)
        raise
    lb = _unformat_logbook(book_uuid, jsonutils.loads(raw_meta))
    flows_path = os.path.join(book_path, 'flows')
    flow_uuids = []
    try:
        flow_uuids = [entry for entry in os.listdir(flows_path)
                      if os.path.islink(os.path.join(flows_path, entry))]
    except EnvironmentError as e:
        # No flows directory means the logbook has no flows attached yet.
        if e.errno != errno.ENOENT:
            raise
    for flow_uuid in flow_uuids:
        lb.add(self._get_flow_details(flow_uuid))
    return lb
def test_posting_received_raw(self):
    """Verify a posted job lands in the board and in raw backend storage."""
    book = p_utils.temporary_log_book()
    with connect_close(self.board):
        self.client.flush()
        self.assertTrue(self.board.connected)
        self.assertEqual(0, self.board.job_count)
        posted_job = self.board.post('test', book)
        self.client.flush()
        self.assertEqual(self.board, posted_job.board)
        # BUG FIX: this was ``assertTrue(1, self.board.job_count)`` which
        # treats the count as the failure *message* and always passes;
        # assertEqual actually checks the job count.
        self.assertEqual(1, self.board.job_count)
        self.assertIn(posted_job.uuid,
                      [j.uuid for j in self.board.iterjobs()])
        # Remove paths that got created due to the running process that we
        # are not interested in...
        paths = {}
        for (path, data) in six.iteritems(self.client.storage.paths):
            if path in self.bad_paths:
                continue
            paths[path] = data
        # Check the actual data that was posted.
        self.assertEqual(1, len(paths))
        path_key = list(six.iterkeys(paths))[0]
        self.assertIn(posted_job.uuid, path_key)
        self.assertTrue(len(paths[path_key]['data']) > 0)
        self.assertDictEqual(
            {
                'uuid': posted_job.uuid,
                'name': posted_job.name,
                'book': {
                    'name': book.name,
                    'uuid': book.uuid,
                },
                'details': {},
            },
            jsonutils.loads(misc.binary_decode(paths[path_key]['data'])))
def _get():
    # Closure: ``self`` and ``uuid`` come from the enclosing scope.
    detail_path = os.path.join(self._task_path, uuid)
    detail_data = jsonutils.loads(self._read_from(detail_path))
    return p_utils.unformat_task_detail(uuid, detail_data)
def process_result_value(self, value, dialect):
    """Decode a stored failure column; NULL columns pass through as None."""
    if value is None:
        return None
    decoded = jsonutils.loads(value)
    return persistence_utils.failure_from_dict(decoded)
def process_result_value(self, value, dialect):
    """Decode a JSON column value fetched from the database.

    SQLAlchemy passes ``None`` for NULL columns; return it unchanged
    instead of handing it to ``jsonutils.loads`` (which would raise).
    This matches the None handling of the failure-column decoder in
    this module.
    """
    if value is None:
        return None
    return jsonutils.loads(value)
def _get():
    # Closure: ``self`` and ``uuid`` come from the enclosing scope.
    detail_path = os.path.join(self._task_path, uuid)
    detail_data = jsonutils.loads(self._read_from(detail_path))
    return _unformat_task_detail(uuid, detail_data)