def test_retry_detail_save_with_task_failure(self):
    """Persist a retry detail holding a task failure and reload it intact."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    flow_det = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    retry_det = logbook.RetryDetail("retry-1", uuid=uuidutils.generate_uuid())
    fail = misc.Failure.from_exception(RuntimeError('fail'))
    retry_det.results.append((42, {'some-task': fail}))
    flow_det.add(retry_det)
    book.add(flow_det)
    # Write everything out.
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
        conn.update_flow_details(flow_det)
        conn.update_atom_details(retry_det)
    # Pull it back in and verify the failure round-tripped.
    with contextlib.closing(self._get_connection()) as conn:
        reloaded = conn.get_logbook(book_id)
        flow_det2 = reloaded.find(flow_det.uuid)
        retry_det2 = flow_det2.find(retry_det.uuid)
        self.assertIsInstance(retry_det2, logbook.RetryDetail)
        fail2 = retry_det2.results[0][1].get('some-task')
        self.assertIsInstance(fail2, misc.Failure)
        self.assertTrue(fail.matches(fail2))
def post_remote_pipeline_job(pipeline):
    """Persist a pipeline's work description and post it to the jobboard."""
    ME = os.getpid()
    print("Starting poster with pid: %s" % ME)
    my_name = "poster-%s" % ME
    persist_backend = backend_helper.default_persistence_backend()
    with contextlib.closing(persist_backend):
        # Make sure the persistence schema exists before we use it.
        with contextlib.closing(persist_backend.get_connection()) as conn:
            conn.upgrade()
        job_backend = backend_helper.default_jobboard_backend(my_name)
        job_backend.connect()
        with contextlib.closing(job_backend):
            # Record in the persistence backend the unit of work we want
            # completed and the factory that can recreate the flow which
            # performs that work.
            book = logbook.LogBook("post-from-%s" % my_name)
            flow_detail = logbook.FlowDetail("sample-from-%s" % my_name,
                                             uuidutils.generate_uuid())
            book.add(flow_detail)
            with contextlib.closing(persist_backend.get_connection()) as conn:
                conn.save_logbook(book)
            engines.save_factory_details(flow_detail,
                                         pipeline_factory.make_pipeline_flow,
                                         [pipeline.name],
                                         pipeline.kwargs,
                                         backend=persist_backend)
            # Post, and be done with it!
            job = job_backend.post("sample-job-from-%s" % my_name, book=book)
            print("Posted: %s" % job)
            return job
def save_logbook(self, book):
    """Save (create or merge) a logbook into the in-memory backend.

    :param book: the logbook to persist; its flow details are merged
                 into any previously stored copies.
    :returns: the stored (merged) logbook model.
    """
    # Get a existing logbook model (or create it if it isn't there).
    try:
        e_lb = self.backend.log_books[book.uuid]
    except KeyError:
        e_lb = logbook.LogBook(book.name, book.uuid,
                               updated_at=book.updated_at,
                               created_at=timeutils.utcnow())
        self.backend.log_books[e_lb.uuid] = e_lb
    else:
        # TODO(harlowja): figure out a better way to set this property
        # without actually setting a 'private' property.
        e_lb._updated_at = timeutils.utcnow()
    # Merge top-level logbook fields (name, meta, ...) into the stored copy.
    p_utils.logbook_merge(e_lb, book, deep_copy=True)
    # Add anything in to the new logbook that isn't already
    # in the existing logbook.
    for flow_detail in book:
        try:
            e_fd = self.backend.flow_details[flow_detail.uuid]
        except KeyError:
            e_fd = logbook.FlowDetail(name=flow_detail.name,
                                      uuid=flow_detail.uuid)
            # NOTE(review): the incoming flow_detail (not the freshly
            # created e_fd) is attached to e_lb here, while e_fd is what
            # gets indexed in backend.flow_details -- confirm the two
            # are kept consistent by the merge below.
            e_lb.add(flow_detail)
            self.backend.flow_details[flow_detail.uuid] = e_fd
        # Merge this flow detail (and its contained atoms) into the
        # stored copy.
        p_utils.flow_details_merge(e_fd, flow_detail, deep_copy=True)
        self._save_flowdetail_tasks(e_fd, flow_detail)
    return e_lb
def _unformat_logbook(uuid, lb_data):
    """Rebuild a LogBook object from its serialized dict form."""
    book = logbook.LogBook(
        name=lb_data['name'],
        uuid=uuid,
        updated_at=_str_2_datetime(lb_data['updated_at']),
        created_at=_str_2_datetime(lb_data['created_at']))
    book.meta = lb_data.get('meta')
    return book
def run_poster():
    """Post a single song job to the jobboard, then exit."""
    # This just posts a single job and then ends...
    print("Starting poster with pid: %s" % ME)
    my_name = "poster-%s" % ME
    persist_backend = persistence_backends.fetch(PERSISTENCE_URI)
    with contextlib.closing(persist_backend):
        # Make sure the persistence schema exists before using it.
        with contextlib.closing(persist_backend.get_connection()) as conn:
            conn.upgrade()
        job_backend = job_backends.fetch(my_name, JB_CONF,
                                         persistence=persist_backend)
        job_backend.connect()
        with contextlib.closing(job_backend):
            # Record in persistence the unit of work we want completed
            # and the factory that recreates the flow doing that work.
            book = logbook.LogBook("post-from-%s" % my_name)
            flow_detail = logbook.FlowDetail("song-from-%s" % my_name,
                                             uuidutils.generate_uuid())
            book.add(flow_detail)
            with contextlib.closing(persist_backend.get_connection()) as conn:
                conn.save_logbook(book)
            engines.save_factory_details(flow_detail, make_bottles,
                                         [HOW_MANY_BOTTLES], {},
                                         backend=persist_backend)
            # Post, and be done with it!
            job = job_backend.post("song-from-%s" % my_name, book=book)
            print("Posted: %s" % job)
            print("Goodbye...")
def test_retry_detail_save_intention(self):
    """Intention changes on a retry detail survive a save/load cycle."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    flow_det = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    book.add(flow_det)
    retry_det = logbook.RetryDetail("retry-1", uuid=uuidutils.generate_uuid())
    flow_det.add(retry_det)
    # Initial save.
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
        conn.update_flow_details(flow_det)
        conn.update_atom_details(retry_det)
    # Flip the intention and persist only the atom.
    retry_det.intention = states.REVERT
    with contextlib.closing(self._get_connection()) as conn:
        conn.update_atom_details(retry_det)
    # Reload and check that the new intention stuck.
    with contextlib.closing(self._get_connection()) as conn:
        reloaded = conn.get_logbook(book_id)
        flow_det2 = reloaded.find(flow_det.uuid)
        retry_det2 = flow_det2.find(retry_det.uuid)
        self.assertEqual(retry_det2.intention, states.REVERT)
        self.assertIsInstance(retry_det2, logbook.RetryDetail)
def test_logbook_save(self):
    """Saving a logbook (empty, then with a flow detail) round-trips.

    Fix: use assertEqual -- assertEquals is a deprecated unittest alias.
    """
    # Create a generic logbook to save.
    lb_id = uuidutils.generate_uuid()
    lb_name = 'lb-%s' % (lb_id)
    lb = logbook.LogBook(lb_name, lb_id)
    # Save the logbook and record its uuid and name for cleanup.
    b_api.logbook_save(lb)
    self.lb_names.append(lb_name)
    self.lb_ids.append(lb_id)
    # Check that the saved logbook exists in the backend.
    actual = b_api.logbook_get(lb_id)
    self.assertIsNotNone(actual)
    # Check that the saved logbook has no flowdetails yet.
    self.assertEqual(len(actual), 0)
    # Add a flowdetail to the logbook.
    fd = b_api.flowdetail_get(self.fd_ids[0])
    lb.add_flow_detail(fd)
    # Save the updated logbook.
    b_api.logbook_save(lb)
    # Check that the updated logbook is still in the backend.
    actual = b_api.logbook_get(lb_id)
    self.assertIsNotNone(actual)
    # Check that the added flowdetail was recorded.
    self.assertEqual(len(actual), 1)
def test_task_detail_with_failure(self):
    """A captured task failure can be persisted and read back intact."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    flow_det = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    book.add(flow_det)
    task_det = logbook.TaskDetail("detail-1", uuid=uuidutils.generate_uuid())
    # Capture a real exception (with traceback) into the task detail.
    try:
        raise RuntimeError('Woot!')
    except Exception:
        task_det.failure = misc.Failure()
    flow_det.add(task_det)
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
        conn.update_flow_details(flow_det)
        conn.update_task_details(task_det)
    # Read failure back and compare the interesting pieces.
    with contextlib.closing(self._get_connection()) as conn:
        reloaded = conn.get_logbook(book_id)
        flow_det2 = reloaded.find(flow_det.uuid)
        task_det2 = flow_det2.find(task_det.uuid)
        failure = task_det2.failure
        self.assertEqual(failure.exception_str, 'Woot!')
        self.assertIs(failure.check(RuntimeError), RuntimeError)
        self.assertEqual(failure.traceback_str,
                         task_det.failure.traceback_str)
def test_task_detail_meta_update(self):
    """Metadata changes on a task detail are persisted by an atom update."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    flow_det = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    book.add(flow_det)
    task_det = logbook.TaskDetail("detail-1", uuid=uuidutils.generate_uuid())
    task_det.meta = {'test': 42}
    flow_det.add(task_det)
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
        conn.update_flow_details(flow_det)
        conn.update_atom_details(task_det)
    # Mutate the meta and push only the atom update.
    task_det.meta['test'] = 43
    with contextlib.closing(self._get_connection()) as conn:
        conn.update_atom_details(task_det)
    # Reload and verify the mutated value came back.
    with contextlib.closing(self._get_connection()) as conn:
        reloaded = conn.get_logbook(book_id)
        flow_det2 = reloaded.find(flow_det.uuid)
        task_det2 = flow_det2.find(task_det.uuid)
        self.assertEqual(task_det2.meta.get('test'), 43)
        self.assertIsInstance(task_det2, logbook.TaskDetail)
def submit_task(self, flow_factory, **kwargs):
    """Post a job that runs ``flow_factory`` with ``kwargs`` as its store.

    Builds a logbook + flow detail describing the unit of work, saves
    them into the persistence backend, and posts a job referencing them
    on the jobboard.

    Fix: log with lazy %-style arguments instead of eager str.format so
    the message is only rendered when the log level is enabled.
    """
    with self.persistence as persistence:
        with self.driver.job_board(
                self.jobboard_backend_conf_worker.copy(),
                persistence=persistence) as board:
            job_id = uuidutils.generate_uuid()
            job_name = '-'.join([flow_factory.__name__, job_id])
            job_logbook = logbook.LogBook(job_name)
            flow_detail = logbook.FlowDetail(job_name,
                                             uuidutils.generate_uuid())
            # No extra factory arguments are needed; the store (kwargs)
            # is passed via the job details instead.
            factory_args = ()
            factory_kwargs = {}
            engines.save_factory_details(flow_detail, flow_factory,
                                         factory_args, factory_kwargs)
            job_logbook.add(flow_detail)
            persistence.get_connection().save_logbook(job_logbook)
            job_details = {
                'store': kwargs,
            }
            job = board.post(job_name, book=job_logbook,
                             details=job_details)
            LOG.info("Posted: %s", job)
def test_logbook_merge_flow_detail(self):
    """Saving two books sharing a uuid merges their flow details."""
    book_id = uuidutils.generate_uuid()
    book_name = 'lb-%s' % (book_id)
    first = logbook.LogBook(name=book_name, uuid=book_id)
    first.add(logbook.FlowDetail('test', uuid=uuidutils.generate_uuid()))
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(first)
    # A second book with the same identity but a different flow detail.
    second = logbook.LogBook(name=book_name, uuid=book_id)
    second.add(logbook.FlowDetail('test2', uuid=uuidutils.generate_uuid()))
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(second)
    # The stored book should now contain both flow details.
    with contextlib.closing(self._get_connection()) as conn:
        merged = conn.get_logbook(book_id)
        self.assertEqual(2, len(merged))
def test_logbook_add_task_detail(self):
    """A task detail added to a flow detail is persisted with the book.

    Fix: count the tasks in the *reloaded* book (lb2) -- the original
    iterated the in-memory book, which verified nothing about what was
    persisted and also shadowed the outer ``fd`` variable.
    """
    lb_id = uuidutils.generate_uuid()
    lb_name = 'lb-%s' % (lb_id)
    lb = logbook.LogBook(name=lb_name, uuid=lb_id)
    fd = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    td = logbook.TaskDetail("detail-1", uuid=uuidutils.generate_uuid())
    td.version = '4.2'
    fd.add(td)
    lb.add(fd)
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(lb)
    with contextlib.closing(self._get_connection()) as conn:
        lb2 = conn.get_logbook(lb_id)
        self.assertEqual(1, len(lb2))
        tasks = 0
        for stored_fd in lb2:
            tasks += len(stored_fd)
        self.assertEqual(1, tasks)
    with contextlib.closing(self._get_connection()) as conn:
        lb2 = conn.get_logbook(lb_id)
        fd2 = lb2.find(fd.uuid)
        td2 = fd2.find(td.uuid)
        self.assertIsNot(td2, None)
        self.assertEqual(td2.name, 'detail-1')
        self.assertEqual(td2.version, '4.2')
def test_logbook_simple_save(self):
    """A logbook saved via its backend can be loaded again intact.

    Fix: use assertEqual (assertEquals is a deprecated unittest alias)
    and drop the redundant ``lb = None`` that followed ``del lb``.
    """
    lb_id = uuidutils.generate_uuid()
    lb_meta = {'1': 2}
    lb_name = 'lb-%s' % (lb_id)
    lb = logbook.LogBook(name=lb_name, uuid=lb_id,
                         backend=self._get_backend())
    lb.meta = lb_meta
    # Should not already exist.
    self.assertRaises(exc.NotFound, logbook.load, lb_id,
                      backend=self._get_backend())
    lb.save()
    # Drop the local reference to prove the reload comes from the backend.
    del lb
    lb = logbook.load(lb_id, backend=self._get_backend())
    self.assertEqual(lb_name, lb.name)
    self.assertEqual(0, len(lb))
    self.assertEqual(lb_meta, lb.meta)
    self.assertIsNone(lb.updated_at)
    self.assertIsNotNone(lb.created_at)
def _convert_lb_to_external(lb_m):
    """Convert an internal logbook model into its external representation."""
    external = logbook.LogBook(lb_m.name, lb_m.uuid)
    external.updated_at = lb_m.updated_at
    external.created_at = lb_m.created_at
    external.meta = lb_m.meta
    # Convert and attach each contained flow detail as well.
    for fd_m in lb_m.flowdetails:
        external.add(_convert_fd_to_external(fd_m))
    return external
def _convert_lb_to_external(lb_m):
    """Don't expose the internal sqlalchemy ORM model to the external api."""
    external = logbook.LogBook(lb_m.name, lb_m.uuid,
                               updated_at=lb_m.updated_at,
                               created_at=lb_m.created_at)
    external.meta = lb_m.meta
    # Convert and attach each contained flow detail as well.
    for fd_m in lb_m.flowdetails:
        external.add(_convert_fd_to_external(fd_m))
    return external
def test_logbook_merge_flow_detail(self):
    """Re-saving a same-uuid book with a new flow detail merges both.

    Fix: use assertEqual -- assertEquals is a deprecated unittest alias.
    """
    lb_id = uuidutils.generate_uuid()
    lb_name = 'lb-%s' % (lb_id)
    lb = logbook.LogBook(name=lb_name, uuid=lb_id,
                         backend=self._get_backend())
    fd = flowdetail.FlowDetail('test', uuid=uuidutils.generate_uuid())
    lb.add(fd)
    lb.save()
    # Same identity, different flow detail -> should merge on save.
    lb2 = logbook.LogBook(name=lb_name, uuid=lb_id,
                          backend=self._get_backend())
    fd = flowdetail.FlowDetail('test2', uuid=uuidutils.generate_uuid())
    lb2.add(fd)
    lb2.save()
    lb3 = logbook.load(lb_id, backend=self._get_backend())
    self.assertEqual(2, len(lb3))
def _load_book(self, book_uuid, book_name):
    """Return the logbook backing a job.

    Falls back to a fresh, empty logbook when no backend is configured.
    """
    backend = self._backend
    if backend is not None:
        # TODO(harlowja): this assumes the job was posted using the same
        # backend as this loader; that seems an ok assumption to start
        # with, and can be adjusted in the future if multi-backend
        # loaders (aka a registry of loaders) become a use-case.
        with contextlib.closing(backend.get_connection()) as conn:
            return conn.get_logbook(book_uuid)
    # No backend to attempt to fetch from :-(
    return logbook.LogBook(name=book_name, uuid=book_uuid)
def temporary_log_book(backend=None):
    """Create a throwaway logbook, optionally saved into ``backend``.

    Mainly useful for tests and other use cases where a temporary
    logbook is needed for a short period of time.
    """
    book = logbook.LogBook('tmp')
    if backend is None:
        return book
    with contextlib.closing(backend.get_connection()) as conn:
        conn.save_logbook(book)
    return book
def test_logbook_lazy_fetch(self):
    """Lazily fetched logbooks come back without their flow details."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    book.add(logbook.FlowDetail('test', uuid=uuidutils.generate_uuid()))
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
    with contextlib.closing(self._get_connection()) as conn:
        lazy_book = conn.get_logbook(book_id, lazy=True)
        # The lazy copy is empty while the original still has its detail.
        self.assertEqual(0, len(lazy_book))
        self.assertEqual(1, len(book))
def create_job(args):
    """Post a new job (backed by a freshly saved logbook) to the jobboard."""
    details = json.loads(args.details)
    book = logbook.LogBook(args.job_name)
    # Prefer a sqlite file when sqlalchemy is importable; otherwise fall
    # back to the plain file based persistence backend.
    if example_utils.SQLALCHEMY_AVAILABLE:
        persist_path = os.path.join(tempfile.gettempdir(), "persisting.db")
        backend_uri = "sqlite:///%s" % (persist_path)
    else:
        persist_path = os.path.join(tempfile.gettempdir(), "persisting")
        backend_uri = "file:///%s" % (persist_path)
    with example_utils.get_backend(backend_uri) as backend:
        backend.get_connection().save_logbook(book)
        with jobboard(args.board_name, conf, persistence=backend) as jb:
            jb.post(args.job_name, book, details=details)
def test_logbook_add_flow_detail(self):
    """A flow detail added before saving is present after reloading."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    flow_det = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    book.add(flow_det)
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
    with contextlib.closing(self._get_connection()) as conn:
        reloaded = conn.get_logbook(book_id)
        self.assertEqual(1, len(reloaded))
        self.assertEqual(1, len(book))
        self.assertEqual(flow_det.name, reloaded.find(flow_det.uuid).name)
def test_logbook_delete(self):
    """Destroying a logbook removes it; destroying twice raises NotFound."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    # Can't destroy what was never saved.
    with contextlib.closing(self._get_connection()) as conn:
        self.assertRaises(exc.NotFound, conn.destroy_logbook, book_id)
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
    # It should be fetchable once saved.
    with contextlib.closing(self._get_connection()) as conn:
        self.assertIsNotNone(conn.get_logbook(book_id))
    # First destroy succeeds, second raises.
    with contextlib.closing(self._get_connection()) as conn:
        conn.destroy_logbook(book_id)
        self.assertRaises(exc.NotFound, conn.destroy_logbook, book_id)
def test_flow_detail_lazy_fetch(self):
    """Lazily fetched flow details come back without their task details."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    flow_det = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    task_det = logbook.TaskDetail("detail-1", uuid=uuidutils.generate_uuid())
    task_det.version = '4.2'
    flow_det.add(task_det)
    book.add(flow_det)
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
    with contextlib.closing(self._get_connection()) as conn:
        lazy_fd = conn.get_flow_details(flow_det.uuid, lazy=True)
        # The lazy copy is empty while the original keeps its task.
        self.assertEqual(0, len(lazy_fd))
        self.assertEqual(1, len(flow_det))
def temporary_flow_detail():
    """Create (and save) an in-memory logbook + flow detail pair.

    Should be useful for tests and other use cases where real
    persistence is not needed.
    """
    book = logbook.LogBook('tmp', backend='memory')
    detail = flowdetail.FlowDetail(name='tmp',
                                   uuid=uuidutils.generate_uuid(),
                                   backend='memory')
    book.add(detail)
    book.save()
    detail.save()
    return detail
def logbook_get(lb_id):
    """Gets a LogBook with matching lb_id, if it exists"""
    # Get a session to interact with the database.
    session = sql_session.get_session()
    with session.begin():
        # Fetch the ORM model and convert it into a generic LogBook.
        lb = _logbook_get_model(lb_id, session=session)
        result = logbook.LogBook(lb.name, lb.logbook_id)
        # Attach the generic FlowDetails belonging to this LogBook.
        for fd in lb.flowdetails:
            result.add_flow_detail(flowdetail_get(fd.flowdetail_id))
        return result
def test_flow_detail_save(self):
    """A flow detail can only be saved once its owning logbook exists."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id,
                           backend=self._get_backend())
    detail = flowdetail.FlowDetail('test', uuid=uuidutils.generate_uuid())
    book.add(detail)
    # Ensure we can't save it since its owning logbook hasn't been
    # saved.
    self.assertRaises(exc.NotFound, detail.save)
    # Ok now we should be able to save it.
    book.save()
    detail.save()
def test_logbook_delete(self):
    """A logbook can be deleted only once, and only after being saved."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id,
                           backend=self._get_backend())
    # Ensure we can't delete it since it hasn't been saved.
    self.assertRaises(exc.NotFound, book.delete)
    book.save()
    # It should load fine once saved.
    self.assertIsNotNone(logbook.load(book_id, backend=self._get_backend()))
    book.delete()
    # A second delete must fail since it is already gone.
    self.assertRaises(exc.NotFound, book.delete)
def test_flow_detail_save(self):
    """Flow details can't be updated before their owning logbook exists."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    flow_det = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    book.add(flow_det)
    # Flow details can not exist on their own without a connection to a
    # logbook, so updating one before its book is saved must fail.
    with contextlib.closing(self._get_connection()) as conn:
        self.assertRaises(exc.NotFound, conn.get_logbook, book_id)
        self.assertRaises(exc.NotFound, conn.update_flow_details, flow_det)
    # Ok now we should be able to save both.
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
        conn.update_flow_details(flow_det)
def test_flow_detail_update_not_existing(self):
    """Re-saving a book with an extra flow detail persists both details."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    first = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    book.add(first)
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
    # Add a second (not yet stored) flow detail and save again.
    second = logbook.FlowDetail('test-2', uuid=uuidutils.generate_uuid())
    book.add(second)
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
    with contextlib.closing(self._get_connection()) as conn:
        reloaded = conn.get_logbook(book.uuid)
        self.assertIsNotNone(reloaded.find(first.uuid))
        self.assertIsNotNone(reloaded.find(second.uuid))
def test_flow_detail_meta_update(self):
    """Metadata changes on a flow detail are persisted by an update."""
    book_id = uuidutils.generate_uuid()
    book = logbook.LogBook(name='lb-%s' % (book_id), uuid=book_id)
    flow_det = logbook.FlowDetail('test', uuid=uuidutils.generate_uuid())
    flow_det.meta = {'test': 42}
    book.add(flow_det)
    with contextlib.closing(self._get_connection()) as conn:
        conn.save_logbook(book)
        conn.update_flow_details(flow_det)
    # Mutate the meta and push only the flow detail update.
    flow_det.meta['test'] = 43
    with contextlib.closing(self._get_connection()) as conn:
        conn.update_flow_details(flow_det)
    # Reload and verify the mutated value came back.
    with contextlib.closing(self._get_connection()) as conn:
        reloaded = conn.get_logbook(book_id)
        flow_det2 = reloaded.find(flow_det.uuid)
        self.assertEqual(flow_det2.meta.get('test'), 43)