Example 1
    def submit_task(self, flow_factory, **kwargs):
        """submit a task.

        """
        with self.persistence as persistence:

            with self.driver.job_board(self.jobboard_backend_conf.copy(),
                                       persistence=persistence) as board:

                job_id = uuidutils.generate_uuid()
                job_name = '-'.join([flow_factory.__name__, job_id])
                job_logbook = models.LogBook(job_name)
                flow_detail = models.FlowDetail(job_name,
                                                uuidutils.generate_uuid())
                factory_args = ()
                factory_kwargs = {}
                engines.save_factory_details(flow_detail, flow_factory,
                                             factory_args, factory_kwargs)
                job_logbook.add(flow_detail)
                persistence.get_connection().save_logbook(job_logbook)
                job_details = {'store': kwargs}
                job = board.post(job_name,
                                 book=job_logbook,
                                 details=job_details)
                LOG.info("{0} posted".format(job))
Example 2
    def generate_uuid(self):
        backend = task_backend.get_backend()
        book = models.LogBook(r"{}_{}".format(
            self.name,
            datetime.datetime.now().strftime(
                xdatetime.FORMAT_WITH_SECOND_FOR_PATH)))
        with contextlib.closing(backend.get_connection()) as conn:
            conn.save_logbook(book)

        try:
            create_flow = create_flow_for_kvm
            self._engine = engines.load_from_factory(
                create_flow,
                backend=backend,
                book=book,
                engine='serial',
                factory_args=(self.name, self._id, self._debug))

            self._book_uuid = book.uuid
            return {
                'book_id': book.uuid,
                'flow_id': self._engine.storage.flow_uuid
            }
        except Exception as e:
            _logger.error(
                r'TakeoverKVMEntrance generate_uuid failed {}'.format(e))
            _logger.error('TakeoverKVMEntrance {}'.format(
                traceback.format_exc()))
            with contextlib.closing(backend.get_connection()) as conn:
                conn.destroy_logbook(book.uuid)
            raise e
Example 3
def run_poster():
    # This just posts a single job and then ends...
    print("Starting poster with pid: %s" % ME)
    my_name = "poster-%s" % ME
    persist_backend = persistence_backends.fetch(PERSISTENCE_URI)
    with contextlib.closing(persist_backend):
        with contextlib.closing(persist_backend.get_connection()) as conn:
            conn.upgrade()
        job_backend = job_backends.fetch(my_name,
                                         JB_CONF,
                                         persistence=persist_backend)
        job_backend.connect()
        with contextlib.closing(job_backend):
            # Create information in the persistence backend about the
            # unit of work we want to complete and the factory that can
            # be called to create the tasks needed to get that work done.
            lb = models.LogBook("post-from-%s" % my_name)
            fd = models.FlowDetail("song-from-%s" % my_name,
                                   uuidutils.generate_uuid())
            lb.add(fd)
            with contextlib.closing(persist_backend.get_connection()) as conn:
                conn.save_logbook(lb)
            engines.save_factory_details(fd,
                                         make_bottles, [HOW_MANY_BOTTLES], {},
                                         backend=persist_backend)
            # Post, and be done with it!
            jb = job_backend.post("song-from-%s" % my_name, book=lb)
            print("Posted: %s" % jb)
            print("Goodbye...")
Example 4
    def generate_uuid(self):
        backend = task_backend.get_backend()
        book = models.LogBook(r"{}_{}".format(
            self.name,
            datetime.datetime.now().strftime(
                xdatetime.FORMAT_WITH_SECOND_FOR_PATH)))
        with contextlib.closing(backend.get_connection()) as conn:
            conn.save_logbook(book)

        try:
            self._engine = engines.load_from_factory(
                self._flow_func,
                backend=backend,
                book=book,
                engine='serial',
                factory_args=(self.name, self.task_id))

            self._book_uuid = book.uuid
            return {
                'book_id': book.uuid,
                'flow_id': self._engine.storage.flow_uuid
            }
        except Exception as e:
            _logger.error(r'generate_uuid failed {}'.format(e), exc_info=True)
            with contextlib.closing(backend.get_connection()) as conn:
                conn.destroy_logbook(book.uuid)
            raise e
Example 5
    def test_retry_detail_save_with_task_failure(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)
        rd = models.RetryDetail("retry-1", uuid=uuidutils.generate_uuid())
        fail = failure.Failure.from_exception(RuntimeError('fail'))
        rd.results.append((42, {'some-task': fail}))
        fd.add(rd)

        # save it
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
            conn.update_flow_details(fd)
            conn.update_atom_details(rd)

        # now read it back
        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id)
        fd2 = lb2.find(fd.uuid)
        rd2 = fd2.find(rd.uuid)
        self.assertIsInstance(rd2, models.RetryDetail)
        fail2 = rd2.results[0][1].get('some-task')
        self.assertIsInstance(fail2, failure.Failure)
        self.assertTrue(fail.matches(fail2))
Example 6
def taskflow_base_worker(arg):
    while True:
        self = _taskflow_queue.get()
        try:
            flow = invt_taskflow_factory[self.category]()
            book = models.LogBook('logbook-%s' % (self.category))
            flow_detail = models.FlowDetail('flowdetail-%s' % (self.category),
                                            str(uuid.uuid4()))
            book.add(flow_detail)
            with contextlib.closing(
                    _taskflow_backend.get_connection()) as conn:
                conn.save_logbook(book)
            self.book_id = book.uuid
            self.flow_id = flow_detail.uuid
            # TODO: optimize the engine execution process later; use a
            # parallel engine instead of a serial one, and maybe share an
            # executor (a sketch follows this example); please refer to
            # https://docs.openstack.org/taskflow/latest/user/examples.html#sharing-a-thread-pool-executor-in-parallel
            self.engine = engines.load(flow,
                                       backend=_taskflow_backend,
                                       flow_detail=flow_detail,
                                       book=book,
                                       store=self.store)
            self.engine.run()
            self.schedule_status = E3TASKFLOW_SCHEDULE_STATUS_SUCCESSFUL
            if self.callback:
                self.callback(self)
        except Exception as e:
            self.failure = str(traceback.format_exc())
            self.schedule_status = E3TASKFLOW_SCHEDULE_STATUS_FAILED
            if self.callback:
                self.callback(self, e)
        finally:
            self.sync_state()
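The TODO in the snippet above points at the taskflow docs example on sharing a thread pool executor. A hedged sketch of that optimization, assuming the module-level _taskflow_backend from this example; the futurist executor and max_workers value are illustrative, not part of the original code:

import futurist

from taskflow import engines

# A single shared executor that parallel engines can reuse, instead of each
# engine creating (and tearing down) its own thread pool.
_shared_executor = futurist.ThreadPoolExecutor(max_workers=4)

def load_parallel_engine(flow, flow_detail, book, store):
    # Same wiring as engines.load(...) above, but with a parallel engine
    # that borrows the shared executor.
    return engines.load(flow,
                        backend=_taskflow_backend,
                        flow_detail=flow_detail,
                        book=book,
                        store=store,
                        engine='parallel',
                        executor=_shared_executor)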
Example 7
    def test_logbook_add_task_detail(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        td = models.TaskDetail("detail-1", uuid=uuidutils.generate_uuid())
        td.version = '4.2'
        fd.add(td)
        lb.add(fd)
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id)
            self.assertEqual(1, len(lb2))
            tasks = 0
            for fd in lb:
                tasks += len(fd)
            self.assertEqual(1, tasks)
        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id)
            fd2 = lb2.find(fd.uuid)
            td2 = fd2.find(td.uuid)
            self.assertIsNot(td2, None)
            self.assertEqual(td2.name, 'detail-1')
            self.assertEqual(td2.version, '4.2')
            self.assertEqual(td2.intention, states.EXECUTE)
Example 8
    def test_logbook_merge_flow_detail(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
        lb2 = models.LogBook(name=lb_name, uuid=lb_id)
        fd2 = models.FlowDetail('test2', uuid=uuidutils.generate_uuid())
        lb2.add(fd2)
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb2)
        with contextlib.closing(self._get_connection()) as conn:
            lb3 = conn.get_logbook(lb_id)
            self.assertEqual(2, len(lb3))
Example 9
    def test_task_detail_with_failure(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)
        td = models.TaskDetail("detail-1", uuid=uuidutils.generate_uuid())

        try:
            raise RuntimeError('Woot!')
        except Exception:
            td.failure = failure.Failure()

        fd.add(td)

        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
            conn.update_flow_details(fd)
            conn.update_atom_details(td)

        # Read failure back
        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id)
        fd2 = lb2.find(fd.uuid)
        td2 = fd2.find(td.uuid)
        self.assertEqual(td2.failure.exception_str, 'Woot!')
        self.assertIs(td2.failure.check(RuntimeError), RuntimeError)
        self.assertEqual(td2.failure.traceback_str, td.failure.traceback_str)
        self.assertIsInstance(td2, models.TaskDetail)
Example 10
    def test_task_detail_meta_update(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)
        td = models.TaskDetail("detail-1", uuid=uuidutils.generate_uuid())
        td.meta = {'test': 42}
        fd.add(td)

        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
            conn.update_flow_details(fd)
            conn.update_atom_details(td)

        td.meta['test'] = 43
        with contextlib.closing(self._get_connection()) as conn:
            conn.update_atom_details(td)

        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id)
        fd2 = lb2.find(fd.uuid)
        td2 = fd2.find(td.uuid)
        self.assertEqual(td2.meta.get('test'), 43)
        self.assertIsInstance(td2, models.TaskDetail)
Example 11
    def test_retry_detail_save_intention(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)
        rd = models.RetryDetail("retry-1", uuid=uuidutils.generate_uuid())
        fd.add(rd)

        # save it
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
            conn.update_flow_details(fd)
            conn.update_atom_details(rd)

        # change intention and save
        rd.intention = states.REVERT
        with contextlib.closing(self._get_connection()) as conn:
            conn.update_atom_details(rd)

        # now read it back
        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id)
        fd2 = lb2.find(fd.uuid)
        rd2 = fd2.find(rd.uuid)
        self.assertEqual(rd2.intention, states.REVERT)
        self.assertIsInstance(rd2, models.RetryDetail)
Example 12
def temporary_log_book(backend=None):
    """Creates a temporary logbook for temporary usage in the given backend.

    Mainly useful for tests and other use cases where a temporary logbook
    is needed for a short-period of time.
    """
    book = models.LogBook('tmp')
    if backend is not None:
        with contextlib.closing(backend.get_connection()) as conn:
            conn.save_logbook(book)
    return book
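A quick usage sketch for the helper above, assuming taskflow's in-memory persistence backend (the backend choice and printed field are illustrative):

import contextlib

from taskflow.persistence import backends

# Fetch an in-memory backend, prepare it, and let temporary_log_book() save
# a throwaway logbook through a short-lived connection.
backend = backends.fetch({'connection': 'memory://'})
with contextlib.closing(backend.get_connection()) as conn:
    conn.upgrade()
book = temporary_log_book(backend)
print(book.uuid)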
Example 13
    def test_logbook_lazy_fetch(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id, lazy=True)
            self.assertEqual(0, len(lb2))
            self.assertEqual(1, len(lb))
Example 14
    def test_logbook_add_flow_detail(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id)
            self.assertEqual(1, len(lb2))
            self.assertEqual(1, len(lb))
            self.assertEqual(fd.name, lb2.find(fd.uuid).name)
Example 15
    def test_logbook_delete(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        with contextlib.closing(self._get_connection()) as conn:
            self.assertRaises(exc.NotFound, conn.destroy_logbook, lb_id)
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id)
            self.assertIsNotNone(lb2)
        with contextlib.closing(self._get_connection()) as conn:
            conn.destroy_logbook(lb_id)
            self.assertRaises(exc.NotFound, conn.destroy_logbook, lb_id)
Example 16
def ensure_logbook_exists():
    """
    Ensure the configured logbook exists in the persistence backend
    :return obj logbook: The retrieved or created logbook
    """
    log.debug("Ensuring logbook {} exists in "
              "persistence backend".format(LOGBOOK_NAME))
    with persistence_backend_connection() as conn:
        conn.upgrade()
        try:
            logbook = get_logbook_by_name(LOGBOOK_NAME, conn)
        except StopIteration:
            logbook = persistence_models.LogBook(LOGBOOK_NAME)
            conn.save_logbook(logbook)
    return logbook
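get_logbook_by_name() is not shown in this snippet; since its failure mode is StopIteration, it is presumably a next() over the connection's logbooks. A hypothetical sketch of such a helper:

def get_logbook_by_name(logbook_name, conn):
    # Hypothetical helper matching the call above: return the first logbook
    # whose name matches; next() raises StopIteration when none is found,
    # which ensure_logbook_exists() catches and answers by creating one.
    return next(book for book in conn.get_logbooks()
                if book.name == logbook_name)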
Example 17
    def test_flow_detail_lazy_fetch(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        td = models.TaskDetail("detail-1", uuid=uuidutils.generate_uuid())
        td.version = '4.2'
        fd.add(td)
        lb.add(fd)
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
        with contextlib.closing(self._get_connection()) as conn:
            fd2 = conn.get_flow_details(fd.uuid, lazy=True)
            self.assertEqual(0, len(fd2))
            self.assertEqual(1, len(fd))
Example 18
    def post(self,
             flow_factory,
             job_args=None,
             flow_args=None,
             flow_kwargs=None,
             tx_uuid=None):
        """Method for posting a new job to the jobboard

        :param flow_factory: Flow factory function for creating a flow instance
                             that will be executed as part of the job.
        :param job_args: 'store' arguments to be supplied to the engine
                         executing the flow for the job
        :param flow_args: Positional arguments to be passed to the flow factory
                          function
        :param flow_kwargs: Keyword arguments to be passed to the flow factory
                            function
        :param tx_uuid: Transaction UUID which will be injected as 'tx_uuid' in
                        job_args.  A tx_uuid will be generated if one is not
                        provided as an argument.
        :return: A taskflow.job.Job instance that represents the job that was
                 posted.
        """
        if isinstance(job_args, dict) and 'tx_uuid' in job_args:
            raise AttributeError("tx_uuid needs to be provided as an argument"
                                 " to Client.post, not as a member of job_args")

        if tx_uuid is None:
            tx_uuid = uuidutils.generate_uuid()

        job_name = "%s[%s]" % (flow_factory.__name__, tx_uuid)
        book = persistence_models.LogBook(job_name, uuid=tx_uuid)

        if flow_factory is not None:
            flow_detail = persistence_models.FlowDetail(
                job_name, str(uuid.uuid4()))
            book.add(flow_detail)

        job_details = {'store': job_args or {}}
        job_details['store'].update({'tx_uuid': tx_uuid})
        job_details['flow_uuid'] = flow_detail.uuid

        self.persistence.get_connection().save_logbook(book)

        engines.save_factory_details(flow_detail, flow_factory, flow_args,
                                     flow_kwargs, self.persistence)

        job = self.jobboard.post(job_name, book, details=job_details)
        return job
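A usage sketch for post(); the client instance, flow factory and argument values here are hypothetical, purely to show how the parameters map onto the job:

# Hypothetical call: flow_args/flow_kwargs are forwarded to the flow factory,
# while job_args become the engine 'store' (with 'tx_uuid' injected when not
# supplied by the caller).
job = client.post(make_provision_flow,
                  job_args={'request_id': 'req-123'},
                  flow_args=['server-1'],
                  flow_kwargs={'retries': 3})
print(job.name)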
Example 19
def load_taskflow_into_engine(action, nested_flow,
                              process_what):
    book = None
    backend = None
    if PERSISTENCE_BACKEND:
        backend = backends.fetch(PERSISTENCE_BACKEND)
        with contextlib.closing(backend.get_connection()) as conn:
            try:
                book = conn.get_logbook(process_what['notification_uuid'])
            except exceptions.NotFound:
                pass
            if book is None:
                book = models.LogBook(action,
                                      process_what['notification_uuid'])

    return taskflow.engines.load(nested_flow, store=process_what,
                                 backend=backend, book=book)
Example 20
    def make_save_book(persistence,
                       job_id,
                       flow_plugin,
                       plugin_args=(),
                       plugin_kwds=None):
        flow_id = book_id = job_id  # Do these need to be different?
        book = models.LogBook(book_id)
        detail = models.FlowDetail(flow_id, uuidutils.generate_uuid())
        book.add(detail)

        factory_args = [flow_plugin] + list(plugin_args)
        factory_kwargs = plugin_kwds or {}
        engines.save_factory_details(detail, workflow_factory, factory_args,
                                     factory_kwargs)
        with contextlib.closing(persistence.get_connection()) as conn:
            conn.save_logbook(book)
            return book
Example 21
    def test_flow_detail_save(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)

        # Ensure we can't save it since its owning logbook hasn't been
        # saved (flow details can not exist on their own without a connection
        # to a logbook).
        with contextlib.closing(self._get_connection()) as conn:
            self.assertRaises(exc.NotFound, conn.get_logbook, lb_id)
            self.assertRaises(exc.NotFound, conn.update_flow_details, fd)

        # Ok now we should be able to save both.
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
            conn.update_flow_details(fd)
Example 22
    def test_flow_detail_update_not_existing(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)

        fd2 = models.FlowDetail('test-2', uuid=uuidutils.generate_uuid())
        lb.add(fd2)
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)

        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb.uuid)
        self.assertIsNotNone(lb2.find(fd.uuid))
        self.assertIsNotNone(lb2.find(fd2.uuid))
Example 23
    def issue(self, auto_sync=True):
        try:
            self.guard.acquire()
            if self.schedule_status != E3TASKFLOW_SCHEDULE_STATUS_UNKNOWN:
                return True
            if self.sync:
                # create the flow using the registered flow creator
                flow = invt_taskflow_factory[self.category]()
                book = models.LogBook('logbook-%s' % (self.category))
                flow_detail = models.FlowDetail(
                    'flowdetail-%s' % (self.category), str(uuid.uuid4()))
                book.add(flow_detail)
                with contextlib.closing(
                        _taskflow_backend.get_connection()) as conn:
                    conn.save_logbook(book)
                self.book_id = book.uuid
                self.flow_id = flow_detail.uuid
                self.engine = engines.load(flow,
                                           backend=_taskflow_backend,
                                           flow_detail=flow_detail,
                                           book=book,
                                           store=self.store)
                self.schedule_status = E3TASKFLOW_SCHEDULE_STATUS_ISSUED
                # prior to running the flow, synchronize state in case the
                # tasks need it
                if auto_sync:
                    self.sync_state()
                self.engine.run()
                self.schedule_status = E3TASKFLOW_SCHEDULE_STATUS_SUCCESSFUL
                if self.callback:
                    self.callback(self)
            else:
                self.schedule_status = E3TASKFLOW_SCHEDULE_STATUS_ISSUED
                _taskflow_queue.put(self)
        except Exception as e:
            self.failure = str(traceback.format_exc())
            self.schedule_status = E3TASKFLOW_SCHEDULE_STATUS_FAILED
            if self.callback:
                self.callback(self, e)
            else:
                raise e
        finally:
            self.guard.release()
            if auto_sync:
                self.sync_state()
Example 24
    def test_dir_backend_cache_overfill(self):
        if self.max_cache_size is not None:
            # Ensure cache never goes past the desired max size...
            books_ids_made = []
            with contextlib.closing(self._get_connection()) as conn:
                for i in range(0, int(1.5 * self.max_cache_size)):
                    lb_name = 'book-%s' % (i)
                    lb_id = uuidutils.generate_uuid()
                    lb = models.LogBook(name=lb_name, uuid=lb_id)
                    self.assertRaises(exc.NotFound, conn.get_logbook, lb_id)
                    conn.save_logbook(lb)
                    books_ids_made.append(lb_id)
                    self.assertLessEqual(self.backend.file_cache.currsize,
                                         self.max_cache_size)
            # Also ensure that we can still read all created books...
            with contextlib.closing(self._get_connection()) as conn:
                for lb_id in books_ids_made:
                    lb = conn.get_logbook(lb_id)
                    self.assertIsNotNone(lb)
Example 25
    def test_flow_detail_meta_update(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        fd.meta = {'test': 42}
        lb.add(fd)

        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
            conn.update_flow_details(fd)

        fd.meta['test'] = 43
        with contextlib.closing(self._get_connection()) as conn:
            conn.update_flow_details(fd)
        with contextlib.closing(self._get_connection()) as conn:
            lb2 = conn.get_logbook(lb_id)
        fd2 = lb2.find(fd.uuid)
        self.assertEqual(43, fd2.meta.get('test'))
Example 26
    def test_logbook_save_retrieve_many(self):
        lb_ids = {}
        for i in range(0, 10):
            lb_id = uuidutils.generate_uuid()
            lb_name = 'lb-%s-%s' % (i, lb_id)
            lb = models.LogBook(name=lb_name, uuid=lb_id)
            lb_ids[lb_id] = True

            # Should not already exist
            with contextlib.closing(self._get_connection()) as conn:
                self.assertRaises(exc.NotFound, conn.get_logbook, lb_id)
                conn.save_logbook(lb)

        # Now fetch them all
        with contextlib.closing(self._get_connection()) as conn:
            lbs = conn.get_logbooks()
            for lb in lbs:
                self.assertIn(lb.uuid, lb_ids)
                lb_ids.pop(lb.uuid)
            self.assertEqual(0, len(lb_ids))
Example 27
def _ensure_db_initialized(conn, flow):
    conn.upgrade()

    need_save = False
    try:
        logbook = conn.get_logbook(LOGBOOK_ID)
    except exceptions.NotFound:
        logbook = models.LogBook(LOGBOOK_ID, uuid=LOGBOOK_ID)
        need_save = True

    flow_detail = logbook.find(flow.name)
    if flow_detail is None:
        flow_detail = models.FlowDetail(flow.name, flow.name)
        logbook.add(flow_detail)
        need_save = True

    if need_save:
        conn.save_logbook(logbook)

    return logbook, flow_detail
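For context, the returned pair is typically handed straight to an engine load so that flow state survives restarts; a minimal sketch, where `backend` and `my_flow` are assumed to exist elsewhere:

import contextlib

from taskflow import engines

# Sketch: make sure the logbook/flow detail exist, then load an engine bound
# to that flow detail so progress is recorded in the persistence backend.
with contextlib.closing(backend.get_connection()) as conn:
    logbook, flow_detail = _ensure_db_initialized(conn, my_flow)
engine = engines.load(my_flow, flow_detail=flow_detail, book=logbook,
                      backend=backend)
engine.run()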
Example 28
    def test_task_detail_save(self):
        lb_id = uuidutils.generate_uuid()
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        fd = models.FlowDetail('test', uuid=uuidutils.generate_uuid())
        lb.add(fd)
        td = models.TaskDetail("detail-1", uuid=uuidutils.generate_uuid())
        fd.add(td)

        # Ensure we can't save it since its owning logbook hasn't been
        # saved (flow details/task details can not exist on their own without
        # their parent existing).
        with contextlib.closing(self._get_connection()) as conn:
            self.assertRaises(exc.NotFound, conn.update_flow_details, fd)
            self.assertRaises(exc.NotFound, conn.update_atom_details, td)

        # Ok now we should be able to save them.
        with contextlib.closing(self._get_connection()) as conn:
            conn.save_logbook(lb)
            conn.update_flow_details(fd)
            conn.update_atom_details(td)
Example 29
    def test_logbook_save_retrieve(self):
        lb_id = uuidutils.generate_uuid()
        lb_meta = {'1': 2}
        lb_name = 'lb-%s' % (lb_id)
        lb = models.LogBook(name=lb_name, uuid=lb_id)
        lb.meta = lb_meta

        # Should not already exist
        with contextlib.closing(self._get_connection()) as conn:
            self.assertRaises(exc.NotFound, conn.get_logbook, lb_id)
            conn.save_logbook(lb)

        # Make sure we can reload it (and all of its attributes are what
        # we expect them to be).
        with contextlib.closing(self._get_connection()) as conn:
            lb = conn.get_logbook(lb_id)
        self.assertEqual(lb_name, lb.name)
        self.assertEqual(0, len(lb))
        self.assertEqual(lb_meta, lb.meta)
        self.assertIsNone(lb.updated_at)
        self.assertIsNotNone(lb.created_at)
Example 30
    def run_poster(self, flow_factory, *args, wait=False, **kwargs):
        with self.driver.persistence_driver.get_persistence() as persistence:
            with self.driver.job_board(persistence) as job_board:
                job_id = uuidutils.generate_uuid()
                job_name = '-'.join([flow_factory.__name__, job_id])
                job_logbook = models.LogBook(job_name)
                flow_detail = models.FlowDetail(job_name, job_id)
                job_details = {'store': kwargs.pop('store')}
                job_logbook.add(flow_detail)
                persistence.get_connection().save_logbook(job_logbook)
                engines.save_factory_details(flow_detail,
                                             flow_factory,
                                             args,
                                             kwargs,
                                             backend=persistence)

                job_board.post(job_name, book=job_logbook, details=job_details)
                if wait:
                    self._wait_for_job(job_board)

                return job_id
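_wait_for_job() is not included in this snippet. A hypothetical sketch of what such a wait could look like, polling the board until no unclaimed jobs remain; the polling strategy and interval are assumptions, not the original implementation:

import time

def _wait_for_job(self, job_board, interval=1):
    # Hypothetical helper: block until every job on the board has been
    # claimed by a conductor, re-checking every `interval` seconds.
    # iterjobs(ensure_fresh=True) refreshes the board's view of the jobs.
    while any(job_board.iterjobs(ensure_fresh=True, only_unclaimed=True)):
        time.sleep(interval)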