Exemplo n.º 1
0
 def _save_flow_details(self, flow_detail, ignore_missing):
     """Persist a flow detail to disk, merging with any existing copy.

     Loads the previously saved flow detail (if any), merges the incoming
     one into it, writes out the combined metadata and then saves/links
     the contained task details under a per-flow 'tasks' directory.
     """
     # See if we have an existing flow detail to merge with.
     e_fd = None
     try:
         e_fd = self._get_flow_details(flow_detail.uuid, lock=False)
     except EnvironmentError:
         # Missing on disk: only an error when the caller requires the
         # flow detail to already exist.
         if not ignore_missing:
             raise exc.NotFound("No flow details found with id: %s"
                                % flow_detail.uuid)
     if e_fd is not None:
         e_fd = p_utils.flow_details_merge(e_fd, flow_detail)
         # Carry over any task details the saved copy does not reference.
         for td in flow_detail:
             if e_fd.find(td.uuid) is None:
                 e_fd.add(td)
         flow_detail = e_fd
     flow_path = os.path.join(self._flow_path, flow_detail.uuid)
     misc.ensure_tree(flow_path)
     self._write_to(
         os.path.join(flow_path, 'metadata'),
         jsonutils.dumps(p_utils.format_flow_detail(flow_detail)))
     if len(flow_detail):
         # Save contained task details under a process lock so concurrent
         # writers do not corrupt the shared task directory.
         task_path = os.path.join(flow_path, 'tasks')
         misc.ensure_tree(task_path)
         self._run_with_process_lock('task',
                                     self._save_tasks_and_link,
                                     list(flow_detail), task_path)
     return flow_detail
Exemplo n.º 2
0
    def _update_flow_details(self, fd, txn, create_missing=False):
        """Merge ``fd`` into its stored copy inside a zookeeper txn.

        Reads the existing flow detail node (creating it when
        ``create_missing`` is set), merges the provided one into it,
        writes the result back and recursively updates/links every
        contained atom detail.
        """
        # Determine whether the desired data exists or not
        fd_path = paths.join(self.flow_path, fd.uuid)
        try:
            fd_data, _zstat = self._client.get(fd_path)
        except k_exc.NoNodeError:
            # Not-existent: create or raise exception
            if create_missing:
                txn.create(fd_path)
                e_fd = logbook.FlowDetail(name=fd.name, uuid=fd.uuid)
            else:
                raise exc.NotFound("No flow details found with id: %s"
                                   % fd.uuid)
        else:
            # Existent: read it out
            e_fd = logbook.FlowDetail.from_dict(misc.decode_json(fd_data))

        # Update and write it back
        e_fd = e_fd.merge(fd)
        fd_data = e_fd.to_dict()
        txn.set_data(fd_path, misc.binary_encode(jsonutils.dumps(fd_data)))
        for ad in fd:
            ad_path = paths.join(fd_path, ad.uuid)
            # NOTE(harlowja): create an entry in the flow detail path
            # for the provided atom detail so that a reference exists
            # from the flow detail to its atom details.
            if not self._client.exists(ad_path):
                txn.create(ad_path)
            e_fd.add(self._update_atom_details(ad, txn, create_missing=True))
        return e_fd
Exemplo n.º 3
0
 def _save_logbook(self, book):
     """Persist a logbook to disk, merging with any existing saved copy.

     Loads the previously saved logbook (if any), merges the incoming one
     into it, writes the combined metadata and then saves/links all the
     contained flow details under a per-book 'flows' directory.
     """
     # See if we have an existing logbook to merge with.
     e_lb = None
     try:
         e_lb = self._get_logbook(book.uuid)
     except exc.NotFound:
         # Nothing saved yet; the incoming book is written as-is.
         pass
     if e_lb is not None:
         e_lb = p_utils.logbook_merge(e_lb, book)
         # Carry over flow details the saved copy does not know about.
         for fd in book:
             if e_lb.find(fd.uuid) is None:
                 e_lb.add(fd)
         book = e_lb
     book_path = os.path.join(self._book_path, book.uuid)
     misc.ensure_tree(book_path)
     # Preserve the original creation time when a saved copy existed.
     created_at = None
     if e_lb is not None:
         created_at = e_lb.created_at
     self._write_to(os.path.join(book_path, 'metadata'), jsonutils.dumps(
         p_utils.format_logbook(book, created_at=created_at)))
     if len(book):
         flow_path = os.path.join(book_path, 'flows')
         misc.ensure_tree(flow_path)
         self._run_with_process_lock('flow',
                                     self._save_flows_and_link,
                                     list(book), flow_path)
     return book
Exemplo n.º 4
0
 def _save_logbook(self, book):
     """Persist ``book`` to disk, folding in any previously saved copy."""
     try:
         known = self._get_logbook(book.uuid)
     except exc.NotFound:
         known = None
     if known is not None:
         # Merge the stored copy with the incoming one and make sure every
         # incoming flow detail ends up referenced by the merged result.
         known = p_utils.logbook_merge(known, book)
         for flow_d in book:
             if known.find(flow_d.uuid) is None:
                 known.add(flow_d)
         book = known
     book_path = os.path.join(self._book_path, book.uuid)
     misc.ensure_tree(book_path)
     created_at = None if known is None else known.created_at
     self._write_to(
         os.path.join(book_path, 'metadata'),
         jsonutils.dumps(p_utils.format_logbook(book,
                                                created_at=created_at)))
     if len(book):
         flow_path = os.path.join(book_path, 'flows')
         misc.ensure_tree(flow_path)
         self._run_with_process_lock('flow',
                                     self._save_flows_and_link,
                                     list(book), flow_path)
     return book
Exemplo n.º 5
0
    def _update_atom_details(self, ad, txn, create_missing=False):
        """Merge ``ad`` into its stored copy inside a zookeeper txn.

        Reads the existing atom detail node, merges the provided atom
        detail into it and writes the result back within ``txn``.

        :param ad: atom detail to persist.
        :param txn: zookeeper transaction to append operations to.
        :param create_missing: when true, create the backing node instead
            of raising ``exc.NotFound`` if it does not exist yet.
            Previously this flag was accepted but ignored (a missing node
            always raised), which broke callers passing
            ``create_missing=True`` like the flow detail updater does.
        """
        # Determine whether the desired data exists or not.
        ad_path = paths.join(self.atom_path, ad.uuid)
        e_ad = None
        try:
            ad_data, _zstat = self._client.get(ad_path)
        except k_exc.NoNodeError:
            # Not-existent: create or raise exception.
            if create_missing:
                txn.create(ad_path)
            else:
                raise exc.NotFound("No atom details found with id: %s"
                                   % ad.uuid)
        else:
            # Existent: read it out.
            try:
                ad_data = misc.decode_json(ad_data)
                ad_cls = logbook.atom_detail_class(ad_data['type'])
                e_ad = ad_cls.from_dict(ad_data['atom'])
            except KeyError:
                # Malformed/legacy node contents; fall back to treating it
                # as if nothing useful was stored (the incoming atom detail
                # is then written out as-is below).
                pass

        # Update and write it back
        if e_ad:
            e_ad = e_ad.merge(ad)
        else:
            e_ad = ad
        ad_data = base._format_atom(e_ad)
        txn.set_data(ad_path,
                     misc.binary_encode(jsonutils.dumps(ad_data)))
        return e_ad
Exemplo n.º 6
0
    def format(self, record):
        """Serialize the log ``record`` into a JSON document string."""
        doc = {}
        doc['message'] = record.getMessage()
        doc['asctime'] = self.formatTime(record, self.datefmt)
        doc['name'] = record.name
        doc['msg'] = record.msg
        doc['args'] = record.args
        doc['levelname'] = record.levelname
        doc['levelno'] = record.levelno
        doc['pathname'] = record.pathname
        doc['filename'] = record.filename
        doc['module'] = record.module
        doc['lineno'] = record.lineno
        doc['funcname'] = record.funcName
        doc['created'] = record.created
        doc['msecs'] = record.msecs
        doc['relative_created'] = record.relativeCreated
        doc['thread'] = record.thread
        doc['thread_name'] = record.threadName
        doc['process_name'] = record.processName
        doc['process'] = record.process
        doc['traceback'] = None
        # Pass through any caller-provided extra payload.
        if hasattr(record, 'extra'):
            doc['extra'] = record.extra
        # Attach a formatted traceback when exception info is present.
        if record.exc_info:
            doc['traceback'] = self.formatException(record.exc_info)
        return jsonutils.dumps(doc)
Exemplo n.º 7
0
    def format(self, record):
        """Render the log ``record`` as a JSON encoded string."""
        message = dict(message=record.getMessage(),
                       asctime=self.formatTime(record, self.datefmt),
                       name=record.name,
                       msg=record.msg,
                       args=record.args,
                       levelname=record.levelname,
                       levelno=record.levelno,
                       pathname=record.pathname,
                       filename=record.filename,
                       module=record.module,
                       lineno=record.lineno,
                       funcname=record.funcName,
                       created=record.created,
                       msecs=record.msecs,
                       relative_created=record.relativeCreated,
                       thread=record.thread,
                       thread_name=record.threadName,
                       process_name=record.processName,
                       process=record.process,
                       traceback=None)
        # Pass through any caller-provided extra payload.
        if hasattr(record, 'extra'):
            message['extra'] = record.extra
        # Attach a formatted traceback when exception info is present.
        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)
        return jsonutils.dumps(message)
Exemplo n.º 8
0
 def _save_flow_details(self, flow_detail, ignore_missing):
     """Save ``flow_detail`` to disk, merging with any stored version."""
     try:
         stored = self._get_flow_details(flow_detail.uuid, lock=False)
     except EnvironmentError:
         stored = None
         if not ignore_missing:
             raise exc.NotFound("No flow details found with id: %s"
                                % flow_detail.uuid)
     if stored is not None:
         stored = p_utils.flow_details_merge(stored, flow_detail)
         # Reference any task details the stored copy is missing.
         for task_d in flow_detail:
             if stored.find(task_d.uuid) is None:
                 stored.add(task_d)
         flow_detail = stored
     flow_path = os.path.join(self._flow_path, flow_detail.uuid)
     misc.ensure_tree(flow_path)
     metadata_blob = jsonutils.dumps(p_utils.format_flow_detail(flow_detail))
     self._write_to(os.path.join(flow_path, 'metadata'), metadata_blob)
     if len(flow_detail):
         task_path = os.path.join(flow_path, 'tasks')
         misc.ensure_tree(task_path)
         self._run_with_process_lock('task',
                                     self._save_tasks_and_link,
                                     list(flow_detail), task_path)
     return flow_detail
Exemplo n.º 9
0
    def _update_flow_details(self, fd, txn, create_missing=False):
        """Merge ``fd`` into its stored copy inside a zookeeper txn.

        Reads (or, with ``create_missing``, creates) the backing flow
        detail node, merges the provided one into it, writes the result
        back and updates/links all contained task details.
        """
        # Determine whether the desired data exists or not
        fd_path = paths.join(self.flow_path, fd.uuid)
        try:
            fd_data, _zstat = self._client.get(fd_path)
        except k_exc.NoNodeError:
            # Not-existent: create or raise exception
            if create_missing:
                txn.create(fd_path)
                e_fd = logbook.FlowDetail(name=fd.name, uuid=fd.uuid)
            else:
                raise exc.NotFound("No flow details found with id: %s" %
                                   fd.uuid)
        else:
            # Existent: read it out
            e_fd = p_utils.unformat_flow_detail(fd.uuid,
                                                misc.decode_json(fd_data))

        # Update and write it back
        e_fd = p_utils.flow_details_merge(e_fd, fd)
        fd_data = p_utils.format_flow_detail(e_fd)
        txn.set_data(fd_path, misc.binary_encode(jsonutils.dumps(fd_data)))
        for td in fd:
            td_path = paths.join(fd_path, td.uuid)
            # NOTE(harlowja): create an entry in the flow detail path
            # for the provided task detail so that a reference exists
            # from the flow detail to its task details.
            if not self._client.exists(td_path):
                txn.create(td_path)
            e_fd.add(self._update_task_details(td, txn, create_missing=True))
        return e_fd
Exemplo n.º 10
0
 def _create_logbook(lb_path, txn):
     """Create the logbook and all of its flow/task details in ``txn``.

     NOTE(review): ``lb`` and ``self`` are resolved from the enclosing
     scope (this appears to be a closure helper used by the save path).
     """
     lb_data = p_utils.format_logbook(lb, created_at=None)
     txn.create(lb_path, misc.binary_encode(jsonutils.dumps(lb_data)))
     for fd in lb:
         # NOTE(harlowja): create an entry in the logbook path
         # for the provided flow detail so that a reference exists
         # from the logbook to its flow details.
         txn.create(paths.join(lb_path, fd.uuid))
         fd_path = paths.join(self.flow_path, fd.uuid)
         fd_data = jsonutils.dumps(p_utils.format_flow_detail(fd))
         txn.create(fd_path, misc.binary_encode(fd_data))
         for td in fd:
             # NOTE(harlowja): create an entry in the flow detail path
             # for the provided task detail so that a reference exists
             # from the flow detail to its task details.
             txn.create(paths.join(fd_path, td.uuid))
             td_path = paths.join(self.task_path, td.uuid)
             td_data = jsonutils.dumps(p_utils.format_task_detail(td))
             txn.create(td_path, misc.binary_encode(td_data))
     return lb
Exemplo n.º 11
0
 def _create_logbook(lb_path, txn):
     """Write the logbook plus all nested flow/task details into ``txn``."""
     # ``lb``/``self`` come from the enclosing scope (closure helper).
     encoded = misc.binary_encode(
         jsonutils.dumps(p_utils.format_logbook(lb, created_at=None)))
     txn.create(lb_path, encoded)
     for flow_d in lb:
         # Reference entry: logbook -> flow detail.
         txn.create(paths.join(lb_path, flow_d.uuid))
         flow_node = paths.join(self.flow_path, flow_d.uuid)
         txn.create(flow_node, misc.binary_encode(
             jsonutils.dumps(p_utils.format_flow_detail(flow_d))))
         for task_d in flow_d:
             # Reference entry: flow detail -> task detail.
             txn.create(paths.join(flow_node, task_d.uuid))
             task_node = paths.join(self.task_path, task_d.uuid)
             txn.create(task_node, misc.binary_encode(
                 jsonutils.dumps(p_utils.format_task_detail(task_d))))
     return lb
Exemplo n.º 12
0
 def _create_logbook(lb_path, txn):
     """Create the logbook plus all flow/atom details inside ``txn``.

     NOTE(review): ``lb`` and ``self`` are resolved from the enclosing
     scope (closure helper used by the save path).
     """
     lb_data = lb.to_dict(marshal_time=True)
     txn.create(lb_path, misc.binary_encode(jsonutils.dumps(lb_data)))
     for fd in lb:
         # NOTE(harlowja): create an entry in the logbook path
         # for the provided flow detail so that a reference exists
         # from the logbook to its flow details.
         txn.create(paths.join(lb_path, fd.uuid))
         fd_path = paths.join(self.flow_path, fd.uuid)
         fd_data = jsonutils.dumps(fd.to_dict())
         txn.create(fd_path, misc.binary_encode(fd_data))
         for ad in fd:
             # NOTE(harlowja): create an entry in the flow detail path
             # for the provided atom detail so that a reference exists
             # from the flow detail to its atom details.
             txn.create(paths.join(fd_path, ad.uuid))
             ad_path = paths.join(self.atom_path, ad.uuid)
             ad_data = base._format_atom(ad)
             txn.create(ad_path,
                        misc.binary_encode(jsonutils.dumps(ad_data)))
     return lb
Exemplo n.º 13
0
 def _format_job(self, job):
     """Encode ``job`` as the binary JSON posting stored in the backend."""
     posting = {'uuid': job.uuid, 'name': job.name}
     if job.details is not None:
         posting['details'] = job.details
     if job.book is not None:
         # Store only a lightweight reference to the associated logbook.
         posting['book'] = {'name': job.book.name, 'uuid': job.book.uuid}
     return misc.binary_encode(jsonutils.dumps(posting))
Exemplo n.º 14
0
 def _format_job(self, job):
     """Serialize ``job`` metadata for storage (binary encoded JSON)."""
     doc = {
         'uuid': job.uuid,
         'name': job.name,
     }
     if job.details is not None:
         doc['details'] = job.details
     if job.book is not None:
         # Only a lightweight reference to the logbook is persisted.
         doc['book'] = {
             'name': job.book.name,
             'uuid': job.book.uuid,
         }
     return misc.binary_encode(jsonutils.dumps(doc))
Exemplo n.º 15
0
 def _save_atom_details(self, atom_detail, ignore_missing):
     """Write an atom detail to disk, merging with any saved copy."""
     try:
         existing = self._get_atom_details(atom_detail.uuid, lock=False)
     except EnvironmentError:
         existing = None
         if not ignore_missing:
             raise exc.NotFound(
                 "No atom details found with id: %s" % atom_detail.uuid)
     if existing is not None:
         atom_detail = existing.merge(atom_detail)
     target = os.path.join(self._atom_path, atom_detail.uuid)
     self._write_to(target, jsonutils.dumps(base._format_atom(atom_detail)))
     return atom_detail
Exemplo n.º 16
0
 def _update_logbook(lb_path, lb_data, txn):
     """Merge ``lb`` into its stored form and refresh all flow details.

     NOTE(review): ``lb`` and ``self`` come from the enclosing scope
     (closure helper used by the save path).
     """
     e_lb = p_utils.unformat_logbook(lb.uuid, misc.decode_json(lb_data))
     e_lb = p_utils.logbook_merge(e_lb, lb)
     # Keep the original creation timestamp when rewriting the node.
     lb_data = p_utils.format_logbook(e_lb, created_at=lb.created_at)
     txn.set_data(lb_path, misc.binary_encode(jsonutils.dumps(lb_data)))
     for fd in lb:
         fd_path = paths.join(lb_path, fd.uuid)
         if not self._client.exists(fd_path):
             # NOTE(harlowja): create an entry in the logbook path
             # for the provided flow detail so that a reference exists
             # from the logbook to its flow details.
             txn.create(fd_path)
         e_fd = self._update_flow_details(fd, txn, create_missing=True)
         e_lb.add(e_fd)
     return e_lb
Exemplo n.º 17
0
 def _save_task_details(self, task_detail, ignore_missing):
     """Write a task detail to disk, merging with any stored version."""
     try:
         stored = self._get_task_details(task_detail.uuid, lock=False)
     except EnvironmentError:
         stored = None
         if not ignore_missing:
             raise exc.NotFound("No task details found with id: %s"
                                % task_detail.uuid)
     if stored is not None:
         stored = p_utils.task_details_merge(stored, task_detail)
         task_detail = stored
     target = os.path.join(self._task_path, task_detail.uuid)
     self._write_to(target, jsonutils.dumps(
         p_utils.format_task_detail(task_detail)))
     return task_detail
Exemplo n.º 18
0
 def _update_logbook(lb_path, lb_data, txn):
     """Merge ``lb`` into its stored form and update every flow detail."""
     # ``lb``/``self`` resolve from the enclosing scope (closure helper).
     merged = p_utils.logbook_merge(
         p_utils.unformat_logbook(lb.uuid, misc.decode_json(lb_data)), lb)
     serialized = p_utils.format_logbook(merged, created_at=lb.created_at)
     txn.set_data(lb_path, misc.binary_encode(jsonutils.dumps(serialized)))
     for flow_d in lb:
         child = paths.join(lb_path, flow_d.uuid)
         if not self._client.exists(child):
             # NOTE(harlowja): create an entry in the logbook path
             # for the provided flow detail so that a reference exists
             # from the logbook to its flow details.
             txn.create(child)
         merged.add(self._update_flow_details(flow_d, txn,
                                              create_missing=True))
     return merged
Exemplo n.º 19
0
 def _save_task_details(self, task_detail, ignore_missing):
     """Persist ``task_detail`` on disk, merged with any prior record."""
     previous = None
     try:
         previous = self._get_task_details(task_detail.uuid, lock=False)
     except EnvironmentError:
         # Only a problem when callers demand a pre-existing record.
         if not ignore_missing:
             raise exc.NotFound("No task details found with id: %s"
                                % task_detail.uuid)
     if previous is not None:
         task_detail = p_utils.task_details_merge(previous, task_detail)
     self._write_to(
         os.path.join(self._task_path, task_detail.uuid),
         jsonutils.dumps(p_utils.format_task_detail(task_detail)))
     return task_detail
Exemplo n.º 20
0
 def _update_logbook(lb_path, lb_data, txn):
     """Merge ``lb`` into its stored form and refresh its flow details.

     NOTE(review): ``lb`` and ``self`` are resolved from the enclosing
     scope (closure helper used by the save path).
     """
     e_lb = logbook.LogBook.from_dict(misc.decode_json(lb_data),
                                      unmarshal_time=True)
     e_lb = e_lb.merge(lb)
     lb_data = e_lb.to_dict(marshal_time=True)
     txn.set_data(lb_path, misc.binary_encode(jsonutils.dumps(lb_data)))
     for fd in lb:
         fd_path = paths.join(lb_path, fd.uuid)
         if not self._client.exists(fd_path):
             # NOTE(harlowja): create an entry in the logbook path
             # for the provided flow detail so that a reference exists
             # from the logbook to its flow details.
             txn.create(fd_path)
         e_fd = self._update_flow_details(fd, txn, create_missing=True)
         e_lb.add(e_fd)
     return e_lb
Exemplo n.º 21
0
    def claim(self, job, who):
        """Atomically claim ``job`` for owner ``who``.

        Creates an ephemeral lock node for the job inside a zookeeper
        transaction that also checks the job node is still at the version
        we read, so a concurrent delete/update aborts the claim.

        :raises excp.UnclaimableJob: when the job is already claimed (or
            the claim transaction otherwise fails).
        :raises excp.NotFound: when the job node no longer exists.
        """
        def _unclaimable_try_find_owner(cause):
            # Build (not raise) an UnclaimableJob that names the current
            # owner when it can be determined (best-effort lookup).
            try:
                owner = self.find_owner(job)
            except Exception:
                owner = None
            if owner:
                msg = "Job %s already claimed by '%s'" % (job.uuid, owner)
            else:
                msg = "Job %s already claimed" % (job.uuid)
            return excp.UnclaimableJob(msg, cause)

        _check_who(who)
        with self._wrap(job.uuid, job.path, "Claiming failure: %s"):
            # NOTE(harlowja): post as json which will allow for future changes
            # more easily than a raw string/text.
            value = jsonutils.dumps({
                'owner': who,
            })
            # Ensure the target job is still existent (at the right version).
            job_data, job_stat = self._client.get(job.path)
            txn = self._client.transaction()
            # This will abort (and not create the lock) if the job has been
            # removed (somehow...) or updated by someone else to a different
            # version...
            txn.check(job.path, version=job_stat.version)
            txn.create(job.lock_path, value=misc.binary_encode(value),
                       ephemeral=True)
            try:
                kazoo_utils.checked_commit(txn)
            except k_exceptions.NodeExistsError as e:
                # Lock node already present: someone else holds the claim.
                raise _unclaimable_try_find_owner(e)
            except kazoo_utils.KazooTransactionException as e:
                if len(e.failures) < 2:
                    raise
                else:
                    # Map the per-operation failures back to the check (job
                    # gone) and create (lock exists) operations above.
                    if isinstance(e.failures[0], k_exceptions.NoNodeError):
                        raise excp.NotFound(
                            "Job %s not found to be claimed" % job.uuid,
                            e.failures[0])
                    if isinstance(e.failures[1], k_exceptions.NodeExistsError):
                        raise _unclaimable_try_find_owner(e.failures[1])
                    else:
                        raise excp.UnclaimableJob(
                            "Job %s claim failed due to transaction"
                            " not succeeding" % (job.uuid), e)
Exemplo n.º 22
0
    def post(self, name, book, details=None):
        """Create and post a new job to the jobboard.

        Serializes the job metadata (plus an optional logbook reference)
        to JSON, stores it under a new sequenced znode, registers the
        resulting job locally and emits a POSTED event.
        """

        def format_posting(job_uuid):
            # Assemble the JSON-serializable posting document.
            posting = {
                'uuid': job_uuid,
                'name': name,
            }
            if details:
                posting['details'] = details
            else:
                posting['details'] = {}
            if book is not None:
                # Only a lightweight reference to the logbook is stored.
                posting['book'] = {
                    'name': book.name,
                    'uuid': book.uuid,
                }
            return posting

        # NOTE(harlowja): Jobs are not ephemeral, they will persist until they
        # are consumed (this may change later, but seems safer to do this until
        # further notice).
        job_uuid = uuidutils.generate_uuid()
        with self._wrap(job_uuid, None,
                        "Posting failure: %s", ensure_known=False):
            job_posting = format_posting(job_uuid)
            job_posting = misc.binary_encode(jsonutils.dumps(job_posting))
            job_path = self._client.create(self._job_base,
                                           value=job_posting,
                                           sequence=True,
                                           ephemeral=False)
            job = ZookeeperJob(name, self, self._client,
                               self._persistence, job_path,
                               book=book, details=details,
                               uuid=job_uuid)
            # Register the new job and wake any waiters under the
            # job condition lock.
            self._job_cond.acquire()
            try:
                self._known_jobs[job_path] = job
                self._job_cond.notify_all()
            finally:
                self._job_cond.release()
            self._emit(jobboard.POSTED, details={'job': job})
            return job
Exemplo n.º 23
0
    def test_posting_owner_lost(self):
        """A claimed job reverts to UNCLAIMED when its lock data is wiped."""

        with connect_close(self.board):
            j = self.board.post('test', p_utils.temporary_log_book())
            self.client.flush()
            self.assertEqual(states.UNCLAIMED, j.state)
            self.board.claim(j, self.board.name)
            self.client.flush()
            self.assertEqual(states.CLAIMED, j.state)

            # Forcefully delete the owner from the backend storage to make
            # sure the job becomes unclaimed (this may happen if some admin
            # manually deletes the lock).
            paths = list(six.iteritems(self.client.storage.paths))
            for (path, value) in paths:
                if path in self.bad_paths:
                    continue
                if path.endswith('lock'):
                    # Blank the lock payload so no owner can be read back.
                    value['data'] = misc.binary_encode(jsonutils.dumps({}))
            self.assertEqual(states.UNCLAIMED, j.state)
Exemplo n.º 24
0
    def test_posting_owner_lost(self):
        """A claimed job reverts to UNCLAIMED once its lock data is wiped."""

        with base.connect_close(self.board):
            with base.flush(self.client):
                j = self.board.post('test', p_utils.temporary_log_book())
            self.assertEqual(states.UNCLAIMED, j.state)
            with base.flush(self.client):
                self.board.claim(j, self.board.name)
            self.assertEqual(states.CLAIMED, j.state)

            # Forcefully delete the owner from the backend storage to make
            # sure the job becomes unclaimed (this may happen if some admin
            # manually deletes the lock).
            paths = list(six.iteritems(self.client.storage.paths))
            for (path, value) in paths:
                if path in self.bad_paths:
                    continue
                if path.endswith('lock'):
                    # Blank the lock payload so no owner can be read back.
                    value['data'] = misc.binary_encode(jsonutils.dumps({}))
            self.assertEqual(states.UNCLAIMED, j.state)
Exemplo n.º 25
0
 def _save_logbook(self, book):
     """Persist ``book`` on disk, merged with any previously saved copy."""
     try:
         existing = self._get_logbook(book.uuid)
     except exc.NotFound:
         existing = None
     if existing is not None:
         existing = existing.merge(book)
         # Adopt any flow details the merged copy does not yet reference.
         for flow_d in book:
             if existing.find(flow_d.uuid) is None:
                 existing.add(flow_d)
         book = existing
     book_path = os.path.join(self._book_path, book.uuid)
     misc.ensure_tree(book_path)
     self._write_to(os.path.join(book_path, "metadata"),
                    jsonutils.dumps(book.to_dict(marshal_time=True)))
     if len(book):
         flow_path = os.path.join(book_path, "flows")
         misc.ensure_tree(flow_path)
         self._run_with_process_lock("flow", self._save_flows_and_link,
                                     list(book), flow_path)
     return book
Exemplo n.º 26
0
 def _save_flow_details(self, flow_detail, ignore_missing):
     """Persist a flow detail to disk, merging with any existing copy.

     After merging, the flow metadata is written out and all contained
     atom details are saved/linked under a per-flow 'atoms' directory.
     """
     # See if we have an existing flow detail to merge with.
     e_fd = None
     try:
         e_fd = self._get_flow_details(flow_detail.uuid, lock=False)
     except EnvironmentError:
         # Missing on disk: only an error when the caller requires the
         # flow detail to already exist.
         if not ignore_missing:
             raise exc.NotFound("No flow details found with id: %s" % flow_detail.uuid)
     if e_fd is not None:
         e_fd = e_fd.merge(flow_detail)
         # Carry over atom details the saved copy does not reference.
         for ad in flow_detail:
             if e_fd.find(ad.uuid) is None:
                 e_fd.add(ad)
         flow_detail = e_fd
     flow_path = os.path.join(self._flow_path, flow_detail.uuid)
     misc.ensure_tree(flow_path)
     self._write_to(os.path.join(flow_path, "metadata"), jsonutils.dumps(flow_detail.to_dict()))
     if len(flow_detail):
         # Save contained atom details under a process lock so concurrent
         # writers do not corrupt the shared atom directory.
         atom_path = os.path.join(flow_path, "atoms")
         misc.ensure_tree(atom_path)
         self._run_with_process_lock("atom", self._save_atoms_and_link, list(flow_detail), atom_path)
     return flow_detail
Exemplo n.º 27
0
 def claim(self, job, who):
     """Claim ``job`` for ``who`` by creating an ephemeral lock node.

     :raises excp.UnclaimableJob: if the lock node already exists (the
         job is claimed by someone else).
     """
     _check_who(who)
     with self._wrap(job.uuid, job.path, "Claiming failure: %s"):
         # NOTE(harlowja): post as json which will allow for future changes
         # more easily than a raw string/text.
         value = jsonutils.dumps({
             'owner': who,
         })
         try:
             self._client.create(job.lock_path,
                                 value=misc.binary_encode(value),
                                 ephemeral=True)
         # NOTE(review): kazoo's canonical exception name is
         # NodeExistsError; NodeExistsException looks like the legacy
         # alias -- confirm the kazoo version in use still provides it.
         except k_exceptions.NodeExistsException:
             # Try to see if we can find who the owner really is...
             try:
                 owner = self.find_owner(job)
             except Exception:
                 owner = None
             if owner:
                 msg = "Job %s already claimed by '%s'" % (job.uuid, owner)
             else:
                 msg = "Job %s already claimed" % (job.uuid)
             raise excp.UnclaimableJob(msg)
Exemplo n.º 28
0
    def _update_task_details(self, td, txn, create_missing=False):
        """Merge ``td`` into its stored node within a zookeeper txn.

        :param create_missing: when true, create the backing node
            instead of raising ``exc.NotFound`` if it is absent.
        """
        # Determine whether the desired data exists or not.
        td_path = paths.join(self.task_path, td.uuid)
        try:
            td_data, _zstat = self._client.get(td_path)
        except k_exc.NoNodeError:
            # Not-existent: create or raise exception.
            if create_missing:
                txn.create(td_path)
                e_td = logbook.TaskDetail(name=td.name, uuid=td.uuid)
            else:
                raise exc.NotFound("No task details found with id: %s"
                                   % td.uuid)
        else:
            # Existent: read it out.
            e_td = p_utils.unformat_task_detail(td.uuid,
                                                misc.decode_json(td_data))

        # Update and write it back
        e_td = p_utils.task_details_merge(e_td, td)
        td_data = p_utils.format_task_detail(e_td)
        txn.set_data(td_path, misc.binary_encode(jsonutils.dumps(td_data)))
        return e_td
Exemplo n.º 29
0
    def _update_task_details(self, td, txn, create_missing=False):
        """Merge ``td`` into its stored node, creating it when allowed."""
        td_path = paths.join(self.task_path, td.uuid)
        try:
            td_data, _zstat = self._client.get(td_path)
        except k_exc.NoNodeError:
            # Missing node: either start a fresh detail or complain.
            if not create_missing:
                raise exc.NotFound("No task details found with id: %s" %
                                   td.uuid)
            txn.create(td_path)
            e_td = logbook.TaskDetail(name=td.name, uuid=td.uuid)
        else:
            # Node exists: decode the stored task detail.
            e_td = p_utils.unformat_task_detail(td.uuid,
                                                misc.decode_json(td_data))
        merged = p_utils.task_details_merge(e_td, td)
        txn.set_data(td_path, misc.binary_encode(
            jsonutils.dumps(p_utils.format_task_detail(merged))))
        return merged
Exemplo n.º 30
0
 def claim(self, job, who):
     """Claim ``job`` for ``who`` via an ephemeral lock node.

     :raises excp.UnclaimableJob: if the lock node already exists (the
         job is claimed by someone else).
     """
     _check_who(who)
     # NOTE(review): ``job_path`` is computed but never used below; only
     # the lock path is needed -- verify against the backend's callers.
     job_path, lock_path = _get_paths(self.path, job.uuid)
     with self._wrap(job.uuid, "Claiming failure: %s"):
         # NOTE(harlowja): post as json which will allow for future changes
         # more easily than a raw string/text.
         value = jsonutils.dumps({
             'owner': who,
         })
         try:
             self._client.create(lock_path,
                                 value=misc.binary_encode(value),
                                 ephemeral=True)
         # NOTE(review): kazoo's canonical exception name is
         # NodeExistsError; NodeExistsException looks like the legacy
         # alias -- confirm the kazoo version in use still provides it.
         except k_exceptions.NodeExistsException:
             # Try to see if we can find who the owner really is...
             try:
                 owner = self.find_owner(job)
             except Exception:
                 owner = None
             if owner:
                 msg = "Job %s already claimed by '%s'" % (job.uuid, owner)
             else:
                 msg = "Job %s already claimed" % (job.uuid)
             raise excp.UnclaimableJob(msg)
Exemplo n.º 31
0
 def process_bind_param(self, value, dialect):
     """Serialize ``value`` into JSON text before binding it to SQL."""
     encoded = jsonutils.dumps(value)
     return encoded
Exemplo n.º 32
0
 def process_bind_param(self, value, dialect):
     """Encode a failure as JSON text; ``None`` is stored as SQL NULL."""
     if value is None:
         return None
     as_dict = persistence_utils.failure_to_dict(value)
     return jsonutils.dumps(as_dict)
Exemplo n.º 33
0
 def process_bind_param(self, value, dialect):
     """Bind-time hook: serialize ``value`` as a JSON string."""
     serialized = jsonutils.dumps(value)
     return serialized
Exemplo n.º 34
0
 def process_bind_param(self, value, dialect):
     """Serialize a failure to JSON for storage; ``None`` passes through."""
     if value is not None:
         return jsonutils.dumps(persistence_utils.failure_to_dict(value))
     return None