def _save_flow_details(self, flow_detail, ignore_missing):
    """Persist a flow detail to disk, merging with any stored copy.

    :param flow_detail: the flow detail to save.
    :param ignore_missing: when false, raise ``exc.NotFound`` if no
        previously saved copy exists.
    :returns: the (possibly merged) flow detail that was written.
    """
    # Attempt to fetch a previously saved version to merge into.
    existing = None
    try:
        existing = self._get_flow_details(flow_detail.uuid, lock=False)
    except EnvironmentError:
        if not ignore_missing:
            raise exc.NotFound("No flow details found with id: %s"
                               % flow_detail.uuid)
    if existing is not None:
        # Merge the incoming detail into the stored one, then carry over
        # any task details not already present in the stored copy.
        existing = p_utils.flow_details_merge(existing, flow_detail)
        for td in flow_detail:
            if existing.find(td.uuid) is None:
                existing.add(td)
        flow_detail = existing
    flow_path = os.path.join(self._flow_path, flow_detail.uuid)
    misc.ensure_tree(flow_path)
    metadata_path = os.path.join(flow_path, 'metadata')
    self._write_to(metadata_path,
                   jsonutils.dumps(p_utils.format_flow_detail(flow_detail)))
    if len(flow_detail):
        # Save the contained task details under a process lock so that
        # concurrent writers do not interleave in the tasks directory.
        task_path = os.path.join(flow_path, 'tasks')
        misc.ensure_tree(task_path)
        self._run_with_process_lock('task', self._save_tasks_and_link,
                                    list(flow_detail), task_path)
    return flow_detail
def _update_flow_details(self, fd, txn, create_missing=False):
    """Merge ``fd`` into its stored copy within the given transaction.

    :param fd: flow detail to merge/write.
    :param txn: transaction the create/set operations are queued on.
    :param create_missing: when true, a missing stored copy is created
        instead of raising ``exc.NotFound``.
    :returns: the merged flow detail.
    """
    fd_path = paths.join(self.flow_path, fd.uuid)
    try:
        blob, _zstat = self._client.get(fd_path)
    except k_exc.NoNodeError:
        # Nothing stored yet; either create a placeholder or complain.
        if not create_missing:
            raise exc.NotFound("No flow details found with id: %s" % fd.uuid)
        txn.create(fd_path)
        e_fd = logbook.FlowDetail(name=fd.name, uuid=fd.uuid)
    else:
        # Found an existing copy; deserialize it so we can merge into it.
        e_fd = p_utils.unformat_flow_detail(fd.uuid, misc.decode_json(blob))
    # Merge, reserialize and queue the write-back on the transaction.
    e_fd = p_utils.flow_details_merge(e_fd, fd)
    payload = p_utils.format_flow_detail(e_fd)
    txn.set_data(fd_path, misc.binary_encode(jsonutils.dumps(payload)))
    for td in fd:
        td_path = paths.join(fd_path, td.uuid)
        # NOTE(harlowja): create an entry in the flow detail path
        # for the provided task detail so that a reference exists
        # from the flow detail to its task details.
        if not self._client.exists(td_path):
            txn.create(td_path)
        e_fd.add(self._update_task_details(td, txn, create_missing=True))
    return e_fd
def _save_flow_details(self, flow_detail, ignore_missing):
    """Write ``flow_detail`` under the flows directory, merging first.

    Raises ``exc.NotFound`` when no stored copy exists and
    ``ignore_missing`` is false; returns the flow detail written.
    """
    # Load the stored version (if any) so updates accumulate rather
    # than clobber what was saved before.
    try:
        e_fd = self._get_flow_details(flow_detail.uuid, lock=False)
    except EnvironmentError:
        e_fd = None
        if not ignore_missing:
            raise exc.NotFound("No flow details found with id: %s"
                               % flow_detail.uuid)
    if e_fd is not None:
        merged = p_utils.flow_details_merge(e_fd, flow_detail)
        # Carry over any task details that the stored copy lacks.
        for td in flow_detail:
            if merged.find(td.uuid) is None:
                merged.add(td)
        flow_detail = merged
    flow_path = os.path.join(self._flow_path, flow_detail.uuid)
    misc.ensure_tree(flow_path)
    self._write_to(
        os.path.join(flow_path, 'metadata'),
        jsonutils.dumps(p_utils.format_flow_detail(flow_detail)))
    if len(flow_detail):
        # Task details are written under a process-wide lock to keep
        # concurrent saves from interleaving in the tasks directory.
        task_path = os.path.join(flow_path, 'tasks')
        misc.ensure_tree(task_path)
        self._run_with_process_lock('task', self._save_tasks_and_link,
                                    list(flow_detail), task_path)
    return flow_detail
def _update_flow_details(self, fd, txn, create_missing=False):
    """Update the stored flow detail for ``fd`` inside ``txn``.

    Creates the backing node when ``create_missing`` is true; otherwise
    a missing node raises ``exc.NotFound``. Returns the merged detail.
    """
    fd_path = paths.join(self.flow_path, fd.uuid)
    try:
        raw, _zstat = self._client.get(fd_path)
    except k_exc.NoNodeError:
        # Not-existent: create a fresh detail or raise.
        if create_missing:
            txn.create(fd_path)
            stored = logbook.FlowDetail(name=fd.name, uuid=fd.uuid)
        else:
            raise exc.NotFound("No flow details found with id: %s" % fd.uuid)
    else:
        # Existent: deserialize the stored copy so it can be merged.
        stored = p_utils.unformat_flow_detail(fd.uuid, misc.decode_json(raw))
    # Merge the incoming detail and queue the serialized result for
    # write-back as part of the transaction.
    stored = p_utils.flow_details_merge(stored, fd)
    txn.set_data(fd_path,
                 misc.binary_encode(
                     jsonutils.dumps(p_utils.format_flow_detail(stored))))
    for td in fd:
        td_path = paths.join(fd_path, td.uuid)
        # NOTE(harlowja): create an entry in the flow detail path
        # for the provided task detail so that a reference exists
        # from the flow detail to its task details.
        if not self._client.exists(td_path):
            txn.create(td_path)
        stored.add(self._update_task_details(td, txn, create_missing=True))
    return stored
def _create_logbook(lb_path, txn):
    """Record the logbook and all nested flow/task details in ``txn``.

    :param lb_path: node path under which the logbook is stored.
    :param txn: transaction that the create operations are queued on.
    :returns: the logbook that was recorded.
    """
    # NOTE(review): `lb` and `self` are free names here — presumably
    # captured from an enclosing save method; confirm against the
    # surrounding code.
    txn.create(lb_path,
               misc.binary_encode(jsonutils.dumps(
                   p_utils.format_logbook(lb, created_at=None))))
    for fd in lb:
        # NOTE(harlowja): create an entry in the logbook path
        # for the provided flow detail so that a reference exists
        # from the logbook to its flow details.
        txn.create(paths.join(lb_path, fd.uuid))
        fd_path = paths.join(self.flow_path, fd.uuid)
        txn.create(fd_path,
                   misc.binary_encode(
                       jsonutils.dumps(p_utils.format_flow_detail(fd))))
        for td in fd:
            # NOTE(harlowja): create an entry in the flow detail path
            # for the provided task detail so that a reference exists
            # from the flow detail to its task details.
            txn.create(paths.join(fd_path, td.uuid))
            td_path = paths.join(self.task_path, td.uuid)
            txn.create(td_path,
                       misc.binary_encode(
                           jsonutils.dumps(p_utils.format_task_detail(td))))
    return lb
def _create_logbook(lb_path, txn):
    """Queue creation of the logbook node plus all detail nodes on ``txn``.

    Returns the logbook that was serialized.
    """
    # NOTE(review): relies on `lb` and `self` from the enclosing scope
    # (not parameters) — verify against the surrounding save routine.
    book_blob = p_utils.format_logbook(lb, created_at=None)
    txn.create(lb_path, misc.binary_encode(jsonutils.dumps(book_blob)))
    for fd in lb:
        # NOTE(harlowja): create an entry in the logbook path
        # for the provided flow detail so that a reference exists
        # from the logbook to its flow details.
        txn.create(paths.join(lb_path, fd.uuid))
        fd_path = paths.join(self.flow_path, fd.uuid)
        fd_blob = jsonutils.dumps(p_utils.format_flow_detail(fd))
        txn.create(fd_path, misc.binary_encode(fd_blob))
        for td in fd:
            # NOTE(harlowja): create an entry in the flow detail path
            # for the provided task detail so that a reference exists
            # from the flow detail to its task details.
            txn.create(paths.join(fd_path, td.uuid))
            td_path = paths.join(self.task_path, td.uuid)
            td_blob = jsonutils.dumps(p_utils.format_task_detail(td))
            txn.create(td_path, misc.binary_encode(td_blob))
    return lb