def _step_create():
    """Create every child directory this backend needs, wrapping failures."""
    for child_path in (self._book_path, self._flow_path, self._task_path):
        try:
            misc.ensure_tree(child_path)
        except EnvironmentError as e:
            # Surface filesystem problems as storage errors so callers
            # only have to deal with one exception family.
            raise exc.StorageError(
                "Unable to create logbooks required child path %s"
                % child_path, e)
def _save_logbook(self, book):
    """Persist a logbook to disk, merging with any previously saved copy.

    Returns the logbook that was actually written (the merged one when a
    prior version existed).
    """
    # Fetch any previously stored version of this logbook so the new
    # contents can be merged into it instead of clobbering it.
    try:
        existing = self._get_logbook(book.uuid)
    except exc.NotFound:
        existing = None
    created_at = None
    if existing is not None:
        merged = p_utils.logbook_merge(existing, book)
        # Carry over flow details the stored copy did not yet contain.
        for fd in book:
            if merged.find(fd.uuid) is None:
                merged.add(fd)
        book = merged
        # Preserve the original creation timestamp across re-saves.
        created_at = merged.created_at
    book_path = os.path.join(self._book_path, book.uuid)
    misc.ensure_tree(book_path)
    self._write_to(
        os.path.join(book_path, 'metadata'),
        jsonutils.dumps(p_utils.format_logbook(book,
                                               created_at=created_at)))
    if len(book):
        flow_path = os.path.join(book_path, 'flows')
        misc.ensure_tree(flow_path)
        # Flow saving happens under its own process lock.
        self._run_with_process_lock('flow', self._save_flows_and_link,
                                    list(book), flow_path)
    return book
def _save_flow_details(self, flow_detail, ignore_missing):
    """Persist a flow detail, merging with any previously saved copy.

    Raises exc.NotFound when no prior copy exists and ``ignore_missing``
    is false; returns the flow detail that was actually written.
    """
    # Look for a previously stored version to merge into.
    try:
        existing = self._get_flow_details(flow_detail.uuid, lock=False)
    except EnvironmentError:
        existing = None
        if not ignore_missing:
            raise exc.NotFound("No flow details found with id: %s"
                               % flow_detail.uuid)
    if existing is not None:
        merged = p_utils.flow_details_merge(existing, flow_detail)
        # Carry over task details the stored copy did not yet contain.
        for td in flow_detail:
            if merged.find(td.uuid) is None:
                merged.add(td)
        flow_detail = merged
    flow_path = os.path.join(self._flow_path, flow_detail.uuid)
    misc.ensure_tree(flow_path)
    self._write_to(
        os.path.join(flow_path, 'metadata'),
        jsonutils.dumps(p_utils.format_flow_detail(flow_detail)))
    if len(flow_detail):
        task_path = os.path.join(flow_path, 'tasks')
        misc.ensure_tree(task_path)
        # Task saving happens under its own process lock.
        self._run_with_process_lock('task', self._save_tasks_and_link,
                                    list(flow_detail), task_path)
    return flow_detail
def acquire(self):
    """Acquire the interprocess file lock, waiting until it is held.

    Creates the lock file's parent directory on demand, then spins (with
    a small sleep) on a non-blocking lock attempt until it succeeds.

    :returns: True once the lock has been acquired.
    :raises threading.ThreadError: if locking fails for any reason other
        than the lock currently being held by another process.
    """
    basedir = os.path.dirname(self.fname)
    if not os.path.exists(basedir):
        misc.ensure_tree(basedir)
        # Consistency fix: use debug level like the twin implementation
        # and the "Got file lock" message below, rather than info.
        LOG.debug('Created lock path: %s', basedir)
    self.lockfile = open(self.fname, 'w')
    while True:
        try:
            # Using non-blocking locks since green threads are not
            # patched to deal with blocking locking calls.
            # Also upon reading the MSDN docs for locking(), it seems
            # to have a laughable 10 attempts "blocking" mechanism.
            self.trylock()
            LOG.debug('Got file lock "%s"', self.fname)
            return True
        except IOError as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                # external locks synchronise things like iptables
                # updates - give it some time to prevent busy spinning
                time.sleep(0.01)
            else:
                raise threading.ThreadError("Unable to acquire lock on"
                                            " `%(filename)s` due to"
                                            " %(exception)s" % {
                                                'filename': self.fname,
                                                'exception': e,
                                            })
def acquire(self):
    """Grab the interprocess file lock, looping until it is obtained.

    Returns True once the lock is held; any locking failure other than
    the lock being busy is raised as a threading.ThreadError.
    """
    lock_dir = os.path.dirname(self.fname)
    if not os.path.exists(lock_dir):
        misc.ensure_tree(lock_dir)
        LOG.debug('Created lock path: %s', lock_dir)
    self.lockfile = open(self.fname, 'w')
    while True:
        # Using non-blocking locks since green threads are not
        # patched to deal with blocking locking calls.
        # Also upon reading the MSDN docs for locking(), it seems
        # to have a laughable 10 attempts "blocking" mechanism.
        try:
            self.trylock()
        except IOError as e:
            if e.errno not in (errno.EACCES, errno.EAGAIN):
                raise threading.ThreadError("Unable to acquire lock on"
                                            " `%(filename)s` due to"
                                            " %(exception)s"
                                            % {'filename': self.fname,
                                               'exception': e})
            # external locks synchronise things like iptables
            # updates - give it some time to prevent busy spinning
            time.sleep(0.01)
        else:
            LOG.debug('Got file lock "%s"', self.fname)
            return True
def upgrade(self):
    """Prepare the on-disk directory layout used by this backend."""
    def _step_create():
        # Child directories are created while holding the "init" lock.
        for needed_dir in (self._book_path, self._flow_path,
                           self._task_path):
            misc.ensure_tree(needed_dir)

    # Base and lock paths must exist before the process lock can be taken.
    misc.ensure_tree(self._backend.base_path)
    misc.ensure_tree(self._backend.lock_path)
    self._run_with_process_lock("init", _step_create)
def upgrade(self):
    """Create every directory this backend requires, failing loudly."""
    def _make_dir(target, failure_fmt):
        # Translate filesystem problems into storage failures so callers
        # only see one exception family.
        try:
            misc.ensure_tree(target)
        except EnvironmentError as e:
            raise exc.StorageFailure(failure_fmt % target, e)

    def _step_create():
        for child in (self._book_path, self._flow_path, self._atom_path):
            _make_dir(child,
                      "Unable to create logbooks required child path %s")
        for base in (self._backend.base_path, self._backend.lock_path):
            _make_dir(base,
                      "Unable to create logbooks required path %s")

    self._run_with_process_lock("init", _step_create)
def upgrade(self):
    """Build the directory tree required by this storage backend."""
    def _step_create():
        # Every directory we need, paired with the error message used
        # when its creation fails.
        wanted = [
            (self._book_path,
             "Unable to create logbooks required child path %s"),
            (self._flow_path,
             "Unable to create logbooks required child path %s"),
            (self._task_path,
             "Unable to create logbooks required child path %s"),
            (self._backend.base_path,
             "Unable to create logbooks required path %s"),
            (self._backend.lock_path,
             "Unable to create logbooks required path %s"),
        ]
        for target, failure_fmt in wanted:
            try:
                misc.ensure_tree(target)
            except EnvironmentError as e:
                raise exc.StorageError(failure_fmt % target, e)

    self._run_with_process_lock("init", _step_create)
def _save_logbook(self, book):
    """Save a logbook to disk, merging it into any existing saved copy.

    Returns the logbook that was actually written (the merged one when a
    prior version existed).
    """
    # Pull in any previously stored version so contents merge rather
    # than overwrite.
    try:
        existing = self._get_logbook(book.uuid)
    except exc.NotFound:
        existing = None
    if existing is not None:
        merged = existing.merge(book)
        # Carry over flow details the stored copy did not yet contain.
        for fd in book:
            if merged.find(fd.uuid) is None:
                merged.add(fd)
        book = merged
    book_path = os.path.join(self._book_path, book.uuid)
    misc.ensure_tree(book_path)
    self._write_to(os.path.join(book_path, "metadata"),
                   jsonutils.dumps(book.to_dict(marshal_time=True)))
    if len(book):
        flow_path = os.path.join(book_path, "flows")
        misc.ensure_tree(flow_path)
        # Flow saving happens under its own process lock.
        self._run_with_process_lock("flow", self._save_flows_and_link,
                                    list(book), flow_path)
    return book
def _save_flow_details(self, flow_detail, ignore_missing):
    """Save a flow detail to disk, merging with any existing saved copy.

    Raises exc.NotFound when no prior copy exists and ``ignore_missing``
    is false; returns the flow detail that was actually written.
    """
    # Pull in any previously stored version so contents merge rather
    # than overwrite.
    try:
        existing = self._get_flow_details(flow_detail.uuid, lock=False)
    except EnvironmentError:
        existing = None
        if not ignore_missing:
            raise exc.NotFound("No flow details found with id: %s"
                               % flow_detail.uuid)
    if existing is not None:
        merged = existing.merge(flow_detail)
        # Carry over atom details the stored copy did not yet contain.
        for ad in flow_detail:
            if merged.find(ad.uuid) is None:
                merged.add(ad)
        flow_detail = merged
    flow_path = os.path.join(self._flow_path, flow_detail.uuid)
    misc.ensure_tree(flow_path)
    self._write_to(os.path.join(flow_path, "metadata"),
                   jsonutils.dumps(flow_detail.to_dict()))
    if len(flow_detail):
        atom_path = os.path.join(flow_path, "atoms")
        misc.ensure_tree(atom_path)
        # Atom saving happens under its own process lock.
        self._run_with_process_lock("atom", self._save_atoms_and_link,
                                    list(flow_detail), atom_path)
    return flow_detail
def _ensure_path(self, path):
    """Create directory *path* (and missing parents) if it does not exist.

    Any environment error raised during creation is translated by the
    wrapping context manager into the backend's storage-failure exception.
    """
    with _storagefailure_wrapper():
        misc.ensure_tree(path)
def _step_create():
    """Ensure the book, flow and task directories all exist."""
    for needed_path in (self._book_path, self._flow_path, self._task_path):
        misc.ensure_tree(needed_path)