def add(self, entry):
    """Add entry to inventory.

    To add a file to a branch ready to be committed, use Branch.add,
    which calls this.

    :raises errors.DuplicateFileId: if the entry's file_id is already
        present in this inventory.
    :raises BzrError: if the parent_id is unknown or a sibling with the
        same name is already versioned.
    Returns the new entry object.
    """
    if entry.file_id in self._byid:
        # Reject duplicate ids up front so _byid stays one-to-one.
        raise errors.DuplicateFileId(entry.file_id,
                                     self._byid[entry.file_id])
    if entry.parent_id is None:
        # No parent means this entry becomes the inventory root.
        self.root = entry
    else:
        try:
            parent = self._byid[entry.parent_id]
        except KeyError:
            raise BzrError("parent_id {%s} not in inventory"
                           % entry.parent_id)
        if entry.name in parent.children:
            raise BzrError("%s is already versioned"
                           % osutils.pathjoin(self.id2path(parent.file_id),
                                              entry.name).encode('utf-8'))
        # Hook the entry into its parent's children mapping.
        parent.children[entry.name] = entry
    # _add_child registers the entry (and any children) in _byid.
    return self._add_child(entry)
def __init__(self, virtual_url_fragment, reason=None):
    """Build the error for *virtual_url_fragment*, with optional detail.

    The reason is normalised to a string so later formatting never
    has to cope with None.
    """
    BzrError.__init__(self)
    self.virtual_url_fragment = virtual_url_fragment
    self.reason = str(reason) if reason is not None else ''
def rename(self, file_id, new_parent_id, new_name):
    """Move a file within the inventory.

    This can change either the name, or the parent, or both.

    This does not move the working file.

    :raises BzrError: if the new name is invalid, already taken in the
        destination directory, or the move would place a directory
        inside itself.
    """
    new_name = ensure_normalized_name(new_name)
    if not is_valid_name(new_name):
        raise BzrError("not an acceptable filename: %r" % new_name)

    new_parent = self._byid[new_parent_id]
    if new_name in new_parent.children:
        raise BzrError("%r already exists in %r"
                       % (new_name, self.id2path(new_parent_id)))

    # Walking the id path of the destination catches attempts to move
    # a directory underneath itself.
    new_parent_idpath = self.get_idpath(new_parent_id)
    if file_id in new_parent_idpath:
        raise BzrError(
            "cannot move directory %r into a subdirectory of itself, %r"
            % (self.id2path(file_id), self.id2path(new_parent_id)))

    file_ie = self._byid[file_id]
    old_parent = self._byid[file_ie.parent_id]

    # TODO: Don't leave things messed up if this fails

    # Re-link the entry under its new parent, then update the entry's
    # own name/parent fields to match.
    del old_parent.children[file_ie.name]
    new_parent.children[new_name] = file_ie

    file_ie.name = new_name
    file_ie.parent_id = new_parent_id
def add_sha(d, revision_id, sha1):
    """Record sha1 as the hash of revision_id in mapping d.

    A None revision_id must carry a None sha1 and is not recorded.
    If revision_id is already present, the stored hash must match.

    :raises BzrError: on a non-null sha1 for a null revision, or on a
        conflicting hash for an already-recorded revision.
    """
    if revision_id is None:
        if sha1 is not None:
            # Fix: the two adjacent literals previously concatenated to
            # "alwayshave" — a space was missing between them.
            raise BzrError('A Null revision should always'
                           ' have a null sha1 hash')
        return
    if revision_id in d:
        # This really should have been validated as part
        # of _validate_revisions but lets do it again
        if sha1 != d[revision_id]:
            raise BzrError('** Revision %r referenced with 2 different'
                           ' sha hashes %s != %s' % (revision_id,
                                                     sha1, d[revision_id]))
    else:
        d[revision_id] = sha1
def _unpack_entry(self, elt):
    """Build an InventoryEntry from one XML inventory element.

    :param elt: an XML element with 'file_id', 'name', 'kind' and
        kind-specific attributes.
    :raises BzrError: if the 'kind' attribute is not one of
        directory/file/symlink.
    """
    ## original format inventories don't have a parent_id for
    ## nodes in the root directory, but it's cleaner to use one
    ## internally.
    parent_id = elt.get('parent_id')
    if parent_id is None:
        parent_id = ROOT_ID

    kind = elt.get('kind')
    if kind == 'directory':
        ie = inventory.InventoryDirectory(elt.get('file_id'),
                                          elt.get('name'),
                                          parent_id)
    elif kind == 'file':
        ie = inventory.InventoryFile(elt.get('file_id'),
                                     elt.get('name'),
                                     parent_id)
        ie.text_id = elt.get('text_id')
        ie.text_sha1 = elt.get('text_sha1')
        v = elt.get('text_size')
        # text_size is None when absent, otherwise parsed as int.
        ie.text_size = v and int(v)
    elif kind == 'symlink':
        ie = inventory.InventoryLink(elt.get('file_id'),
                                     elt.get('name'),
                                     parent_id)
        ie.symlink_target = elt.get('symlink_target')
    else:
        raise BzrError("unknown kind %r" % kind)
    ## mutter("read inventoryentry: %r", elt.attrib)
    return ie
def add_entry(file_id):
    # Build an InventoryEntry for file_id from this tree's metadata and
    # add it to `inv` (closure variable). Entries whose path cannot be
    # resolved are silently skipped.
    path = self.id2path(file_id)
    if path is None:
        return
    if path == '':
        # The empty path is the tree root, which has no parent.
        parent_id = None
    else:
        parent_path = dirname(path)
        parent_id = self.path2id(parent_path)

    kind = self.kind(file_id)
    revision_id = self.get_last_changed(file_id)

    name = basename(path)
    if kind == 'directory':
        ie = InventoryDirectory(file_id, name, parent_id)
    elif kind == 'file':
        ie = InventoryFile(file_id, name, parent_id)
        ie.executable = self.is_executable(file_id)
    elif kind == 'symlink':
        ie = InventoryLink(file_id, name, parent_id)
        ie.symlink_target = self.get_symlink_target(file_id, path)
    # NOTE(review): an unrecognised kind would leave `ie` unbound and
    # raise NameError below — presumably kind is always one of the
    # three above; confirm upstream.

    ie.revision = revision_id

    if kind == 'file':
        ie.text_size, ie.text_sha1 = self.get_size_and_sha1(file_id)
        if ie.text_size is None:
            raise BzrError(
                'Got a text_size of None for file_id %r' % file_id)
    inv.add(ie)
def note_last_changed(self, file_id, revision_id):
    """Remember revision_id as the last-changed revision of file_id.

    :raises BzrError: if a different revision was already recorded for
        this file_id.
    """
    if file_id in self._last_changed:
        recorded = self._last_changed[file_id]
        if recorded != revision_id:
            raise BzrError('Mismatched last-changed revision for file_id {%s}'
                           ': %s != %s' % (file_id, recorded, revision_id))
    self._last_changed[file_id] = revision_id
def _put_in_tar(self, item, tree):
    """populate item for stashing in a tar, and return the content stream.

    If no content is available, return None.

    Base implementation: this entry kind cannot be exported, so always
    raises BzrError; subclasses override with real behaviour.
    """
    raise BzrError("don't know how to export {%s} of kind %r"
                   % (self.file_id, self.kind))
def _put_on_disk(self, fullpath, tree):
    """See InventoryEntry._put_on_disk.

    Creates a symlink at fullpath pointing at this entry's target;
    wraps any OS failure in a BzrError.
    """
    try:
        os.symlink(self.symlink_target, fullpath)
    except OSError, e:
        raise BzrError("Failed to create symlink %r -> %r, error: %s"
                       % (fullpath, self.symlink_target, e))
def _copy_one(self, fileid, suffix, other, pb):
    """Copy one stored file from `other` into this store.

    Falls back to the generic implementation unless `other` is a
    TextStore with the same prefixing scheme, in which case the raw
    transport file is copied directly.

    :raises KeyError: if the id is not present in `other`.
    :raises BzrError: if the transport copy did not copy exactly one file.
    """
    # TODO: Once the copy_to interface is improved to allow a source
    # and destination targets, then we can always do the copy
    # as long as other is a TextStore
    if not (isinstance(other, TextStore)
            and other._prefixed == self._prefixed):
        return super(TextStore, self)._copy_one(fileid, suffix, other, pb)

    mutter('_copy_one: %r, %r', fileid, suffix)
    path = other._get_name(fileid, suffix)
    if path is None:
        raise KeyError(fileid + '-' + str(suffix))

    try:
        result = other._transport.copy_to([path], self._transport,
                                          mode=self._file_mode)
    except NoSuchFile:
        # Only prefixed stores have an intermediate directory that may
        # be missing; create it and retry once.
        if not self._prefixed:
            raise
        try:
            self._transport.mkdir(osutils.dirname(path),
                                  mode=self._dir_mode)
        except FileExists:
            pass
        result = other._transport.copy_to([path], self._transport,
                                          mode=self._file_mode)

    # copy_to returns the number of files copied; anything but 1 means
    # the copy silently failed.
    if result != 1:
        raise BzrError('Unable to copy file: %r' % (path, ))
def _unpack_revision(self, elt):
    """XML Element -> Revision object

    :raises BzrError: if the element carries a 'format' attribute that
        does not match this serializer's expected format number.
    """
    format = elt.get('format')
    # revision_format_num, when set, overrides the serializer-wide
    # format number for revision elements.
    format_num = self.format_num
    if self.revision_format_num is not None:
        format_num = self.revision_format_num
    if format is not None:
        if format != format_num:
            raise BzrError("invalid format version %r on revision"
                           % format)
    get_cached = _get_utf8_or_ascii
    rev = Revision(committer=elt.get('committer'),
                   timestamp=float(elt.get('timestamp')),
                   revision_id=get_cached(elt.get('revision_id')),
                   inventory_sha1=elt.get('inventory_sha1'))
    # NOTE(review): an Element's truth value depends on its number of
    # children, so a present-but-empty <parents> falls through to [] —
    # same iteration result, but this idiom warns on newer ElementTree.
    parents = elt.find('parents') or []
    for p in parents:
        rev.parent_ids.append(get_cached(p.get('revision_id')))
    self._unpack_revision_properties(elt, rev)
    v = elt.get('timezone')
    if v is None:
        rev.timezone = 0
    else:
        rev.timezone = int(v)
    rev.message = elt.findtext('message')  # text of <message>
    return rev
def run(self, from_location, to_location=None, lightweight=False, **kw):
    """Branch command override adding a --lightweight mode.

    In lightweight mode: branch inside the shared repository next to
    the source branch, then create a lightweight checkout of the new
    branch at to_location.

    NOTE(review): when lightweight is False this override appears to do
    nothing — confirm the base command's run is invoked elsewhere.
    """
    if lightweight:
        # Imports are deferred so the common (non-lightweight) path
        # pays no cost for them.
        import errno
        import os
        from bzrlib.branch import Branch
        from bzrlib.errors import BzrError
        from bzrlib.urlutils import basename, dirname, join
        from bzrlib.workingtree import WorkingTree

        br_from = Branch.open(from_location)
        repo = br_from.repository
        if not repo.is_shared():
            raise BzrError('branch --lightweight supported '
                           'only for shared repository')
        wt_from = WorkingTree.open(from_location)
        working_path = wt_from.bzrdir.root_transport.base
        from_branch_path = br_from.bzrdir.root_transport.base
        # A lightweight checkout has its working tree somewhere other
        # than the branch itself.
        if working_path == from_branch_path:
            raise BzrError('source branch is not lightweight checkout')
        if to_location is None:
            raise BzrError('you should specify name for new branch')
        from_basename = basename(from_branch_path)
        to_basename = basename(to_location)
        if from_basename == to_basename:
            raise BzrError('basename of source and destination is equal')
        # The new branch lives beside the source branch inside the
        # shared repository.
        to_branch_path = join(dirname(from_branch_path), to_basename)
        # make branch
        print >> self.outf, 'Create branch: %s => %s' % (from_branch_path,
                                                         to_branch_path)
        builtins.cmd_branch.run(self, from_branch_path, to_branch_path,
                                **kw)
        # make lightweight chekout
        source = Branch.open(to_branch_path)
        revision_id = source.last_revision()
        try:
            os.mkdir(to_location)
        except OSError, e:
            if e.errno == errno.EEXIST:
                raise errors.BzrCommandError('Target directory "%s" already'
                                             ' exists.' % to_location)
            if e.errno == errno.ENOENT:
                raise errors.BzrCommandError('Parent of "%s" does not exist.'
                                             % to_location)
            else:
                raise
        source.create_checkout(to_location, revision_id, lightweight)
def etckeeper_startcommit_hook(tree):
    """Run `etckeeper pre-commit` before committing an etckeeper tree.

    Does nothing unless the tree has an abspath method and contains a
    .etckeeper marker file.

    :raises BzrError: if the etckeeper pre-commit command exits non-zero.
    """
    abspath = getattr(tree, "abspath", None)
    if abspath is None or not os.path.exists(abspath(".etckeeper")):
        # Only run the commit hook when this is an etckeeper branch
        return
    ret = subprocess.call(["etckeeper", "pre-commit", abspath(".")])
    if ret != 0:
        raise BzrError("etckeeper pre-commit failed")
def zip_exporter(tree, dest, root):
    """ Export this tree to a new zip file.

    `dest` will be created holding the contents of this tree; if it
    already exists, it will be overwritten".
    """
    import time
    # All zip members share the export timestamp (year..second tuple).
    now = time.localtime()[:6]
    mutter('export version %r', tree)

    compression = zipfile.ZIP_DEFLATED
    zipf = zipfile.ZipFile(dest, "w", compression)

    inv = tree.inventory

    try:
        entries = inv.iter_entries()
        entries.next()  # skip root
        for dp, ie in entries:
            # The .bzr* namespace is reserved for "magic" files like
            # .bzrignore and .bzrrules - do not export these
            if dp.startswith(".bzr"):
                continue
            file_id = ie.file_id
            mutter(" export {%s} kind %s to %s", file_id, ie.kind, dest)

            # zipfile.ZipFile switches all paths to forward
            # slashes anyway, so just stick with that.
            filename = osutils.pathjoin(root, dp).encode('utf8')
            if ie.kind == "file":
                zinfo = zipfile.ZipInfo(filename=filename,
                                        date_time=now)
                zinfo.compress_type = compression
                zinfo.external_attr = _FILE_ATTR
                zipf.writestr(zinfo, tree.get_file_text(file_id))
            elif ie.kind == "directory":
                # Directories must contain a trailing slash, to indicate
                # to the zip routine that they are really directories and
                # not just empty files.
                zinfo = zipfile.ZipInfo(filename=filename + '/',
                                        date_time=now)
                zinfo.compress_type = compression
                zinfo.external_attr = _DIR_ATTR
                zipf.writestr(zinfo, '')
            elif ie.kind == "symlink":
                # Symlinks are stored as small .lnk members holding the
                # link target, since zip has no native symlink entry here.
                zinfo = zipfile.ZipInfo(filename=(filename + '.lnk'),
                                        date_time=now)
                zinfo.compress_type = compression
                zinfo.external_attr = _FILE_ATTR
                zipf.writestr(zinfo, ie.symlink_target)

        zipf.close()
    except UnicodeEncodeError:
        # Partial archives are useless: close and remove the output
        # before reporting the encoding failure.
        zipf.close()
        os.remove(dest)
        from bzrlib.errors import BzrError
        raise BzrError("Can't export non-ascii filenames to zip")
def removed(kind, extra, lines):
    # Handle a 'removed' bundle action: `extra` must be just the path.
    info = extra.split(' // ')
    if len(info) > 1:
        # TODO: in the future we might allow file ids to be
        # given for removed entries
        raise BzrError('removed action lines should only have the path'
                       ': %r' % extra)
    path = info[0]
    bundle_tree.note_deletion(path)
def _add_child(self, entry):
    """Add an entry to the inventory, without adding it to its parent

    Registers the entry, and recursively any children it already has,
    in the id->entry map. Returns the entry.

    :raises BzrError: if the file_id is already registered.
    """
    if entry.file_id in self._byid:
        raise BzrError("inventory already contains entry with id {%s}"
                       % entry.file_id)
    self._byid[entry.file_id] = entry
    # Non-directory entries have no 'children' attribute; default to
    # an empty mapping so the recursion is a no-op for them.
    for child in getattr(entry, 'children', {}).itervalues():
        self._add_child(child)
    return entry
def _validate_references_from_repository(self, repository):
    """Now that we have a repository which should have some of the
    revisions we care about, go through and validate all of them
    that we can.

    Cross-checks each referenced revision's testament sha1 against the
    repository; revisions absent from both the bundle and repository
    are only warned about.

    :raises BzrError: on any sha1 mismatch.
    """
    rev_to_sha = {}
    inv_to_sha = {}

    def add_sha(d, revision_id, sha1):
        # Record sha1 for revision_id, insisting any re-reference
        # carries the same hash.
        if revision_id is None:
            if sha1 is not None:
                # Fix: adjacent literals previously joined to
                # "alwayshave" — space was missing.
                raise BzrError('A Null revision should always'
                               ' have a null sha1 hash')
            return
        if revision_id in d:
            # This really should have been validated as part
            # of _validate_revisions but lets do it again
            if sha1 != d[revision_id]:
                raise BzrError('** Revision %r referenced with 2 different'
                               ' sha hashes %s != %s' % (revision_id,
                                                         sha1,
                                                         d[revision_id]))
        else:
            d[revision_id] = sha1

    # All of the contained revisions were checked
    # in _validate_revisions
    checked = {}
    for rev_info in self.revisions:
        checked[rev_info.revision_id] = True
        add_sha(rev_to_sha, rev_info.revision_id, rev_info.sha1)

    for (rev, rev_info) in zip(self.real_revisions, self.revisions):
        add_sha(inv_to_sha, rev_info.revision_id, rev_info.inventory_sha1)

    count = 0
    missing = {}
    for revision_id, sha1 in rev_to_sha.iteritems():
        if repository.has_revision(revision_id):
            testament = StrictTestament.from_revision(repository,
                                                      revision_id)
            local_sha1 = self._testament_sha1_from_revision(repository,
                                                            revision_id)
            if sha1 != local_sha1:
                # Fix: message previously rendered as "{%s}local:" —
                # space was missing between the adjacent literals.
                raise BzrError('sha1 mismatch. For revision id {%s}'
                               ' local: %s, bundle: %s'
                               % (revision_id, local_sha1, sha1))
            else:
                count += 1
        elif revision_id not in checked:
            missing[revision_id] = sha1

    if len(missing) > 0:
        # I don't know if this is an error yet
        # Fix: "Unable validate" -> "Unable to validate".
        warning('Not all revision hashes could be validated.'
                ' Unable to validate %d hashes' % len(missing))
    mutter('Verified %d sha hashes for the bundle.' % count)
    self._validated_revisions_against_repo = True
def modified(kind, extra, lines):
    # Handle a 'modified' bundle action: `extra` is the path followed
    # by optional metadata, `lines` is the patch body (if any).
    info = extra.split(' // ')
    if len(info) < 1:
        # NOTE(review): str.split always returns at least one element,
        # so this branch looks unreachable — kept for safety.
        # Fix: adjacent literals previously joined to "at leastthe" —
        # space was missing.
        raise BzrError('modified action lines have at least'
                       ' the path in them: %r' % extra)
    path = info[0]

    last_modified, encoding = extra_info(info[1:], path)
    revision = get_rev_id(last_modified, path, kind)
    if lines:
        do_patch(path, lines, encoding)
def read_text_inventory(tf):
    """Return an inventory read in from tf

    Expects a START_MARK line, then one space-separated entry per
    line, terminated by a '#' line followed by END_MARK.

    NOTE(review): the parsed `ie` dict is never added (inv.add is
    commented out), so the returned inventory is always empty — this
    looks like dead/legacy code; confirm before relying on it.
    """
    if tf.readline() != START_MARK:
        raise BzrError("missing start mark")

    inv = Inventory()

    for l in tf:
        fields = l.split(' ')
        if fields[0] == '#':
            break
        ie = {'file_id': fields[0],
              'name': unescape(fields[1]),
              'kind': fields[2],
              'parent_id': fields[3]}
        ##inv.add(ie)

    # `l` here is the line that terminated the loop (the '#' line) —
    # it is expected to be the end marker.
    if l != END_MARK:
        raise BzrError("missing end mark")
    return inv
def added(kind, extra, lines):
    # Handle an 'added' bundle action: `extra` carries the path, the
    # file-id, and optional metadata, separated by ' // '.
    info = extra.split(' // ')
    if len(info) <= 1:
        raise BzrError('add action lines require the path and file id'
                       ': %r' % extra)
    elif len(info) > 5:
        # Fix: this branch fires when there are MORE than 5 entries,
        # but the message claimed "fewer than 5".
        raise BzrError('add action lines have at most 5 entries'
                       ': %r' % extra)
    path = info[0]
    if not info[1].startswith('file-id:'):
        raise BzrError('The file-id should follow the path for an add'
                       ': %r' % extra)
    # This will be Unicode because of how the stream is read. Turn it
    # back into a utf8 file_id
    file_id = osutils.safe_file_id(info[1][8:], warn=False)

    bundle_tree.note_id(file_id, path, kind)
    # this will be overridden in extra_info if executable is specified.
    bundle_tree.note_executable(path, False)
    last_changed, encoding = extra_info(info[2:], path)
    revision = get_rev_id(last_changed, path, kind)
    if kind == 'directory':
        return
    do_patch(path, lines, encoding)
def add(self, f, fileid, suffix=None):
    """Add contents of a file into the store.

    f -- A file-like object

    :raises BzrError: if the store already contains this id under any
        of its candidate names.
    """
    mutter("add store entry %r", fileid)
    names = self._id_to_names(fileid, suffix)
    if self._transport.has_any(names):
        raise BzrError("store %r already contains id %r"
                       % (self._transport.base, fileid))

    # Most of the time, just adding the file will work
    # if we find a time where it fails, (because the dir
    # doesn't exist), then create the dir, and try again
    self._add(names[0], f)
def make_entry(kind, name, parent_id, file_id=None):
    """Create an inventory entry.

    :param kind: the type of inventory entry to create.
    :param name: the basename of the entry.
    :param parent_id: the parent_id of the entry.
    :param file_id: the file_id to use. if None, one will be created.
    :raises BzrError: if kind has no registered entry factory.
    """
    if file_id is None:
        file_id = generate_ids.gen_file_id(name)
    name = ensure_normalized_name(name)
    try:
        factory = entry_factory[kind]
    except KeyError:
        raise BzrError("unknown kind %r" % kind)
    return factory(file_id, name, parent_id)
def renamed(kind, extra, lines):
    # Handle a 'renamed' bundle action: `extra` is "old // [=> ]new"
    # plus optional metadata; `lines` is an optional patch body.
    info = extra.split(' // ')
    if len(info) < 2:
        raise BzrError('renamed action lines need both a from and to'
                       ': %r' % extra)
    old_path = info[0]
    # The new path may be written with or without the '=> ' prefix.
    if info[1].startswith('=> '):
        new_path = info[1][3:]
    else:
        new_path = info[1]

    bundle_tree.note_rename(old_path, new_path)
    last_modified, encoding = extra_info(info[2:], new_path)
    revision = get_rev_id(last_modified, new_path, kind)
    if lines:
        do_patch(new_path, lines, encoding)
def write_old_to_temp(tree, file_id, rev_no):
    """Write the basis-tree text of file_id to a named temporary file.

    Returns the open NamedTemporaryFile; its name embeds the revision
    number and original suffix.

    :raises BzrError: if file_id is not present in `tree`.
    """
    # we want the same suffix as before so syntax highlighting works
    from tempfile import NamedTemporaryFile
    from os.path import splitext, basename

    if not tree.has_id(file_id):
        raise BzrError("file {%s} wasn't in the basis version %s"
                       % (file_id, tree))
    old_filename = tree.id2path(file_id)
    name_base, name_suffix = splitext(basename(old_filename))
    basis_tmp = NamedTemporaryFile(suffix=('.%s.tmp%s'
                                           % (rev_no, name_suffix)),
                                   prefix=name_base)
    basis_tmp.write(tree.get_file_text(file_id))
    basis_tmp.flush()
    try:
        # Make the temp file read-only (0444 is Python 2 octal);
        # best-effort only — failures are ignored.
        os.chmod(basis_tmp.name, 0444)
    except OSError:
        pass
    return basis_tmp
def _validate_revision(self, tree, revision_id): """Make sure all revision entries match their checksum.""" # This is a mapping from each revision id to its sha hash rev_to_sha1 = {} rev = self.get_revision(revision_id) rev_info = self.get_revision_info(revision_id) if not (rev.revision_id == rev_info.revision_id): raise AssertionError() if not (rev.revision_id == revision_id): raise AssertionError() sha1 = self._testament_sha1(rev, tree) if sha1 != rev_info.sha1: raise TestamentMismatch(rev.revision_id, rev_info.sha1, sha1) if rev.revision_id in rev_to_sha1: raise BzrError('Revision {%s} given twice in the list' % (rev.revision_id)) rev_to_sha1[rev.revision_id] = sha1
def _unpack_revision(self, elt): """XML Element -> Revision object""" # <changeset> is deprecated... if elt.tag not in ('revision', 'changeset'): raise BzrError("unexpected tag in revision file: %r" % elt) rev = Revision(committer=elt.get('committer'), timestamp=float(elt.get('timestamp')), revision_id=elt.get('revision_id'), inventory_id=elt.get('inventory_id'), inventory_sha1=elt.get('inventory_sha1')) precursor = elt.get('precursor') precursor_sha1 = elt.get('precursor_sha1') pelts = elt.find('parents') if pelts: for p in pelts: rev.parent_ids.append(p.get('revision_id')) rev.parent_sha1s.append(p.get('revision_sha1')) if precursor: # must be consistent prec_parent = rev.parent_ids[0] elif precursor: # revisions written prior to 0.0.5 have a single precursor # give as an attribute rev.parent_ids.append(precursor) rev.parent_sha1s.append(precursor_sha1) v = elt.get('timezone') rev.timezone = v and int(v) rev.message = elt.findtext('message') # text of <message> return rev
def _put_on_disk(self, fullpath, tree):
    """Put this entry onto disk at fullpath, from tree tree.

    Base implementation: this entry kind cannot be exported, so always
    raises BzrError; subclasses override with real behaviour.
    """
    raise BzrError("don't know how to export {%s} of kind %r"
                   % (self.file_id, self.kind))
def __init__(self, msg):
    """Store the human-readable message for this error."""
    BzrError.__init__(self)
    self.msg = msg
def eol_lookup(key):
    """Return the content-filter stack registered for the eol value *key*.

    :raises BzrError: when no filter stack is registered under key.
    """
    # Renamed local so it no longer shadows the builtin `filter`.
    stack = _eol_filter_stack_map.get(key)
    if stack is None:
        raise BzrError("Unknown eol value '%s'" % key)
    return stack
def commit(self,
           message=None,
           timestamp=None,
           timezone=None,
           committer=None,
           specific_files=None,
           rev_id=None,
           allow_pointless=True,
           strict=False,
           verbose=False,
           revprops=None,
           working_tree=None,
           local=False,
           reporter=None,
           config=None,
           message_callback=None,
           recursive='down',
           exclude=None):
    """Commit working copy as a new revision.

    :param message: the commit message (it or message_callback is required)

    :param timestamp: if not None, seconds-since-epoch for a
        postdated/predated commit.

    :param specific_files: If true, commit only those files.

    :param rev_id: If set, use this as the new revision id.
        Useful for test or import commands that need to tightly
        control what revisions are assigned.  If you duplicate
        a revision id that exists elsewhere it is your own fault.
        If null (default), a time/random revision id is generated.

    :param allow_pointless: If true (default), commit even if nothing
        has changed and no merges are recorded.

    :param strict: If true, don't allow a commit if the working tree
        contains unknown files.

    :param revprops: Properties for new revision
    :param local: Perform a local only commit.
    :param reporter: the reporter to use or None for the default
    :param verbose: if True and the reporter is not None, report everything
    :param recursive: If set to 'down', commit in any subtrees that have
        pending changes of any sort during this commit.
    :param exclude: None or a list of relative paths to exclude from the
        commit. Pending changes to excluded files will be ignored by the
        commit.
    """
    mutter('preparing to commit')

    if working_tree is None:
        raise BzrError("working_tree must be passed into commit().")
    else:
        self.work_tree = working_tree
        self.branch = self.work_tree.branch
        if getattr(self.work_tree, 'requires_rich_root', lambda: False)():
            if not self.branch.repository.supports_rich_root():
                raise errors.RootNotRich()
    if message_callback is None:
        if message is not None:
            if isinstance(message, str):
                message = message.decode(bzrlib.user_encoding)
            # Wrap a plain message so the rest of the code only deals
            # with the callback form.
            message_callback = lambda x: message
        else:
            raise BzrError("The message or message_callback keyword"
                           " parameter is required for commit().")

    self.bound_branch = None
    self.any_entries_changed = False
    self.any_entries_deleted = False
    if exclude is not None:
        self.exclude = sorted(
            minimum_path_selection(exclude))
    else:
        self.exclude = []
    self.local = local
    self.master_branch = None
    self.master_locked = False
    self.recursive = recursive
    self.rev_id = None
    if specific_files is not None:
        self.specific_files = sorted(
            minimum_path_selection(specific_files))
    else:
        self.specific_files = None
    self.specific_file_ids = None
    self.allow_pointless = allow_pointless
    self.revprops = revprops
    self.message_callback = message_callback
    self.timestamp = timestamp
    self.timezone = timezone
    self.committer = committer
    self.strict = strict
    self.verbose = verbose
    # accumulates an inventory delta to the basis entry, so we can make
    # just the necessary updates to the workingtree's cached basis.
    self._basis_delta = []

    self.work_tree.lock_write()
    self.pb = bzrlib.ui.ui_factory.nested_progress_bar()
    self.basis_revid = self.work_tree.last_revision()
    self.basis_tree = self.work_tree.basis_tree()
    self.basis_tree.lock_read()
    try:
        # Cannot commit with conflicts present.
        if len(self.work_tree.conflicts()) > 0:
            raise ConflictsInTree

        # Setup the bound branch variables as needed.
        self._check_bound_branch()

        # Check that the working tree is up to date
        old_revno, new_revno = self._check_out_of_date_tree()

        # Complete configuration setup
        if reporter is not None:
            self.reporter = reporter
        elif self.reporter is None:
            self.reporter = self._select_reporter()
        if self.config is None:
            self.config = self.branch.get_config()

        # If provided, ensure the specified files are versioned
        if self.specific_files is not None:
            # Note: This routine is being called because it raises
            # PathNotVersionedError as a side effect of finding the IDs. We
            # later use the ids we found as input to the working tree
            # inventory iterator, so we only consider those ids rather than
            # examining the whole tree again.
            # XXX: Dont we have filter_unversioned to do this more
            # cheaply?
            self.specific_file_ids = tree.find_ids_across_trees(
                specific_files, [self.basis_tree, self.work_tree])

        # Setup the progress bar. As the number of files that need to be
        # committed in unknown, progress is reported as stages.
        # We keep track of entries separately though and include that
        # information in the progress bar during the relevant stages.
        self.pb_stage_name = ""
        self.pb_stage_count = 0
        self.pb_stage_total = 5
        if self.bound_branch:
            self.pb_stage_total += 1
        self.pb.show_pct = False
        self.pb.show_spinner = False
        self.pb.show_eta = False
        self.pb.show_count = True
        self.pb.show_bar = True

        self.basis_inv = self.basis_tree.inventory
        self._gather_parents()
        # After a merge, a selected file commit is not supported.
        # See 'bzr help merge' for an explanation as to why.
        if len(self.parents) > 1 and self.specific_files:
            raise errors.CannotCommitSelectedFileMerge(self.specific_files)
        # Excludes are a form of selected file commit.
        if len(self.parents) > 1 and self.exclude:
            raise errors.CannotCommitSelectedFileMerge(self.exclude)

        # Collect the changes
        self._set_progress_stage("Collecting changes",
                                 entries_title="Directory")
        self.builder = self.branch.get_commit_builder(self.parents,
            self.config, timestamp, timezone, committer, revprops, rev_id)

        try:
            # find the location being committed to
            if self.bound_branch:
                master_location = self.master_branch.base
            else:
                master_location = self.branch.base

            # report the start of the commit
            self.reporter.started(new_revno, self.rev_id, master_location)

            self._update_builder_with_changes()
            self._report_and_accumulate_deletes()
            self._check_pointless()

            # TODO: Now the new inventory is known, check for conflicts.
            # ADHB 2006-08-08: If this is done, populate_new_inv should not add
            # weave lines, because nothing should be recorded until it is known
            # that commit will succeed.
            self._set_progress_stage("Saving data locally")
            self.builder.finish_inventory()

            # Prompt the user for a commit message if none provided
            message = message_callback(self)
            self.message = message
            self._escape_commit_message()

            # Add revision data to the local branch
            self.rev_id = self.builder.commit(self.message)

        except:
            # Any failure past this point must abort the builder so no
            # partial revision data is left behind.
            self.builder.abort()
            raise

        self._process_pre_hooks(old_revno, new_revno)

        # Upload revision data to the master.
        # this will propagate merged revisions too if needed.
        if self.bound_branch:
            if not self.master_branch.repository.has_same_location(
                    self.branch.repository):
                self._set_progress_stage("Uploading data to master branch")
                self.master_branch.repository.fetch(self.branch.repository,
                                                    revision_id=self.rev_id)
            # now the master has the revision data
            # 'commit' to the master first so a timeout here causes the
            # local branch to be out of date
            self.master_branch.set_last_revision_info(new_revno,
                                                      self.rev_id)

        # and now do the commit locally.
        self.branch.set_last_revision_info(new_revno, self.rev_id)

        # Make the working tree up to date with the branch
        self._set_progress_stage("Updating the working tree")
        self.work_tree.update_basis_by_delta(self.rev_id,
                                             self._basis_delta)
        self.reporter.completed(new_revno, self.rev_id)
        self._process_post_hooks(old_revno, new_revno)
    finally:
        # Releases locks and the progress bar whether or not the commit
        # succeeded.
        self._cleanup()
    return self.rev_id
def __init__(self, command):
    """Report that *command* may not be run from the shell wrapper."""
    BzrError.__init__(
        self,
        "The command %s is blacklisted for shell use" % command)
def __init__(self, command):
    """Report that *command* may not be run from the shell wrapper."""
    BzrError.__init__(self,
                      "The command %s is blacklisted for shell use" % command)
def __init__(self, fileid, revid):
    """Record the file id and revision id this error refers to."""
    BzrError.__init__(self)
    self.fileid = fileid
    self.revid = revid