def _revert(self, package):
    """Revert all local modifications of a tracked package.

    A ValueError is raised if the package is untracked. No
    "transaction" is required here: an interrupted revert can simply
    be re-run later (in the worst case after a repair of the
    project wc).

    """
    global _STORE
    state = self._status(package)
    if state == '?':
        raise ValueError("cannot revert untracked package: %s" % package)
    if state == 'A':
        # an added package is reverted by dropping it completely
        pkg_path = os.path.join(self.path, package)
        store_link = os.path.join(pkg_path, _STORE)
        if os.path.islink(store_link):
            os.unlink(store_link)
        # TODO: refactor this code path (and the one in _remove_wc_dir)
        # into a new function in the util module
        data_dir = wc_pkg_data_filename(self.path, package)
        if os.path.exists(data_dir):
            shutil.rmtree(data_dir)
        self._packages.remove(package)
        self._packages.write()
        return
    # just revert the package
    pkg = self.package(package)
    if pkg is None:
        # the package wc is broken/missing - recreate it first
        pkg_path = os.path.join(self.path, package)
        data_dir = wc_pkg_data_filename(self.path, package)
        wc_init(pkg_path, ext_storedir=data_dir)
        # now the package can be obtained
        pkg = self.package(package)
    pkg.revert()
    if state != ' ':
        self._packages.set(package, ' ')
        self._packages.write()
def repair(path, ext_storedir=None, revision='latest', **kwargs):
    """Repair a working copy.

    path is the path to the package working copy.

    Keyword arguments:
    project -- name of the project (default: '')
    package -- name of the package (default: '')
    apiurl -- apiurl is the apiurl (default: '')
    revision -- the revision of the package (default: 'latest')
    ext_storedir -- path to the storedir (default: None)

    """
    global _PKG_DATA
    if not os.path.exists(_storedir(path)):
        wc_init(path, ext_storedir=ext_storedir)
    missing, xml_data, pkg_data = Package.wc_check(path)
    # recreate the simple metadata storefiles from the kwargs
    for fname in ('_project', '_package', '_apiurl'):
        if fname not in missing:
            continue
        key = fname[1:]
        if key not in kwargs:
            raise ValueError("%s argument required" % key)
        writer = globals()['wc_write_' + key]
        writer(path, kwargs[key])
    project = wc_read_project(path)
    package = wc_read_package(path)
    apiurl = wc_read_apiurl(path)
    if '_files' in missing or xml_data:
        # refetch the file list from the server
        spkg = SourcePackage(project, package)
        remote_dir = spkg.list(rev=revision, apiurl=apiurl)
        wc_write_files(path, etree.tostring(remote_dir, pretty_print=True))
    if '_version' in missing:
        wc_write_version(path)
    if _PKG_DATA in missing:
        os.mkdir(wc_pkg_data_filename(path, ''))
    files = wc_read_files(path)
    # check again - only pkg_data left
    missing, xml_data, pkg_data = Package.wc_check(path)
    # re-download missing storefiles
    for fname in pkg_data:
        target = wc_pkg_data_filename(path, fname)
        files.find(fname).file().write_to(target)
    # clean unused storefiles
    data_dir = wc_pkg_data_filename(path, '')
    for fname in os.listdir(data_dir):
        if files.find(fname) is None:
            os.unlink(os.path.join(data_dir, fname))
def _perform_merges(self, ustate):
    """Merge remotely modified files into the working copy.

    For each modified file a three-way merge between the local copy
    ("mine"), the old storefile and the new remote file ("yours") is
    performed; conflicts leave a ".revSRCMD5" copy next to the wc file.

    """
    uinfo = ustate.info
    states = ustate.entrystates
    for fname in uinfo.modified:
        wc_fname = os.path.join(self.path, fname)
        old_fname = wc_pkg_data_filename(self.path, fname)
        your_fname = os.path.join(ustate.location, fname)
        st = states[fname]
        if st == '!' or (st == 'D' and not os.path.exists(wc_fname)):
            # no local file to merge with - use the storefile
            my_fname = old_fname
        else:
            # XXX: in some weird cases wc_filename.mine might be a tracked
            # file - for now overwrite it
            my_fname = wc_fname + '.mine'
            # a rename would be more efficient but also more error prone
            # (if a update is interrupted)
            copy_file(wc_fname, my_fname)
        merge = self.merge_class()
        ret = merge.merge(my_fname, old_fname, your_fname, wc_fname)
        if ret == Merge.SUCCESS:
            ustate.processed(fname, 'D' if st == 'D' else ' ')
            os.unlink(my_fname)
        elif ret in (Merge.CONFLICT, Merge.BINARY, Merge.FAILURE):
            copy_file(your_fname, wc_fname + '.rev%s' % uinfo.srcmd5)
            ustate.processed(fname, 'C')
        # copy over new storefile
        os.rename(your_fname, old_fname)
        self.notifier.processed(fname, ustate.entrystates[fname], st)
def _commit(self, cstate, **kwargs): cinfo = cstate.info # FIXME: validation if cstate.state == CommitStateMixin.STATE_TRANSFER: cfilelist = self._calculate_commit_filelist(cinfo) missing = self._commit_filelist(cfilelist, **kwargs) send_filenames = self._read_send_files(missing) if send_filenames: self._commit_files(cstate, send_filenames) filelist = self._commit_filelist(cfilelist, **kwargs) else: filelist = missing cstate.append_filelist(filelist) cstate.state = CommitStateMixin.STATE_COMMITTING # only local changes left for filename in cinfo.deleted: store_filename = wc_pkg_data_filename(self.path, filename) # it might be already removed (if we resume a commit) if os.path.exists(store_filename): os.unlink(store_filename) cstate.processed(filename, None) self.notifier.processed(filename, None) for filename in os.listdir(cstate.location): wc_filename = os.path.join(self.path, filename) store_filename = wc_pkg_data_filename(self.path, filename) commit_filename = os.path.join(cstate.location, filename) if os.path.exists(store_filename): # just to reduce disk space usage os.unlink(store_filename) copy_file(commit_filename, wc_filename) os.rename(commit_filename, store_filename) self._files.merge(cstate.entrystates, cstate.filelist) # fixup mtimes for filename in self.files(): if self.status(filename) != ' ': continue entry = self._files.find(filename) wc_filename = os.path.join(self.path, filename) store_filename = wc_pkg_data_filename(self.path, filename) mtime = int(entry.get('mtime')) os.utime(wc_filename, (-1, mtime)) os.utime(store_filename, (-1, mtime)) cstate.cleanup() self.notifier.finished('commit', aborted=False)
def _perform_adds(self, ustate):
    """Install files added by the update into the working copy.

    Each new file is copied from the transaction location into the wc
    and then moved into the store (copy first so an interruption
    leaves the transaction file in place).

    """
    for fname in ustate.info.added:
        wc_fname = os.path.join(self.path, fname)
        store_fname = wc_pkg_data_filename(self.path, fname)
        new_fname = os.path.join(ustate.location, fname)
        copy_file(new_fname, wc_fname)
        ustate.processed(fname, ' ')
        os.rename(new_fname, store_fname)
        self.notifier.processed(fname, ' ', None)
def repair(path, project='', apiurl='', no_packages=False, **package_states):
    """Repair a working copy.

    path is the path to the project working copy.

    Keyword arguments:
    project -- the name of the project (default: '')
    apiurl -- the apiurl of the project (default: '')
    no_packages -- do not repair the project's packages (default: False)
    **package_states -- a package to state mapping (default: {})

    """
    global _PKG_DATA
    missing, xml_data, pkg_data = Project.wc_check(path)
    # restore the plain metadata storefiles first
    if '_project' in missing:
        if not project:
            raise ValueError('project argument required')
        wc_write_project(path, project)
    if '_apiurl' in missing:
        if not apiurl:
            raise ValueError('apiurl argument required')
        wc_write_apiurl(path, apiurl)
    if '_packages' in missing or xml_data:
        if not package_states:
            raise ValueError('package states required')
        wc_write_packages(path, '<packages/>')
        packages = wc_read_packages(path)
        for name, state in package_states.items():
            packages.add(name, state=state)
        packages.write()
    if '_version' in missing:
        wc_write_version(path)
    if _PKG_DATA in missing:
        os.mkdir(wc_pkg_data_filename(path, ''))
    if no_packages:
        return
    project = wc_read_project(path)
    apiurl = wc_read_apiurl(path)
    packages = wc_read_packages(path)
    # check again - only pkg data can be missing now
    missing, xml_data, pkg_data = Project.wc_check(path)
    for name in pkg_data:
        pkg_dir = os.path.join(path, name)
        if not os.path.isdir(pkg_dir):
            # package wc dir is gone - stop tracking the package
            packages.remove(name)
            packages.write()
            continue
        storedir = wc_pkg_data_mkdir(path, name)
        Package.repair(pkg_dir, project=project, package=name,
                       apiurl=apiurl, ext_storedir=storedir)
def repair(path, project='', apiurl='', no_packages=False, **package_states):
    """Repair a working copy.

    path is the path to the project working copy.

    Keyword arguments:
    project -- the name of the project (default: '')
    apiurl -- the apiurl of the project (default: '')
    no_packages -- do not repair the project's packages (default: False)
    **package_states -- a package to state mapping (default: {})

    """
    global _PKG_DATA
    missing, xml_data, pkg_data = Project.wc_check(path)
    if '_project' in missing:
        if not project:
            raise ValueError('project argument required')
        wc_write_project(path, project)
    if '_apiurl' in missing:
        if not apiurl:
            raise ValueError('apiurl argument required')
        wc_write_apiurl(path, apiurl)
    if '_packages' in missing or xml_data:
        if not package_states:
            raise ValueError('package states required')
        wc_write_packages(path, '<packages/>')
        packages = wc_read_packages(path)
        # fix: dict.iteritems() does not exist in Python 3 - use items()
        # (this also matches the sibling repair implementation)
        for package, st in package_states.items():
            packages.add(package, state=st)
        packages.write()
    if '_version' in missing:
        wc_write_version(path)
    if _PKG_DATA in missing:
        os.mkdir(wc_pkg_data_filename(path, ''))
    if not no_packages:
        project = wc_read_project(path)
        apiurl = wc_read_apiurl(path)
        packages = wc_read_packages(path)
        # check again - only pkg data left
        missing, xml_data, pkg_data = Project.wc_check(path)
        for package in pkg_data:
            package_path = os.path.join(path, package)
            if os.path.isdir(package_path):
                storedir = wc_pkg_data_mkdir(path, package)
                Package.repair(package_path, project=project,
                               package=package, apiurl=apiurl,
                               ext_storedir=storedir)
            else:
                # package wc dir is gone - stop tracking the package
                packages.remove(package)
                packages.write()
def _perform_deletes_or_skips(self, ustate, listname, new_state):
    """Remove files listed in getattr(uinfo, listname) from the wc.

    The wc copy is only unlinked if it is unmodified (its md5 matches
    the storefile's md5); the storefile itself is always removed.
    Each processed file is recorded with state new_state.

    """
    uinfo = ustate.info
    for fname in getattr(uinfo, listname):
        wc_fname = os.path.join(self.path, fname)
        store_fname = wc_pkg_data_filename(self.path, fname)
        st = self.status(fname)
        store_md5 = ''
        if os.path.exists(store_fname):
            store_md5 = file_md5(store_fname)
        unmodified = (os.path.isfile(wc_fname)
                      and file_md5(wc_fname) == store_md5)
        if unmodified:
            os.unlink(wc_fname)
        if store_md5:
            os.unlink(store_fname)
        ustate.processed(fname, new_state)
        self.notifier.processed(fname, new_state, st)
def _remove_wc_dir(self, package, notify=False):
    """Remove a package's working copy directory and its store data.

    All tracked files are removed first; the wc dir itself is only
    removed if it is empty afterwards (untracked files are preserved).

    Keyword arguments:
    notify -- notify the listener about each removed file (default: False)

    """
    pkg = self.package(package)
    if pkg is not None:
        for filename in pkg.files():
            st = pkg.status(filename)
            pkg.remove(filename)
            if notify:
                self.notifier.processed(filename, None, st)
        store = os.path.join(pkg.path, _STORE)
        if os.path.exists(store) and os.path.islink(store):
            os.unlink(store)
        # fix: test emptiness directly instead of copying os.listdir's
        # result into a throwaway list comprehension
        if not os.listdir(pkg.path):
            os.rmdir(pkg.path)
    store = wc_pkg_data_filename(self.path, package)
    if os.path.exists(store):
        shutil.rmtree(store)
def _update(self, ustate):
    """Execute (or resume) the package update described by ustate.

    Downloads only happen in the prepare phase; the merge/add/delete/
    skip steps run unconditionally so that an interrupted update can
    be resumed.

    """
    if ustate.state == UpdateStateMixin.STATE_PREPARE:
        info = ustate.info
        self._download(ustate.location, info.data, *info.added)
        self._download(ustate.location, info.data, *info.modified)
        ustate.state = UpdateStateMixin.STATE_UPDATING
    self._perform_merges(ustate)
    self._perform_adds(ustate)
    self._perform_deletes(ustate)
    self._perform_skips(ustate)
    # if a merge/add was interrupted the storefile wasn't copied -
    # move any leftovers into the store now
    for fname in os.listdir(ustate.location):
        src = os.path.join(ustate.location, fname)
        dst = wc_pkg_data_filename(self.path, fname)
        os.rename(src, dst)
    self._files.merge(ustate.entrystates, ustate.info.remote_xml)
    ustate.cleanup()
    self.notifier.finished('update', aborted=False)
def _revert(self, filename):
    """Revert local modifications of a single tracked file.

    A ValueError is raised for conflicted, untracked and skipped
    files, which cannot be reverted.

    """
    st = self.status(filename)
    wc_fname = os.path.join(self.path, filename)
    store_fname = wc_pkg_data_filename(self.path, filename)
    entry = self._files.find(filename)
    if st == 'C':
        raise ValueError("cannot revert conflicted file: %s" % filename)
    if st == '?':
        raise ValueError("cannot revert untracked file: %s" % filename)
    if st == 'S':
        raise ValueError("cannot revert skipped file: %s" % filename)
    if st == 'A' or (st == '!' and entry.get('state') == 'A'):
        # an added file is reverted by simply untracking it
        self._files.remove(filename)
    elif st == 'D':
        self._files.set(filename, ' ')
        if not os.path.exists(wc_fname):
            copy_file(store_fname, wc_fname)
    elif st in ('M', '!'):
        # restore the pristine copy from the store
        self._files.set(filename, ' ')
        copy_file(store_fname, wc_fname)
    self._files.write()
def _perform_adds(self, ustate, **kwargs):
    """Check out packages that were added by the project update.

    Each new package is checked out into the transaction location
    first and only moved into the project wc afterwards, so an
    interrupted update can be resumed (the checkout itself is skipped
    when resuming in STATE_UPDATING).

    """
    uinfo = ustate.info
    tl = self.notifier.listener
    for package in uinfo.added:
        tmp_dir = os.path.join(ustate.location, package)
        storedir = wc_pkg_data_filename(self.path, package)
        if ustate.state == UpdateStateMixin.STATE_PREPARE:
            # initial checkout into the transaction dir
            os.mkdir(storedir)
            pkg = Package.init(tmp_dir, self.name, package, self.apiurl,
                               storedir, transaction_listener=tl)
            pkg.update(**kwargs)
            ustate.state = UpdateStateMixin.STATE_UPDATING
        # fixup symlink: the store link was created relative to tmp_dir
        # and has to point to storedir from the final wc location
        new_dir = os.path.join(self.path, package)
        path = os.path.relpath(storedir, new_dir)
        old_storelink = _storedir(tmp_dir)
        if os.path.isdir(tmp_dir):
            if os.path.exists(old_storelink):
                os.unlink(old_storelink)
            os.symlink(path, old_storelink)
            # move the finished checkout into the project wc
            os.rename(tmp_dir, new_dir)
        ustate.processed(package, ' ')
        self.notifier.processed(package, ' ', None)
def diff(self, diff, *filenames, **kwargs):
    """Initialize diff object.

    filenames are the working copy filenames which should be
    considered. If no filenames are specified all working copy
    files will be used.
    A ValueError is raised if a filename is not tracked.

    Keyword arguments:
    revision -- diff against the remote revision revision (default: '')

    """
    def consider_filenames(info, filenames):
        # restrict the info lists to the requested filenames
        if set(filenames) == set(self.files()):
            return
        # only consider filenames
        remove = []
        for filename in info:
            if filename not in filenames:
                remove.append(filename)
        for filename in remove:
            info.remove(filename)

    untracked = [f for f in filenames if self.status(f) == '?']
    if untracked:
        msg = ("diff not possible untracked files: %s"
               % ', '.join(untracked))
        raise ValueError(msg)
    revision = kwargs.get('revision', '')
    if not filenames:
        filenames = self.files()
    diff.wc_path = self.path
    diff.revision_data = self._files.revision_data()
    if revision:
        # diff against a remote revision: base the info on the
        # remote filelist and rewrite it into "local vs. remote" terms
        spkg = SourcePackage(self.project, self.name)
        directory = spkg.list(rev=revision, apiurl=self.apiurl)
        info = self._calculate_updateinfo(remote_files=directory)
        consider_filenames(info, filenames)
        # swap added and deleted (updateinfo is remote-centric,
        # the diff is local-centric)
        tmp = info.added
        info.added = info.deleted
        info.deleted = tmp
        # treat files with state 'A' as added
        local_added = [f for f in info.unchanged if self.status(f) == 'A']
        for filename in local_added:
            info.unchanged.remove(filename)
            info.added.append(filename)
        # check for missing files
        missing = [f for f in info if self.status(f) == '!']
        # treat files with state 'D' as deleted
        deleted = [f for f in info if self.status(f) == 'D']
        for filename in missing + deleted:
            info.remove(filename)
        info.conflicted.extend(missing)
        info.deleted.extend(deleted)
        diff._remote_files = directory
        srcmd5 = directory.get('srcmd5')
        diff.old_path = wc_diff_mkdir(self.path, srcmd5)
        diff.revision_data = {'rev': revision, 'srcmd5': srcmd5}
    else:
        # diff against the store: skipped files are excluded
        info = self._calculate_commitinfo(*filenames)
        consider_filenames(info, filenames)
        skipped = [f for f in info.unchanged if self.status(f) == 'S']
        for filename in skipped:
            info.remove(filename)
        info.skipped = skipped
        diff.old_path = wc_pkg_data_filename(self.path, '')
    # feed the classified filenames into the diff object
    listnames = ('added', 'deleted', 'modified', 'unchanged', 'skipped')
    for listname in listnames:
        for filename in getattr(info, listname):
            diff.append(filename, listname)
    for filename in info.conflicted:
        if self.status(filename) == '!':
            diff.append(filename, 'missing')
        else:
            diff.append(filename, 'modified')
def convert_package(path, ext_storedir=None, **kwargs):
    """Convert working copy to the new format.

    path is the path to the package working copy.

    Keyword arguments:
    project -- name of the project (default: '')
    package -- name of the package (default: '')
    apiurl -- apiurl is the apiurl (default: '')
    ext_storedir -- path to the external storedir (default: None)

    """
    data_path = wc_pkg_data_filename(path, '')
    if not os.path.exists(data_path):
        os.mkdir(data_path)
    if missing_storepaths(path, '_project'):
        project = kwargs.get('project', '')
        if not project:
            raise ValueError('project argument required')
    else:
        project = wc_read_project(path)
    # read and remove the old state-tracking storefiles
    deleted = []
    added = []
    conflicted = []
    if os.path.exists(_storefile(path, '_to_be_deleted')):
        deleted = _read_storefile(path, '_to_be_deleted').split()
        os.unlink(_storefile(path, '_to_be_deleted'))
    if os.path.exists(_storefile(path, '_to_be_added')):
        added = _read_storefile(path, '_to_be_added').split()
        os.unlink(_storefile(path, '_to_be_added'))
    if os.path.exists(_storefile(path, '_in_conflict')):
        conflicted = _read_storefile(path, '_in_conflict').split()
        os.unlink(_storefile(path, '_in_conflict'))
    try:
        files = wc_read_files(path)
    except ValueError:
        files = None
    if files is not None:
        # move the storefiles into the new data dir and record the
        # per-file states in the files list
        files._xml.set('project', project)
        for entry in files:
            filename = entry.get('name')
            store = _storefile(path, filename)
            data = wc_pkg_data_filename(path, filename)
            if os.path.exists(store):
                os.rename(store, data)
            if filename in added:
                files.set(filename, 'A')
            elif filename in deleted:
                files.set(filename, 'D')
            elif filename in conflicted:
                files.set(filename, 'C')
            else:
                files.set(filename, ' ')
        for filename in added:
            if files.find(filename) is None:
                files.add(filename, 'A')
        files.write()
    # fix: the old code tested the (always truthy) path string returned
    # by _storefile instead of checking that the file exists, so the
    # unlink raised OSError whenever _osclib_version was absent
    if os.path.exists(_storefile(path, '_osclib_version')):
        os.unlink(_storefile(path, '_osclib_version'))
    if ext_storedir is not None:
        # move all files to the new location
        storedir = _storedir(path)
        for filename in os.listdir(_storefile(path, '')):
            old = os.path.join(storedir, filename)
            new = os.path.join(ext_storedir, filename)
            os.rename(old, new)
        os.rmdir(storedir)
        os.symlink(os.path.relpath(ext_storedir, path), storedir)
    Package.repair(path, ext_storedir=ext_storedir, **kwargs)