def get_workspace_section(self, name):
    '''Return the workspace section registered under the given name.

    Raises SemanticError when the name is unknown or when it refers
    to a channel section rather than a workspace section.
    '''
    if name not in self.sections:
        raise SemanticError('%s is not known workspace' % name)
    candidate = self.sections[name][0]
    if candidate.__class__ == RemoteWorkspaceSection:
        return candidate
    raise SemanticError('%s is a channel, not a workspace' % name)
def get_new_directory(self):
    '''Pop and return a "new directory" command-line argument.

    The named directory must not already exist on disk; otherwise a
    SemanticError is raised.
    '''
    candidate = self.pop_arg('new directory')
    if not os.path.exists(candidate):
        return candidate
    raise SemanticError('Already exists: "%s"' % candidate)
def cat(self, filename, revision='HEAD'):
    '''Open the historical version of filename at the given revision.

    Returns a readable handle on the blob contents.  Raises
    SemanticError when the repository has no commits yet or when the
    file is absent from the named revision.
    '''
    self.assert_no_dirs([filename])
    if self.is_new():
        raise SemanticError('Empty version control. Need an initial commit.')
    matches = list(self.git.iter_ls_tree(revision, [filename]))
    # ls-tree on a single path yields at most one entry.
    assert len(matches) in (0, 1)
    if not matches:
        raise SemanticError('No file by that name in version %s' % revision)
    # Third field of the ls-tree record is the blob id.
    return self.git.get_blob(matches[0][2])
def add(self, files):
    """Add the given files to version control.

    Raises SemanticError when any named file is a directory, is
    already known to version control, or is missing from the work
    directory.
    """
    # NOTE(review): original docstring said "Initialize version control",
    # a copy-paste error; this method stages files for addition.
    self.assert_no_dirs(files)
    self.assert_known(files, False)
    for name in files:
        # Files must exist in the work dir before they can be added.
        if not os.path.exists(pjoin(self.work_dir, name)):
            message = 'File %s missing.' % name
            raise SemanticError(message)
    add_remove = self.get_add_remove()
    add_remove.add(files)
    add_remove.save()
def remove(self, files, force):
    """Remove the given files from version control.

    When a file still exists in the work directory it is only deleted
    if force is true; otherwise a SemanticError asks the user to
    remove it first.
    """
    self.assert_no_dirs(files)
    self.assert_known(files, True)
    for name in files:
        full_path = pjoin(self.work_dir, name)
        if os.path.exists(full_path):
            if force:
                # Bug fix: the existence check used the work-dir path but
                # the unlink used the bare name, which resolved against the
                # current working directory instead of self.work_dir.
                os.unlink(full_path)
            else:
                message = 'File %s exists. Remove it and retry.' % name
                raise SemanticError(message)
    add_remove = self.get_add_remove()
    add_remove.remove(files)
    add_remove.save()
def build(self, sections_iterator, index_file):
    '''Build up IndexedWorldData from the data in the given sections.

    Walks every (section_name, section) pair, writes each package
    record into a fresh index via IndexWriter, and keys it by several
    ghost fields plus blob id and section name.  Raises SemanticError
    when a section's channel data has not been cached yet.
    '''
    # Ghost fields worth indexing; each is looked up under the 'pdk'
    # namespace tuple key below.
    indexed_field_names = ('name', 'sp-name', 'source-rpm', 'filename')
    indexed_fields = [('pdk', f) for f in indexed_field_names]
    index_writer = IndexWriter(index_file)
    index_writer.init()
    try:
        for section_name, section in sections_iterator:
            section_iterator = section.iter_package_info()
            for ghost, header, blob_id, locator in section_iterator:
                # ghost may be falsy; only real ghosts carry a type
                # and indexable fields.
                if ghost:
                    type_string = ghost.type
                else:
                    type_string = None
                # addresses is an opaque handle returned by the writer;
                # presumably the record's location(s) in the index file
                # -- confirm against IndexWriter.
                addresses = index_writer.add(type_string, header,
                                             blob_id, locator)
                if ghost:
                    index_keys = []
                    for field in indexed_fields:
                        # Skip fields this ghost does not define.
                        try:
                            value = ghost[field]
                        except KeyError:
                            continue
                        key = (section_name, field, value)
                        index_keys.append(key)
                    index_writer.index(index_keys, addresses)
                # Every record is also findable by blob id and by its
                # section (channel) name, ghost or not.
                ent_id_key = ('ent-id', blob_id)
                channel_key = section_name
                index_writer.index([ent_id_key, channel_key], addresses)
        # Finalize and drop our reference so the file is fully written
        # and closed before it is reloaded.
        index_writer.terminate()
        del index_writer
        # Next time index_file is accessed, it will be reloaded,
        # therefore actually reading the new file we just wrote!
        del self.index_file
    except MissingChannelDataError:
        # section_name is bound by the loop above; the missing-data
        # error surfaces while iterating a section's package info.
        message = 'Missing cached data. ' + \
            'Consider running pdk channel update. ' + \
            '(%s)' % section_name
        raise SemanticError(message)