def verify_add_remove(self, add_remove):
    '''Verify that the (non)existence of each file in add_remove is valid.'''
    for filename in add_remove.get_added_files():
        full_path = pjoin(self.work_dir, filename)
        if not os.path.exists(full_path):
            raise SemanticError('Missing added file "%s".' % full_path)
    for filename in add_remove.get_removed_files():
        full_path = pjoin(self.work_dir, filename)
        if os.path.exists(full_path):
            raise SemanticError('Removed file "%s" still exists; '
                                'delete it.' % full_path)
    if not self.is_new():
        for item in self.git.iter_ls_tree('HEAD', []):
            # Don't bother verifying directories.
            mode = item[0]
            if mode.startswith('04'):
                continue
            filename = item[3]
            if add_remove.get_status(filename):
                continue
            full_path = pjoin(self.work_dir, filename)
            if not os.path.exists(full_path):
                message = 'Missing known file "%s".' % full_path
                raise SemanticError(message)
def test_writer_outer_release(self):
    handle = LazyWriter(pjoin(self.search_path, 'data', 'Release'))
    print >> handle, 'testing123'
    handle.close()
    release_path = pjoin(self.search_path, 'Release')
    release_handle = LazyWriter(release_path)
    self.writer.write_outer(release_handle)
    release_handle.close()
    expected_release = """Origin: Debian
Label: Debian2
Suite: happy
Version: 3.0r4
Codename: woody
Date: Wed, 22 Mar 2005 21:20:00 UTC
Architectures: alpha i386
Components: main contrib
Description: Hello World!
MD5Sum:
 bad9425ff652b1bd52b49720abecf0ba 11 data/Release
SHA1:
 e3dc8362c1586e4d9702ad862f29b6bef869afde 11 data/Release
"""
    actual_release = self.read_file(release_path)
    # Newer versions of apt-ftparchive append further checksum sections
    # after these.  For compatibility's sake, overlook them for now and
    # compare only the first thirteen lines.
    edited_release = ''.join(actual_release.splitlines(True)[:13])
    self.assert_equals_long(expected_release, edited_release)
def get_one_dir(self, section, arch):
    """Return the index directory path for a given section and
    architecture.
    """
    base = pjoin(self.repo_dir, self.dist)
    if arch == 'source':
        arch_dir = 'source'
    else:
        arch_dir = 'binary-%s' % arch
    return pjoin(base, section, arch_dir)
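# Usage sketch for get_one_dir, assuming a repo rooted at 'repo' with a
# dist of 'dists/happy' (matching the test fixture in this module).
# The values shown are illustrative, not captured output:
#
#   >>> repo.get_one_dir('main', 'i386')
#   'repo/dists/happy/main/binary-i386'
#   >>> repo.get_one_dir('main', 'source')
#   'repo/dists/happy/main/source'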
def __init__(self, work_path, git_path): self.work_dir = work_path self.vc_dir = git_path self.priv_dir = pjoin(git_path, 'pdk') self.add_remove_file = pjoin(self.priv_dir, 'add-remove') self.alt_index = pjoin(self.priv_dir, 'pdk-index') self.exclude = pjoin(git_path, 'info', 'exclude') self.git = Git(self.work_dir, self.vc_dir, self.priv_dir, self.exclude, None) self.alt_git = Git(self.work_dir, self.vc_dir, self.priv_dir, self.exclude, self.alt_index)
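# Note on the two Git wrappers: self.git drives git's default index,
# while self.alt_git points at the private 'pdk-index' file, so
# commit-time staging can be done without disturbing the user's working
# index.  (This reading is an interpretation based on how alt_git is
# used in update_index and commit below.)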
def test_extra_pool_locations(self):
    self.assert_equal({}, self.bin_injector.get_extra_pool_locations())
    src_package_path = self.src_injector.get_pool_location()
    src_package_dir = pjoin(src_package_path, '..')
    extras = dict([ (pjoin(src_package_dir, filename), blob_id)
                    for blob_id, dummy, filename
                    in self.src.pdk.extra_file ])
    actual_extras = self.src_injector.get_extra_pool_locations()
    self.assert_equals_long(extras, actual_extras)
def test_get_links(self):
    package_location = self.src_injector.get_pool_location()
    package_dir = pjoin(package_location, '..')
    expected = {
        package_location: self.src.blob_id,
        pjoin(package_dir, 'apache2_2.0.53-5.diff.gz'):
            'md5:0d060d66b3a1e6ec0b9c58e995f7b9f7',
        pjoin(package_dir, 'apache2_2.0.53.orig.tar.gz'):
            'md5:40507bf19919334f07355eda2df017e5'
    }
    actual = self.src_injector.get_links()
    self.assert_equals_long(expected, actual)
def write_releases(self, writer):
    """Write all Release files for the repository."""
    for section in self.sections:
        for arch in self.arches:
            release_path = pjoin(self.get_one_dir(section, arch),
                                 'Release')
            handle = LazyWriter(release_path)
            writer.write(handle, section, arch)
    release_path = pjoin(self.repo_dir, self.dist, 'Release')
    writer.write_outer(LazyWriter(release_path))
    writer.sign_outer(LazyWriter(release_path + '.gpg'))
def update_index(self, add_remove, files, git):
    '''Write from scratch a correct index file for this workspace.'''
    if not self.is_new():
        git.run_read_tree('HEAD')
        git.refresh_index()
    # If no files are provided, git ls-files will list all files.
    # We rely on that.
    removed_files = add_remove.filter_remove_files(files)
    git.run_update_index(removed_files, force_remove_flag=True)
    # Find all modified files.
    # Be overly trusting.  If we really cared about whether all the
    # files existed we would have called verify_add_remove before
    # calling this function.
    iter_modified = list(git.iter_ls_files(files, modified_flag=True))
    modified_files = []
    for filename in iter_modified:
        full_path = pjoin(self.work_dir, filename)
        if os.path.exists(full_path):
            modified_files.append(filename)
    modified_files.extend(add_remove.filter_add_files(files))
    git.run_update_index(modified_files, add_flag=True)
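# Rough flow of update_index, assuming the Git wrapper methods map onto
# the underlying git plumbing as their names suggest:
#
#   1. git read-tree HEAD; refresh index   (skipped for a brand-new repo)
#   2. git update-index --force-remove     on files scheduled for removal
#   3. git ls-files --modified             to find changed files still on disk
#   4. git update-index --add              on changed plus newly added files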
def test_write_releases(self):
    calls = Set()
    outer = Set()
    class MockWriter(object):
        def write(self, handle, section, arch):
            calls.add((handle.name, section, arch))
        def write_outer(self, handle):
            outer.add(handle.name)
        def sign_outer(self, handle):
            # write_releases also signs the outer Release file; a no-op
            # stub keeps the mock complete.
            pass
    writer = MockWriter()
    self.repo.write_releases(writer)
    self.assert_equals(6, len(calls))
    def make_tuple(section, arch):
        release_path = pjoin(self.repo.get_one_dir(section, arch),
                             'Release')
        return (release_path, section, arch)
    assert make_tuple('main', 'i386') in calls
    assert make_tuple('main', 'sparc') in calls
    assert make_tuple('main', 'source') in calls
    assert make_tuple('contrib', 'i386') in calls
    assert make_tuple('contrib', 'sparc') in calls
    assert make_tuple('contrib', 'source') in calls
    release_path = pjoin(self.repo.repo_dir, 'dists', 'happy', 'Release')
    expected = Set([release_path])
    self.assert_equal(expected, outer)
def assert_no_dirs(self, files):
    '''Assert that none of the given files is a directory.'''
    for filename in files:
        if os.path.isdir(pjoin(self.work_dir, filename)):
            message = 'VC does not operate on directories: "%s"' \
                      % filename
            raise InputError(message)
def commit(self, commit_message_file, commit_message, files):
    '''Commit this workspace.'''
    add_remove = self.get_add_remove()
    self.assert_no_dirs(files)
    self.verify_add_remove(add_remove)
    self.update_index(add_remove, files, self.alt_git)
    # Look for any files which need to be implicitly added, and add
    # them all.
    all_files = Set(self.alt_git.iter_ls_files([]))
    given_files = Set(files)
    removed_files = add_remove.get_removed_files()
    implicit_add_files = given_files - all_files - removed_files
    for implicit_add in implicit_add_files:
        full_path = pjoin(self.work_dir, implicit_add)
        if not os.path.exists(full_path):
            message = 'Missing file "%s".' % implicit_add
            raise SemanticError(message)
    self.alt_git.run_update_index(implicit_add_files, add_flag=True)
    self.alt_git.run_commit(commit_message_file, commit_message)
    shell_command('git update-index --add --remove --refresh %s'
                  % ' '.join(all_files))
    add_remove.clear(files)
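# Worked example of the implicit-add computation above, with
# hypothetical file sets:
#
#   given_files        = Set(['a.txt', 'b.txt'])  # named on the command line
#   all_files          = Set(['a.txt'])           # already known to the index
#   removed_files      = Set([])                  # scheduled for removal
#   implicit_add_files = given_files - all_files - removed_files
#                      = Set(['b.txt'])           # staged with --add, then committed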
def set_up(self):
    super(DebianPoolFixture, self).set_up()
    self.repo = DebianDirectPoolRepo(pjoin(self.work_dir, '.'),
                                     'dists/happy',
                                     Set(['i386', 'sparc', 'source']),
                                     Set(['main', 'contrib']),
                                     pjoin(self.work_dir, 'repo'))
def unlink_index(self):
    '''Remove the index file if it exists.'''
    if self.index_file:
        index_file = self.index_file
    else:
        index_file = pjoin(self.git_dir, 'index')
    if os.path.exists(index_file):
        os.unlink(index_file)
def get_pool_dir(self):
    """Return the top-level absolute path for the pool."""
    if self.package.role == 'binary':
        name = self.package.pdk.sp_name
    else:
        name = self.package.pdk.name
    return pjoin(self.repo_dir, 'pool', self.section, name[0], name)
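# Layout sketch: this follows the Debian pool convention,
# pool/<section>/<initial>/<source name>, with binary packages filed
# under their source package's name (sp_name).  For an apache2 binary
# package in 'main' under a repo_dir of 'repo' (hypothetical values):
#
#   >>> injector.get_pool_dir()
#   'repo/pool/main/a/apache2'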
def test_lazy_writer(self):
    full_name = pjoin(self.repo.tmp_dir, 'asdf')
    handle = LazyWriter(full_name)
    print >> handle, 'hello'
    handle.close()
    self.fail_unless(os.path.exists(full_name))
    handle = open(full_name)
    self.assert_equal('hello\n', handle.read())
    handle.close()
def create(self):
    """Populate self.vc_dir with a git skeleton."""
    self.git.run_init_db()
    remotes_dir = pjoin(self.vc_dir, 'remotes')
    if not os.path.exists(remotes_dir):
        os.makedirs(remotes_dir)
    os.makedirs(self.priv_dir)
    print >> open(self.exclude, 'w'), 'etc'
def get_extra_pool_locations(self):
    """Return a dictionary mapping pool locations to filerefs.

    This method only handles extra files (diff.gz etc.).
    """
    if not hasattr(self.package.pdk, 'extra_file'):
        return {}
    pool_dir = self.get_pool_dir()
    return dict([ (pjoin(pool_dir, filename), blob_id)
                  for blob_id, dummy, filename
                  in self.package.pdk.extra_file ])
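# Shape of the returned mapping for a source package carrying a
# .diff.gz and an .orig.tar.gz (paths and blob ids taken from the tests
# in this module; treat them as illustrative):
#
#   { 'repo/pool/main/a/apache2/apache2_2.0.53-5.diff.gz':
#         'md5:0d060d66b3a1e6ec0b9c58e995f7b9f7',
#     'repo/pool/main/a/apache2/apache2_2.0.53.orig.tar.gz':
#         'md5:40507bf19919334f07355eda2df017e5' }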
def add(self, files):
    """Schedule files for addition to version control."""
    self.assert_no_dirs(files)
    self.assert_known(files, False)
    for name in files:
        if not os.path.exists(pjoin(self.work_dir, name)):
            message = 'File %s missing.' % name
            raise SemanticError(message)
    add_remove = self.get_add_remove()
    add_remove.add(files)
    add_remove.save()
def test_write_inner_release(self):
    dest = LazyWriter(pjoin(self.work_dir, 'Release'))
    self.writer.write(dest, 'main', 'i386')
    dest.close()
    expected = """Archive: stable
Version: 3.0r4
Component: main
Origin: Debian
Label: Debian2
Architecture: i386
"""
    actual = self.read_file('Release')
    self.assert_equals_long(expected, actual)
def get_relative_pool_path(self):
    """Return the top-level path for the pool, relative to what will
    become the base URI for the repository.
    """
    abs_path = str(self.get_pool_dir())
    rel_path = ""
    fn = ""
    psplit = os.path.split
    # Peel components off the right end of the absolute path until we
    # reach the 'pool' component, accumulating the relative path as we
    # go.  'pool' itself is prepended on the final iteration.
    while fn != "pool":
        (abs_path, fn) = psplit(abs_path)
        if rel_path:
            rel_path = pjoin(fn, rel_path)
        else:
            rel_path = fn
    return rel_path
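# Step-through of the loop above for a hypothetical pool dir of
# '/srv/repo/pool/main/a/apache2':
#
#   iteration 1: fn = 'apache2'  rel_path = 'apache2'
#   iteration 2: fn = 'a'        rel_path = 'a/apache2'
#   iteration 3: fn = 'main'     rel_path = 'main/a/apache2'
#   iteration 4: fn = 'pool'     rel_path = 'pool/main/a/apache2'  (loop exits)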
def set_up(self):
    super(self.__class__, self).set_up()
    self.search_path = pjoin(self.work_dir, 'repo', 'stuff')
    release_time = 'Wed, 22 Mar 2005 21:20:00 UTC'
    contents = {
        ('apt-deb', 'archive'): 'stable',
        ('apt-deb', 'version'): '3.0r4',
        ('apt-deb', 'origin'): 'Debian',
        ('apt-deb', 'label'): 'Debian2',
        ('apt-deb', 'suite'): 'happy',
        ('apt-deb', 'codename'): 'woody',
        ('apt-deb', 'date'): release_time,
        ('apt-deb', 'description'): 'Hello World!'
    }
    self.writer = DebianReleaseWriter(contents, ['i386', 'alpha'],
                                      ['main', 'contrib'],
                                      self.search_path)
def remove(self, files, force):
    """Remove files from version control."""
    self.assert_no_dirs(files)
    self.assert_known(files, True)
    for name in files:
        full_path = pjoin(self.work_dir, name)
        if os.path.exists(full_path):
            if force:
                # Unlink the file inside the work dir, not the bare
                # name relative to the current directory.
                os.unlink(full_path)
            else:
                message = 'File %s exists. Remove it and retry.' % name
                raise SemanticError(message)
    add_remove = self.get_add_remove()
    add_remove.remove(files)
    add_remove.save()
def create(self):
    """Populate self.vc_dir with a git skeleton."""
    self.git.run_init_db()
    remotes_dir = pjoin(self.vc_dir, 'remotes')
    if not os.path.exists(remotes_dir):
        os.makedirs(remotes_dir)
    os.makedirs(self.priv_dir)
    files_to_exclude = [ 'etc/cache',
                         'etc/channels',
                         'etc/tmp',
                         'etc/outside-world.cache',
                         'tmp' ]
    os.remove(self.global_exclude)
    handle = open(self.global_exclude, 'w')
    for item in files_to_exclude:
        print >> handle, item
    handle.close()
def get_file_lists(self):
    """Get a dictionary of LazyWriters keyed by section and arch.

    Key format is (section, subsection, arch).
    Subsection should be None or 'debian-installer'.
    """
    lists = {}
    for key in self._iter_file_list_keys():
        section, subsection, arch = key
        if arch == 'source':
            file_name = '%s/%s/source/Sources' % (self.dist, section)
        elif subsection:
            file_name = '%s/%s/%s/binary-%s/Packages' \
                        % (self.dist, section, subsection, arch)
        else:
            file_name = '%s/%s/binary-%s/Packages' \
                        % (self.dist, section, arch)
        full_name = pjoin(self.repo_dir, file_name)
        lists[key] = LazyWriter(full_name)
    return lists
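# Example key -> index file mapping, assuming dist is 'dists/happy'
# (the keys themselves are hypothetical):
#
#   ('main', None, 'i386')               -> dists/happy/main/binary-i386/Packages
#   ('main', 'debian-installer', 'i386') -> dists/happy/main/debian-installer/binary-i386/Packages
#   ('main', None, 'source')             -> dists/happy/main/source/Sources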
def set_up(self):
    super(CacheFixture, self).set_up()
    os.system(set_up_cache)
    self.cache = Cache(pjoin(self.work_dir, 'cache'))
    self.compiler = Compiler(self.cache)
def test_pool_location(self):
    location = self.bin_injector.get_pool_location()
    expected = pjoin(self.repo.repo_dir, 'pool', 'main', 'a', 'apache2',
                     self.bin.filename)
    self.assert_equals_long(expected, location)
def get_pool_location(self):
    """Return the full pool path where the given package should be put."""
    repo_path = self.get_pool_dir()
    repo_filename = self.package.filename
    return pjoin(repo_path, repo_filename)
def create_debian_pool_repo(self, product, provided_contents, repo_dir):
    """Do the work of creating a pool repo given packages."""
    # Some sane defaults for contents.
    default_date = strftime('%a, %d %b %Y %H:%M:%S +0000', gmtime())
    default_apt_suite_name = product.id
    contents = {
        ('apt-deb', 'suite'): default_apt_suite_name,
        ('apt-deb', 'version'): '0',
        ('apt-deb', 'origin'): default_apt_suite_name,
        ('apt-deb', 'label'): default_apt_suite_name,
        ('apt-deb', 'codename'): default_apt_suite_name,
        ('apt-deb', 'date'): default_date,
        ('apt-deb', 'description'): default_apt_suite_name,
        ('apt-deb', 'split-apt-components'): '',
        ('apt-deb', 'key'): None
    }
    contents.update(provided_contents)
    # Check that apt-deb.key is set and names a key gpg knows about.
    key = contents['apt-deb', 'key']
    if key:
        key_status = subprocess.call('gpg --list-keys %s > /dev/null 2>&1'
                                     % key, shell=True)
    # Short-circuit: when key is unset, key_status is never consulted.
    if not key or key_status != 0:
        raise InputError('Cannot find gpg key; please set '
                         "'apt-deb.key' in the meta of the component "
                         "file and ensure the key exists with "
                         "'gpg --list-keys'.")
    suite = contents['apt-deb', 'suite']
    if contents['apt-deb', 'split-apt-components']:
        # An apt-splittable component should not directly reference
        # packages.
        if list(product.iter_direct_packages()):
            raise InputError('No direct package references are '
                             'allowed while split-apt-components is '
                             'in effect.')
        # Sort packages belonging to the various apt components into a
        # dict keyed by apt component name.
        packages_dict = {}
        for apt_component in product.iter_direct_components():
            apt_name = get_apt_component_name(apt_component.ref)
            component_packages = list(apt_component.iter_packages())
            packages_dict[apt_name] = component_packages
    else:
        # Default behavior: dists/$compname/main .
        # See the default suite value in contents above.
        packages_dict = {'main': list(product.iter_packages())}
    sections = packages_dict.keys()
    all_packages = Set(chain(*packages_dict.values()))
    arches = self.deb_scan_arches(all_packages)
    # Set True to use apt-ftparchive, False to use the direct version.
    use_apt_ftparchive = False
    cwd = os.getcwd()
    suitepath = pjoin('dists', suite)
    if use_apt_ftparchive:
        repo = DebianPoolRepo(cwd, suitepath, arches, sections,
                              repo_dir)
    else:
        repo = DebianDirectPoolRepo(cwd, suitepath, arches, sections,
                                    repo_dir)
    search_path = pjoin(repo.repo_dir, repo.dist)
    contents['apt-deb', 'archive'] = suite
    writer = DebianReleaseWriter(contents, arches, sections,
                                 search_path)
    repo.make_all_dirs()
    for section, packages in packages_dict.items():
        for package in packages:
            injector = DebianPoolInjector(self.cache, package, section,
                                          repo.repo_dir)
            repo.write_to_lists(injector)
            injector.link_to_cache()
    repo.write_repo()
    repo.write_releases(writer)
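# Shape of packages_dict in the two modes above (component names are
# hypothetical):
#
#   split-apt-components set:
#       {'main': [...], 'contrib': [...]}   one key per direct child component
#   default:
#       {'main': [every package in the product]}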
def test_repo_and_tmp_dir(self):
    self.assert_equal(pjoin(self.work_dir, 'repo'), self.repo.repo_dir)
    self.assert_equal(pjoin(self.work_dir, 'tmp', 'dists', 'happy'),
                      self.repo.tmp_dir)
def __init__(self, cache, package, section, repo_dir):
    self.cache = cache
    self.package = package
    self.section = section
    self.repo_dir = repo_dir