def _get_patch_files(self, buildscript):
    patch_files = []

    # now patch the working tree
    for (patch, patchstrip) in self.patches:
        patchfile = ''
        if urlutils.urlparse(patch)[0]:
            # patch name has scheme, get patch from network
            try:
                patchfile = httpcache.load(
                    patch, nonetwork=buildscript.config.nonetwork)
            except urlutils.HTTPError as e:
                raise BuildStateError(
                    _('could not download patch (error: %s)') % e.code)
            except urlutils.URLError:
                raise BuildStateError(_('could not download patch'))
        elif self.repository.moduleset_uri:
            # get it relative to the moduleset uri, either in the same
            # directory or a patches/ subdirectory
            for patch_prefix in ('.', 'patches', '../patches'):
                uri = urlutils.urljoin(self.repository.moduleset_uri,
                                       os.path.join(patch_prefix, patch))
                try:
                    patchfile = httpcache.load(
                        uri, nonetwork=buildscript.config.nonetwork)
                except Exception:
                    continue
                if not os.path.isfile(patchfile):
                    continue
                break
            else:
                patchfile = ''

        if not patchfile:
            # nothing else, use jhbuild provided patches
            possible_locations = []
            if self.config.modulesets_dir:
                possible_locations.append(
                    os.path.join(self.config.modulesets_dir, 'patches'))
                possible_locations.append(
                    os.path.join(self.config.modulesets_dir, '../patches'))
            if PKGDATADIR:
                possible_locations.append(os.path.join(PKGDATADIR, 'patches'))
            if SRCDIR:
                possible_locations.append(os.path.join(SRCDIR, 'patches'))
            for dirname in possible_locations:
                patchfile = os.path.join(dirname, patch)
                if os.path.exists(patchfile):
                    break
            else:
                raise CommandError(_('Failed to find patch: %s') % patch)

        patch_files.append((patchfile, patch, patchstrip))

    return patch_files

def branch(self, name, module=None, checkoutdir=None):
    if name in self.config.branches:
        module = self.config.branches[name]
        if not module:
            raise FatalError(
                _('branch for %(name)s has wrong override, check your %(filename)s')
                % {'name': name, 'filename': self.config.filename})
    else:
        if module is None:
            module = name
        module = urlutils.urljoin(self.href, module)
    return DarcsBranch(self, module, checkoutdir)

def branch(self, name, module=None, checkoutdir=None, revision=None,
           tag=None, user=None, revspec=None, branch=None, module_href=None):
    if name in self.config.branches:
        module_href = self.config.branches[name]
        if not module_href:
            raise FatalError(
                _('branch for %(name)s has wrong override, check your %(filename)s')
                % {'name': name, 'filename': self.config.filename})

    if module is None:
        module = name

    if revision or branch:
        template = urlutils.urljoin(self.href, self.branches_template)
    else:
        template = urlutils.urljoin(self.href, self.trunk_template)

    if not module_href:
        module_href = template % {
            'module': module,
            'revision': revision,
            'branch': branch,
            'tag': tag,
            'user': user,
        }

    if checkoutdir is None:
        checkoutdir = name

    return BzrBranch(self, module_href, checkoutdir, tag, revspec)

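# Illustrative sketch, not jhbuild code: the module_href above is produced by
# %-expanding a template with the branch metadata.  The template string and
# URL below are assumptions made up for this example; the real values come
# from the repository's trunk_template/branches_template attributes.
_example_template = 'https://code.example.org/%(module)s/%(branch)s'
_example_href = _example_template % {
    'module': 'foo', 'revision': None, 'branch': 'stable',
    'tag': None, 'user': None,
}
# _example_href == 'https://code.example.org/foo/stable'
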
def branch(self, name, version, module=None, checkoutdir=None,
           size=None, md5sum=None, hash=None, branch_id=None,
           source_subdir=None, rename_tarball=None):
    if name in self.config.branches:
        module = self.config.branches[name]
        if not module:
            raise FatalError(
                _('branch for %(name)s has wrong override, check your %(filename)s')
                % {'name': name, 'filename': self.config.filename})
    else:
        if module is None:
            module = name
        module = urlutils.urljoin(self.href, module)
    module = module.replace('${version}', version)
    if checkoutdir is not None:
        checkoutdir = checkoutdir.replace('${version}', version)
    if size is not None:
        size = int(size)
    if md5sum and not hash:
        hash = 'md5:' + md5sum
    if rename_tarball is not None:
        rename_tarball = rename_tarball.replace('${name}', name).replace(
            '${version}', version)
    return TarballBranch(self, module=module, version=version,
                         checkoutdir=checkoutdir, source_size=size,
                         source_hash=hash, branch_id=branch_id,
                         source_subdir=source_subdir,
                         tarball_name=rename_tarball)

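# Illustrative sketch, not jhbuild code: the ${version} placeholder in a
# tarball module href is expanded by plain string replacement, as done above.
# The URL and version number are made up for this example.
_example_module = 'https://downloads.example.org/glib/glib-${version}.tar.xz'
_example_url = _example_module.replace('${version}', '2.76.4')
# _example_url == 'https://downloads.example.org/glib/glib-2.76.4.tar.xz'
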
def _parse_module_set(config, uri):
    try:
        filename = httpcache.load(uri, nonetwork=config.nonetwork, age=0)
    except Exception as e:
        raise FatalError(_('could not download %s: %s') % (uri, e))
    filename = os.path.normpath(filename)
    try:
        document = xml.dom.minidom.parse(filename)
    except IOError as e:
        raise FatalError(_('failed to parse %s: %s') % (filename, e))
    except xml.parsers.expat.ExpatError as e:
        raise FatalError(_('failed to parse %s: %s') % (uri, e))

    assert document.documentElement.nodeName == 'moduleset'

    for node in _child_elements_matching(document.documentElement, ['redirect']):
        new_url = node.getAttribute('href')
        logging.info('moduleset is now located at %s', new_url)
        return _parse_module_set(config, new_url)

    _handle_conditions(config, document.documentElement)

    moduleset = ModuleSet(config=config)
    moduleset_name = document.documentElement.getAttribute('name')
    if not moduleset_name:
        moduleset_name = os.path.basename(uri)
        if moduleset_name.endswith('.modules'):
            moduleset_name = moduleset_name[:-len('.modules')]

    # load up list of repositories
    repositories = {}
    default_repo = None
    for node in _child_elements_matching(
            document.documentElement,
            ['repository', 'cvsroot', 'svnroot', 'arch-archive']):
        name = node.getAttribute('name')
        if node.getAttribute('default') == 'yes':
            default_repo = name
        if node.nodeName == 'repository':
            repo_type = node.getAttribute('type')
            repo_class = get_repo_type(repo_type)
            kws = {}
            for attr in repo_class.init_xml_attrs:
                if node.hasAttribute(attr):
                    kws[attr.replace('-', '_')] = node.getAttribute(attr)
            if name in repositories:
                logging.warning(_('Duplicate repository:') + ' ' + name)
            repositories[name] = repo_class(config, name, **kws)
            repositories[name].moduleset_uri = uri
            mirrors = {}
            for mirror in _child_elements_matching(node, ['mirror']):
                mirror_type = mirror.getAttribute('type')
                mirror_class = get_repo_type(mirror_type)
                kws = {}
                for attr in mirror_class.init_xml_attrs:
                    if mirror.hasAttribute(attr):
                        kws[attr.replace('-', '_')] = mirror.getAttribute(attr)
                mirrors[mirror_type] = mirror_class(config, name, **kws)
                # mirrors[mirror_type].moduleset_uri = uri
            setattr(repositories[name], "mirrors", mirrors)
        if node.nodeName == 'cvsroot':
            cvsroot = node.getAttribute('root')
            if node.hasAttribute('password'):
                password = node.getAttribute('password')
            else:
                password = None
            repo_type = get_repo_type('cvs')
            repositories[name] = repo_type(config, name,
                                           cvsroot=cvsroot, password=password)
        elif node.nodeName == 'svnroot':
            svnroot = node.getAttribute('href')
            repo_type = get_repo_type('svn')
            repositories[name] = repo_type(config, name, href=svnroot)
        elif node.nodeName == 'arch-archive':
            archive_uri = node.getAttribute('href')
            repo_type = get_repo_type('arch')
            repositories[name] = repo_type(config, name,
                                           archive=name, href=archive_uri)

    # and now module definitions
    for node in _child_elements(document.documentElement):
        if node.nodeName == 'include':
            href = node.getAttribute('href')
            inc_uri = urlutils.urljoin(uri, href)
            try:
                inc_moduleset = _parse_module_set(config, inc_uri)
            except UndefinedRepositoryError:
                raise
            except FatalError as e:
                if inc_uri[0] == '/':
                    raise e
                # look up in local modulesets
                inc_uri = os.path.join(os.path.dirname(__file__), '..',
                                       'modulesets', href)
                inc_moduleset = _parse_module_set(config, inc_uri)
            moduleset.modules.update(inc_moduleset.modules)
        elif node.nodeName in ['repository', 'cvsroot', 'svnroot',
                               'arch-archive']:
            pass
        else:
            module = modtypes.parse_xml_node(node, config, uri,
                                             repositories, default_repo)
            if moduleset_name:
                module.tags.append(moduleset_name)
                module.moduleset_name = moduleset_name
            moduleset.add(module)

    # keep default repository around, used when creating automatic modules
    global _default_repo
    if default_repo:
        _default_repo = repositories[default_repo]

    return moduleset
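
# Illustrative sketch, not jhbuild code: a <redirect href="..."/> child of the
# <moduleset> element makes _parse_module_set() recurse into the new location.
# This standalone snippet only shows the document shape it looks for; the URL
# is made up, and getElementsByTagName stands in for the module's own
# _child_elements_matching() helper.
import xml.dom.minidom

_example_doc = xml.dom.minidom.parseString(
    '<moduleset name="example">'
    '<redirect href="https://modulesets.example.org/example.modules"/>'
    '</moduleset>')
assert _example_doc.documentElement.nodeName == 'moduleset'
_redirect = _example_doc.documentElement.getElementsByTagName('redirect')[0]
# _redirect.getAttribute('href') is the new moduleset location to load instead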