def _do_patches(self, buildscript):
    # now patch the working tree
    for (patch, patchstrip) in self.patches:
        patchfile = ""
        if urlparse(patch)[0]:
            # patch name has scheme, get patch from network
            try:
                patchfile = httpcache.load(patch,
                                           nonetwork=buildscript.config.nonetwork)
            except urllib2.HTTPError as e:
                raise BuildStateError("could not download patch (error: %s)" % e.code)
            except urllib2.URLError as e:
                raise BuildStateError("could not download patch")
        elif self.repository.moduleset_uri:
            # get it relative to the moduleset uri, either in the same
            # directory or a patches/ subdirectory
            for patch_prefix in (".", "patches", "../patches"):
                uri = urljoin(self.repository.moduleset_uri,
                              os.path.join(patch_prefix, patch))
                try:
                    patchfile = httpcache.load(uri,
                                               nonetwork=buildscript.config.nonetwork)
                except Exception:
                    continue
                if not os.path.isfile(patchfile):
                    continue
                break
            else:
                patchfile = ""
        if not patchfile:
            # nothing else, use icbuild provided patches
            possible_locations = []
            if self.config.modulesets_dir:
                possible_locations.append(
                    os.path.join(self.config.modulesets_dir, "patches"))
                possible_locations.append(
                    os.path.join(self.config.modulesets_dir, "../patches"))
            if PKGDATADIR:
                possible_locations.append(os.path.join(PKGDATADIR, "patches"))
            if SRCDIR:
                possible_locations.append(os.path.join(SRCDIR, "patches"))
            for dirname in possible_locations:
                patchfile = os.path.join(dirname, patch)
                if os.path.exists(patchfile):
                    break
            else:
                raise CommandError("Failed to find patch: %s" % patch)

        buildscript.set_action("Applying patch", self, action_target=patch)
        # patchfile can be a relative file
        buildscript.execute('patch -p%d < "%s"' % (patchstrip, os.path.abspath(patchfile)),
                            cwd=self.raw_srcdir)
def _parse_module_set(config, uri):
    # Download (or load from the cache) the moduleset document at 'uri',
    # then build a ModuleSet from its repository and module definitions.
    try:
        filename = httpcache.load(uri, nonetwork=config.nonetwork, age=0)
    except Exception as e:
        raise FatalError('could not download %s: %s' % (uri, e))
    filename = os.path.normpath(filename)
    try:
        document = xml.dom.minidom.parse(filename)
    except IOError as e:
        raise FatalError('failed to parse %s: %s' % (filename, e))
    except xml.parsers.expat.ExpatError as e:
        raise FatalError('failed to parse %s: %s' % (uri, e))

    assert document.documentElement.nodeName == 'moduleset'

    # a <redirect> element points at the moduleset's new location
    for node in _child_elements_matching(document.documentElement, ['redirect']):
        new_url = node.getAttribute('href')
        logging.info('moduleset is now located at %s', new_url)
        return _parse_module_set(config, new_url)

    _handle_conditions(config, document.documentElement)

    moduleset = ModuleSet(config=config)
    moduleset_name = document.documentElement.getAttribute('name')
    if not moduleset_name:
        moduleset_name = os.path.basename(uri)
        if moduleset_name.endswith('.modules'):
            moduleset_name = moduleset_name[:-len('.modules')]

    # load up list of repositories
    repositories = {}
    default_repo = None
    for node in _child_elements_matching(
            document.documentElement,
            ['repository', 'cvsroot', 'svnroot', 'arch-archive']):
        name = node.getAttribute('name')
        if node.getAttribute('default') == 'yes':
            default_repo = name
        if node.nodeName == 'repository':
            repo_type = node.getAttribute('type')
            repo_class = get_repo_type(repo_type)
            kws = {}
            for attr in repo_class.init_xml_attrs:
                if node.hasAttribute(attr):
                    kws[attr.replace('-', '_')] = node.getAttribute(attr)
            if name in repositories:
                logging.warning('Duplicate repository: ' + name)
            repositories[name] = repo_class(config, name, **kws)
            repositories[name].moduleset_uri = uri
            mirrors = {}
            for mirror in _child_elements_matching(node, ['mirror']):
                mirror_type = mirror.getAttribute('type')
                mirror_class = get_repo_type(mirror_type)
                kws = {}
                for attr in mirror_class.init_xml_attrs:
                    if mirror.hasAttribute(attr):
                        kws[attr.replace('-', '_')] = mirror.getAttribute(attr)
                mirrors[mirror_type] = mirror_class(config, name, **kws)
                #mirrors[mirror_type].moduleset_uri = uri
            setattr(repositories[name], 'mirrors', mirrors)
        if node.nodeName == 'cvsroot':
            cvsroot = node.getAttribute('root')
            if node.hasAttribute('password'):
                password = node.getAttribute('password')
            else:
                password = None
            repo_type = get_repo_type('cvs')
            repositories[name] = repo_type(config, name,
                                           cvsroot=cvsroot, password=password)
        elif node.nodeName == 'svnroot':
            svnroot = node.getAttribute('href')
            repo_type = get_repo_type('svn')
            repositories[name] = repo_type(config, name, href=svnroot)
        elif node.nodeName == 'arch-archive':
            archive_uri = node.getAttribute('href')
            repo_type = get_repo_type('arch')
            repositories[name] = repo_type(config, name,
                                           archive=name, href=archive_uri)

    # and now module definitions
    for node in _child_elements(document.documentElement):
        if node.nodeName == 'include':
            href = node.getAttribute('href')
            inc_uri = urljoin(uri, href)
            try:
                inc_moduleset = _parse_module_set(config, inc_uri)
            except UndefinedRepositoryError:
                raise
            except FatalError as e:
                if inc_uri[0] == '/':
                    raise e
                # look up in local modulesets
                inc_uri = os.path.join(os.path.dirname(__file__),
                                       '..', 'modulesets', href)
                inc_moduleset = _parse_module_set(config, inc_uri)
            moduleset.modules.update(inc_moduleset.modules)
        elif node.nodeName in ['repository', 'cvsroot', 'svnroot', 'arch-archive']:
            pass
        else:
            module = modtypes.parse_xml_node(node, config, uri,
                                             repositories, default_repo)
            if moduleset_name:
                module.tags.append(moduleset_name)
                module.moduleset_name = moduleset_name
            moduleset.add(module)

    # keep default repository around, used when creating automatic modules
    global _default_repo
    if default_repo:
        _default_repo = repositories[default_repo]

    return moduleset