def unpack_submodules(ud, url, module, modpath, d):
    url += ";bareclone=1;nobranch=1"

    # Figure out where we clone over the bare submodules...
    if ud.bareclone:
        repo_conf = ud.destdir
    else:
        repo_conf = os.path.join(ud.destdir, '.git')

    try:
        newfetch = Fetch([url], d, cache=False)
        newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
    except Exception as e:
        logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
        raise

    local_path = newfetch.localpath(url)

    # Correct the submodule references to the local download version...
    runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_path}, d, workdir=ud.destdir)

    if ud.shallow:
        runfetchcmd("%(basecmd)s config submodule.%(module)s.shallow true" % {'basecmd': ud.basecmd, 'module': module}, d, workdir=ud.destdir)

    # Ensure the submodule repository is NOT set to bare, since we're checking it out...
    try:
        runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', module))
    except:
        logger.error("Unable to set git config core.bare to false for %s" % os.path.join(repo_conf, 'modules', module))
        raise
def unpack_submodules(ud, url, module, modpath, d):
    url += ";bareclone=1;nobranch=1"

    # Figure out where we clone over the bare submodules...
    if ud.bareclone:
        repo_conf = ud.destdir
    else:
        repo_conf = os.path.join(ud.destdir, '.git')

    try:
        newfetch = Fetch([url], d, cache=False)
        newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', modpath)))
    except Exception as e:
        logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
        raise

    local_path = newfetch.localpath(url)

    # Correct the submodule references to the local download version...
    runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_path}, d, workdir=ud.destdir)

    if ud.shallow:
        runfetchcmd("%(basecmd)s config submodule.%(module)s.shallow true" % {'basecmd': ud.basecmd, 'module': module}, d, workdir=ud.destdir)

    # Ensure the submodule repository is NOT set to bare, since we're checking it out...
    try:
        runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', modpath))
    except:
        logger.error("Unable to set git config core.bare to false for %s" % os.path.join(repo_conf, 'modules', modpath))
        raise
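# A minimal sketch of how a callback like unpack_submodules is typically
# driven from the fetcher's unpack step. The process_submodules() walker and
# this unpack() body are assumptions for illustration, not necessarily the
# exact fetcher implementation:
def unpack(self, ud, destdir, d):
    Git.unpack(self, ud, destdir, d)

    # Walk the .gitmodules entries and invoke unpack_submodules for each one
    ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)

    if not ud.bareclone and ret:
        # The submodules are already downloaded and configured locally, so
        # this only sets up the work tree; nothing is fetched from the network
        runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)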
def download_submodule(ud, url, module, modpath, d):
    url += ";bareclone=1;nobranch=1"

    # Is the following still needed?
    #url += ";nocheckout=1"

    try:
        newfetch = Fetch([url], d, cache=False)
        newfetch.download()
    except Exception as e:
        logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
        raise
def download_submodule(ud, url, module, modpath, d):
    url += ";bareclone=1;nobranch=1"

    # Is the following still needed?
    #url += ";nocheckout=1"

    try:
        newfetch = Fetch([url], d, cache=False)
        newfetch.download()
        # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
        runfetchcmd("%s config --add bitbake.srcrev %s" % \
                    (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
    except Exception as e:
        logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
        raise
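# The bitbake.srcrev "nugget" recorded above lets a later update check skip
# refetching revisions it already has. A hedged sketch of that readback; the
# helper name revision_already_fetched is hypothetical:
def revision_already_fetched(ud, d):
    try:
        known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % (ud.basecmd), d, workdir=ud.clonedir)
        return ud.revisions[ud.names[0]] in known_srcrevs.split()
    except bb.fetch2.FetchError:
        return False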
def need_update_submodule(ud, url, module, modpath, workdir, d):
    url += ";bareclone=1;nobranch=1"

    try:
        newfetch = Fetch([url], d, cache=False)
        new_ud = newfetch.ud[url]
        if new_ud.method.need_update(new_ud, d):
            # need_update_list is a list closed over from the enclosing scope
            need_update_list.append(modpath)
    except Exception as e:
        logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
        # Note: without a nonlocal declaration this assignment only binds a
        # function-local name and has no effect on the enclosing scope
        need_update_result = True
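# need_update_list and need_update_result are closure variables: in the real
# fetcher the callback above is defined inside need_update(). A sketch of
# that enclosing method, assuming a process_submodules() walker exists:
def need_update(self, ud, d):
    # First check whether the main repository itself has anything new
    if Git.need_update(self, ud, d):
        return True

    need_update_list = []

    def need_update_submodule(ud, url, module, modpath, workdir, d):
        url += ";bareclone=1;nobranch=1"
        new_ud = Fetch([url], d, cache=False).ud[url]
        if new_ud.method.need_update(new_ud, d):
            need_update_list.append(modpath)

    self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
    return len(need_update_list) > 0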
def _setup_proxy(self, ud, d):
    if ud.proxy is None:
        if not os.path.exists(ud.resolvefile):
            self._resolve_proxy_url(ud, d)

        with open(ud.resolvefile, "r") as f:
            url = f.read()

        # Avoid conflicts between the environment data and:
        # - the proxy url checksum
        data = bb.data.createCopy(d)
        data.delVarFlags("SRC_URI")
        ud.proxy = Fetch([url], data)
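# Usage sketch: once _setup_proxy() has populated ud.proxy, the fetcher entry
# points can delegate to it. This download() wrapper is an assumed example,
# not necessarily the fetcher's verbatim code:
def download(self, ud, d):
    """Fetch url"""
    self._setup_proxy(ud, d)
    ud.proxy.download()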
def update_submodules(self, ud, d):
    submodules = []
    paths = {}
    uris = {}
    local_paths = {}

    for name in ud.names:
        try:
            gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=ud.clonedir)
        except:
            # No submodules to update
            continue

        for m, md in self.parse_gitmodules(gitmodules).items():
            submodules.append(m)
            paths[m] = md['path']
            uris[m] = md['url']
            if uris[m].startswith('..'):
                newud = copy.copy(ud)
                newud.path = os.path.realpath(os.path.join(newud.path, md['url']))
                uris[m] = Git._get_repo_url(self, newud)

    for module in submodules:
        module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], paths[module]), d, quiet=True, workdir=ud.clonedir)
        module_hash = module_hash.split()[2]

        # Build new SRC_URI
        proto = uris[module].split(':', 1)[0]
        url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
        url += ';protocol=%s' % proto
        url += ";name=%s" % module
        url += ";bareclone=1;nocheckout=1;nobranch=1"

        ld = d.createCopy()
        # Not necessary to set SRC_URI, since we're passing the URI to
        # Fetch.
        #ld.setVar('SRC_URI', url)
        ld.setVar('SRCREV_%s' % module, module_hash)

        # Workaround for issues with SRCPV/SRCREV_FORMAT errors; the errors
        # refer to 'multiple' repositories. Only the repository in the
        # original SRC_URI actually matters...
        ld.setVar('SRCPV', d.getVar('SRCPV'))
        ld.setVar('SRCREV_FORMAT', module)

        newfetch = Fetch([url], ld, cache=False)
        newfetch.download()
        local_paths[module] = newfetch.localpath(url)

        # Correct the submodule references to the local download version...
        runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_paths[module]}, d, workdir=ud.clonedir)

        symlink_path = os.path.join(ud.clonedir, 'modules', paths[module])
        if not os.path.exists(symlink_path):
            try:
                os.makedirs(os.path.dirname(symlink_path), exist_ok=True)
            except OSError:
                pass
            os.symlink(local_paths[module], symlink_path)

    return True
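# parse_gitmodules() is used above but not shown. Its contract can be
# inferred from the inline parser in the older update_submodules variant at
# the end of this file: it maps each submodule name to its 'path' and 'url'.
# A minimal sketch matching that contract:
def parse_gitmodules(self, gitmodules):
    modules = {}
    module = ""
    for line in gitmodules.splitlines():
        if line.startswith('[submodule'):
            module = line.split('"')[1]
            modules[module] = {}
        elif module and line.strip().startswith('path'):
            modules[module]['path'] = line.split('=')[1].strip()
        elif module and line.strip().startswith('url'):
            modules[module]['url'] = line.split('=')[1].strip()
    return modules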
def add_submodule(ud, url, module, modpath, workdir, d):
    url += ";bareclone=1;nobranch=1"
    newfetch = Fetch([url], d, cache=False)
    # urldata is a list closed over from the enclosing scope
    urldata.extend(newfetch.expanded_urldata())
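# urldata is a closure variable owned by the enclosing method. A sketch of
# that method with add_submodule nested so the closure actually works; the
# GitSM class name and process_submodules() walker are assumptions here:
def expanded_urldata(self, ud, d, urls=None):
    urldata = super(GitSM, self).expanded_urldata(ud, d, urls)

    def add_submodule(ud, url, module, modpath, workdir, d):
        url += ";bareclone=1;nobranch=1"
        newfetch = Fetch([url], d, cache=False)
        urldata.extend(newfetch.expanded_urldata())

    self.process_submodules(ud, ud.clonedir, add_submodule, d)
    return urldata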
def urldata_init(self, ud, d):
    """Init npmsw specific variables within url data"""

    # Get the 'shrinkwrap' parameter
    ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])

    # Get the 'dev' parameter
    ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)

    # Resolve the dependencies
    ud.deps = []

    def _resolve_dependency(name, params, deptree):
        url = None
        localpath = None
        extrapaths = []
        destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
        destsuffix = os.path.join(*destsubdirs)
        unpack = True

        integrity = params.get("integrity", None)
        resolved = params.get("resolved", None)
        version = params.get("version", None)

        # Handle registry sources
        if is_semver(version) and integrity:
            # Handle duplicate dependencies without url
            if not resolved:
                return

            localfile = npm_localfile(name, version)

            uri = URI(resolved)
            uri.params["downloadfilename"] = localfile

            checksum_name, checksum_expected = npm_integrity(integrity)
            uri.params[checksum_name] = checksum_expected

            url = str(uri)

            localpath = os.path.join(d.getVar("DL_DIR"), localfile)

            # Create a resolve file to mimic the npm fetcher and allow
            # re-usability of the downloaded file.
            resolvefile = localpath + ".resolved"

            bb.utils.mkdirhier(os.path.dirname(resolvefile))
            with open(resolvefile, "w") as f:
                f.write(url)

            extrapaths.append(resolvefile)

        # Handle http tarball sources
        elif version.startswith("http") and integrity:
            localfile = npm_localfile(os.path.basename(version))

            uri = URI(version)
            uri.params["downloadfilename"] = localfile

            checksum_name, checksum_expected = npm_integrity(integrity)
            uri.params[checksum_name] = checksum_expected

            url = str(uri)

            localpath = os.path.join(d.getVar("DL_DIR"), localfile)

        # Handle git sources
        elif version.startswith("git"):
            if version.startswith("github:"):
                version = "git+https://github.com/" + version[len("github:"):]
            regex = re.compile(r"""
                ^
                git\+
                (?P<protocol>[a-z]+)
                ://
                (?P<url>[^#]+)
                \#
                (?P<rev>[0-9a-f]+)
                $
                """, re.VERBOSE)

            match = regex.match(version)

            if not match:
                raise ParameterError("Invalid git url: %s" % version, ud.url)

            groups = match.groupdict()

            uri = URI("git://" + str(groups["url"]))
            uri.params["protocol"] = str(groups["protocol"])
            uri.params["rev"] = str(groups["rev"])
            uri.params["destsuffix"] = destsuffix

            url = str(uri)

        # Handle local tarball and link sources
        elif version.startswith("file"):
            localpath = version[5:]
            if not version.endswith(".tgz"):
                unpack = False

        else:
            raise ParameterError("Unsupported dependency: %s" % name, ud.url)

        ud.deps.append({
            "url": url,
            "localpath": localpath,
            "extrapaths": extrapaths,
            "destsuffix": destsuffix,
            "unpack": unpack,
        })

    try:
        with open(ud.shrinkwrap_file, "r") as f:
            shrinkwrap = json.load(f)
    except Exception as e:
        raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)

    foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)

    # Avoid conflicts between the environment data and:
    # - the proxy url revision
    # - the proxy url checksum
    data = bb.data.createCopy(d)
    data.delVar("SRCREV")
    data.delVarFlags("SRC_URI")

    # This fetcher resolves multiple URIs from a shrinkwrap file and then
    # forwards it to a proxy fetcher. The management of the donestamp file,
    # the lockfile and the checksums are forwarded to the proxy fetcher.
    ud.proxy = Fetch([dep["url"] for dep in ud.deps if dep["url"]], data)
    ud.needdonestamp = False
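# An illustrative shrinkwrap fragment of the shape _resolve_dependency()
# receives via foreach_dependencies(); the package entry is arbitrary and the
# integrity digest is a truncated placeholder. A registry entry like this
# takes the "is_semver(version) and integrity" branch, a "git+https://...#rev"
# version takes the git branch, and a "file:..." version the local branch:
example_shrinkwrap = {
    "name": "example",
    "version": "1.0.0",
    "dependencies": {
        "array-flatten": {
            "version": "1.1.1",
            "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
            "integrity": "sha512-PCVAQsw...",  # placeholder, not a real digest
        },
    },
}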
def update_submodules(self, ud, d):
    submodules = []
    paths = {}
    uris = {}
    local_paths = {}

    for name in ud.names:
        try:
            gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=ud.clonedir)
        except:
            # No submodules to update
            continue

        module = ""
        for line in gitmodules.splitlines():
            if line.startswith('[submodule'):
                module = line.split('"')[1]
                submodules.append(module)
            elif module and line.strip().startswith('path'):
                path = line.split('=')[1].strip()
                paths[module] = path
            elif module and line.strip().startswith('url'):
                url = line.split('=')[1].strip()
                uris[module] = url

    for module in submodules:
        module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], paths[module]), d, quiet=True, workdir=ud.clonedir)
        module_hash = module_hash.split()[2]

        # Build new SRC_URI
        proto = uris[module].split(':', 1)[0]
        url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
        url += ';protocol=%s' % proto
        url += ";name=%s" % module
        url += ";bareclone=1;nocheckout=1"

        ld = d.createCopy()
        # Not necessary to set SRC_URI, since we're passing the URI to
        # Fetch.
        #ld.setVar('SRC_URI', url)
        ld.setVar('SRCREV_%s' % module, module_hash)

        # Workaround for issues with SRCPV/SRCREV_FORMAT errors; the errors
        # refer to 'multiple' repositories. Only the repository in the
        # original SRC_URI actually matters...
        ld.setVar('SRCPV', d.getVar('SRCPV'))
        ld.setVar('SRCREV_FORMAT', module)

        newfetch = Fetch([url], ld, cache=False)
        newfetch.download()
        local_paths[module] = newfetch.localpath(url)

        # Correct the submodule references to the local download version...
        runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_paths[module]}, d, workdir=ud.clonedir)

        try:
            os.mkdir(os.path.join(ud.clonedir, 'modules'))
        except OSError:
            pass

        if not os.path.exists(os.path.join(ud.clonedir, 'modules', paths[module])):
            os.symlink(local_paths[module], os.path.join(ud.clonedir, 'modules', paths[module]))

    return True