def download(self, ud, d):
    """Fetch url.

    Builds the JSON dependency tree for ud.pkgname (either by walking the
    registry or from an NPM shrinkwrap file) and dumps it to ud.localpath.
    If the package data directory is empty but a mirror tarball exists,
    the tarball is unpacked into DL_DIR instead and no network walk is done.
    """
    jsondepobj = {}
    shrinkobj = {}
    lockdown = {}

    # Fast path: restore everything from the pre-built mirror tarball.
    if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
        dest = d.getVar("DL_DIR", True)
        bb.utils.mkdirhier(dest)
        runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
        return

    shwrf = d.getVar('NPM_SHRINKWRAP', True)
    logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
    try:
        with open(shwrf) as datafile:
            shrinkobj = json.load(datafile)
    # Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit.
    # TypeError covers an unset variable (open(None)); IOError/OSError cover a
    # missing/unreadable file; ValueError covers malformed JSON (JSONDecodeError
    # is a ValueError subclass). The warn-and-continue behavior is unchanged.
    except (TypeError, IOError, OSError, ValueError):
        logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)

    lckdf = d.getVar('NPM_LOCKDOWN', True)
    logger.debug(2, "NPM lockdown file is %s" % lckdf)
    try:
        with open(lckdf) as datafile:
            lockdown = json.load(datafile)
    # Same narrowing as above; a missing lockdown is non-fatal by design.
    except (TypeError, IOError, OSError, ValueError):
        logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

    # A shrinkwrap without a 'name' key is unusable; fall back to a live walk.
    if ('name' not in shrinkobj):
        self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
    else:
        self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

    with open(ud.localpath, 'w') as outfile:
        json.dump(jsondepobj, outfile)
def download(self, ud, d):
    """Fetch url"""
    jsondepobj = {}
    shrinkobj = {}
    lockdown = {}

    # A previously generated mirror tarball lets us skip the dependency
    # walk entirely: unpack it into DL_DIR and return.
    if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
        dldir = d.getVar("DL_DIR")
        bb.utils.mkdirhier(dldir)
        runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dldir)
        return

    # Unless the URI explicitly opts out, load the shrinkwrap and lockdown
    # files; a parse/read failure is fatal, a missing file merely warns.
    verify = ud.parm.get("noverify", None) != '1'
    if verify:
        shrinkwrap_file = d.getVar('NPM_SHRINKWRAP')
        logger.debug(2, "NPM shrinkwrap file is %s" % shrinkwrap_file)
        if not shrinkwrap_file:
            if not ud.ignore_checksums:
                logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
        else:
            try:
                with open(shrinkwrap_file) as f:
                    shrinkobj = json.load(f)
            except Exception as exc:
                raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shrinkwrap_file, ud.pkgname, str(exc)))

        lockdown_file = d.getVar('NPM_LOCKDOWN')
        logger.debug(2, "NPM lockdown file is %s" % lockdown_file)
        if not lockdown_file:
            if not ud.ignore_checksums:
                logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
        else:
            try:
                with open(lockdown_file) as f:
                    lockdown = json.load(f)
            except Exception as exc:
                raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lockdown_file, ud.pkgname, str(exc)))

    # A usable shrinkwrap (it has a 'name') drives the resolution; otherwise
    # walk the registry live.
    if 'name' in shrinkobj:
        self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)
    else:
        self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)

    with open(ud.localpath, 'w') as outfile:
        json.dump(jsondepobj, outfile)
def download(self, ud, d):
    """Fetch url"""
    # Dependency tree accumulated by the _get*dependencies helpers and
    # finally dumped as JSON to ud.localpath.
    jsondepobj = {}
    shrinkobj = {}
    lockdown = {}

    # If the package data dir is empty but a mirror tarball exists, just
    # unpack the tarball into DL_DIR -- no registry walk needed.
    if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
        dest = d.getVar("DL_DIR")
        bb.utils.mkdirhier(dest)
        runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
        return

    # noverify=1 on the URI skips shrinkwrap/lockdown processing entirely.
    if ud.parm.get("noverify", None) != '1':
        shwrf = d.getVar('NPM_SHRINKWRAP')
        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
        if shwrf:
            try:
                with open(shwrf) as datafile:
                    shrinkobj = json.load(datafile)
            # Unreadable/malformed shrinkwrap is fatal; a missing one only warns.
            except Exception as e:
                raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
        elif not ud.ignore_checksums:
            logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)

        lckdf = d.getVar('NPM_LOCKDOWN')
        logger.debug(2, "NPM lockdown file is %s" % lckdf)
        if lckdf:
            try:
                with open(lckdf) as datafile:
                    lockdown = json.load(datafile)
            # Same policy as the shrinkwrap file above.
            except Exception as e:
                raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lckdf, ud.pkgname, str(e)))
        elif not ud.ignore_checksums:
            logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

    # A shrinkwrap without a 'name' key is unusable -> fall back to a live walk.
    if ('name' not in shrinkobj):
        self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
    else:
        self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

    with open(ud.localpath, 'w') as outfile:
        json.dump(jsondepobj, outfile)
def process_submodules(self, ud, workdir, function, d):
    """
    Iterate over all of the submodules in this repository and execute
    the 'function' for each of them.

    function is invoked as function(ud, url, module, modpath, d) with a
    gitsm:// SRC_URI synthesized from each submodule's .gitmodules entry.
    Returns True if at least one initialized submodule was found.
    """
    submodules = []
    paths = {}
    revision = {}
    uris = {}
    subrevision = {}

    def parse_gitmodules(gitmodules):
        # Minimal .gitmodules parser: maps each quoted submodule name to
        # its 'path' and 'url' keys.
        modules = {}
        module = ""
        for line in gitmodules.splitlines():
            if line.startswith('[submodule'):
                module = line.split('"')[1]
                modules[module] = {}
            elif module and line.strip().startswith('path'):
                path = line.split('=')[1].strip()
                modules[module]['path'] = path
            elif module and line.strip().startswith('url'):
                url = line.split('=')[1].strip()
                modules[module]['url'] = url
        return modules

    # Collect the defined submodules, and their attributes
    for name in ud.names:
        try:
            # Read .gitmodules straight out of the object store so this also
            # works on a bare repository.
            gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir)
        except:
            # No submodules to update
            continue

        for m, md in parse_gitmodules(gitmodules).items():
            try:
                module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir)
            except:
                # If the command fails, we don't have a valid file to check. If it doesn't
                # fail -- it still might be a failure, see next check...
                module_hash = ""

            if not module_hash:
                logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
                continue

            submodules.append(m)
            paths[m] = md['path']
            revision[m] = ud.revisions[name]
            uris[m] = md['url']
            # ls-tree output is "<mode> <type> <hash>\t<path>"; field 2 is the
            # commit hash the superproject pins this submodule to.
            subrevision[m] = module_hash.split()[2]

            # Convert relative to absolute uri based on parent uri
            if uris[m].startswith('..'):
                newud = copy.copy(ud)
                newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
                uris[m] = Git._get_repo_url(self, newud)

    for module in submodules:
        # Translate the module url into a SRC_URI
        if "://" in uris[module]:
            # Properly formated URL already
            proto = uris[module].split(':', 1)[0]
            url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
        else:
            if ":" in uris[module]:
                # Most likely an SSH style reference
                proto = "ssh"
                if ":/" in uris[module]:
                    # Absolute reference, easy to convert..
                    url = "gitsm://" + uris[module].replace(':/', '/', 1)
                else:
                    # Relative reference, no way to know if this is right!
                    logger.warning("Submodule included by %s refers to relative ssh reference %s. References may fail if not absolute." % (ud.url, uris[module]))
                    url = "gitsm://" + uris[module].replace(':', '/', 1)
            else:
                # This has to be a file reference
                proto = "file"
                url = "gitsm://" + uris[module]

        url += ';protocol=%s' % proto
        url += ";name=%s" % module
        url += ";subpath=%s" % paths[module]

        ld = d.createCopy()
        # Not necessary to set SRC_URI, since we're passing the URI to
        # Fetch.
        #ld.setVar('SRC_URI', url)
        ld.setVar('SRCREV_%s' % module, subrevision[module])

        # Workaround for issues with SRCPV/SRCREV_FORMAT errors
        # error refer to 'multiple' repositories. Only the repository
        # in the original SRC_URI actually matters...
        ld.setVar('SRCPV', d.getVar('SRCPV'))
        ld.setVar('SRCREV_FORMAT', module)

        function(ud, url, module, paths[module], ld)

    return submodules != []
def process_submodules(self, ud, workdir, function, d):
    """
    Iterate over all of the submodules in this repository and execute
    the 'function' for each of them.

    function is invoked as function(ud, url, module, modpath, workdir, d)
    with a gitsm:// SRC_URI synthesized from each .gitmodules entry.
    Returns True if at least one initialized submodule was found.
    """
    submodules = []
    paths = {}
    revision = {}
    uris = {}
    subrevision = {}

    def parse_gitmodules(gitmodules):
        # Minimal .gitmodules parser: maps each quoted submodule name to
        # its 'path' and 'url' keys.
        modules = {}
        module = ""
        for line in gitmodules.splitlines():
            if line.startswith('[submodule'):
                module = line.split('"')[1]
                modules[module] = {}
            elif module and line.strip().startswith('path'):
                path = line.split('=')[1].strip()
                modules[module]['path'] = path
            elif module and line.strip().startswith('url'):
                url = line.split('=')[1].strip()
                modules[module]['url'] = url
        return modules

    # Collect the defined submodules, and their attributes
    for name in ud.names:
        try:
            # Read .gitmodules straight out of the object store so this also
            # works on a bare repository.
            gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir)
        except:
            # No submodules to update
            continue

        for m, md in parse_gitmodules(gitmodules).items():
            try:
                module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir)
            except:
                # If the command fails, we don't have a valid file to check. If it doesn't
                # fail -- it still might be a failure, see next check...
                module_hash = ""

            if not module_hash:
                logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
                continue

            submodules.append(m)
            paths[m] = md['path']
            revision[m] = ud.revisions[name]
            uris[m] = md['url']
            # ls-tree output is "<mode> <type> <hash>\t<path>"; field 2 is the
            # commit hash the superproject pins this submodule to.
            subrevision[m] = module_hash.split()[2]

            # Convert relative to absolute uri based on parent uri
            if uris[m].startswith('..'):
                newud = copy.copy(ud)
                newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
                uris[m] = Git._get_repo_url(self, newud)

    for module in submodules:
        # Translate the module url into a SRC_URI
        if "://" in uris[module]:
            # Properly formated URL already
            proto = uris[module].split(':', 1)[0]
            url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
        else:
            if ":" in uris[module]:
                # Most likely an SSH style reference
                proto = "ssh"
                if ":/" in uris[module]:
                    # Absolute reference, easy to convert..
                    url = "gitsm://" + uris[module].replace(':/', '/', 1)
                else:
                    # Relative reference, no way to know if this is right!
                    logger.warning("Submodule included by %s refers to relative ssh reference %s. References may fail if not absolute." % (ud.url, uris[module]))
                    url = "gitsm://" + uris[module].replace(':', '/', 1)
            else:
                # This has to be a file reference
                proto = "file"
                url = "gitsm://" + uris[module]

        url += ';protocol=%s' % proto
        url += ";name=%s" % module
        # Fix: subpath must be the submodule's checkout *path*, not its name
        # (they differ whenever [submodule "x"] sets path = y). This matches
        # the paths[module] passed to function() below.
        url += ";subpath=%s" % paths[module]

        ld = d.createCopy()
        # Not necessary to set SRC_URI, since we're passing the URI to
        # Fetch.
        #ld.setVar('SRC_URI', url)
        ld.setVar('SRCREV_%s' % module, subrevision[module])

        # Workaround for issues with SRCPV/SRCREV_FORMAT errors
        # error refer to 'multiple' repositories. Only the repository
        # in the original SRC_URI actually matters...
        ld.setVar('SRCPV', d.getVar('SRCPV'))
        ld.setVar('SRCREV_FORMAT', module)

        function(ud, url, module, paths[module], workdir, ld)

    return submodules != []