def urldata_init(self, ud, d):
    """
    Init NPM specific variables within url data.

    Validates the mandatory 'name' and 'version' URL parameters, derives
    the registry URL from the npm:// URL, and prepares the download layout
    and mirror-tarball settings under ${DL_DIR}/npm.

    Raises:
        ParameterError: if the 'name' or 'version' parameter is missing.
    """
    if 'downloadfilename' in ud.parm:
        ud.basename = ud.parm['downloadfilename']
    else:
        ud.basename = os.path.basename(ud.path)
    # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
    # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
    ud.pkgname = ud.parm.get("name", None)
    if not ud.pkgname:
        raise ParameterError("NPM fetcher requires a name parameter", ud.url)
    ud.version = ud.parm.get("version", None)
    if not ud.version:
        raise ParameterError("NPM fetcher requires a version parameter", ud.url)
    ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
    # Strip the npm:// scheme and any ';param=value' suffix to recover the
    # registry host, then force plain http.
    ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
    prefixdir = "npm/%s" % ud.pkgname
    ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
    if not os.path.exists(ud.pkgdatadir):
        bb.utils.mkdirhier(ud.pkgdatadir)
    ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
    self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
    ud.prefixdir = prefixdir
    # Consistency fix: use the datastore accessor d.getVar(...) like the
    # rest of this method instead of the legacy data.getVar(var, d, True)
    # module-level helper (same value, one convention).
    ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) or "0") != "0")
    ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
    ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
def _npm_view():
    """Query the npm registry for the package view and validate it.

    Relies on `ud` and `d` from the enclosing scope. Returns the parsed
    JSON view; raises FetchError/ParameterError on unavailable packages,
    registry errors, or version mismatches.
    """
    # Ask npm for machine-readable output against the configured registry.
    view_args = [("json", "true"), ("registry", ud.registry)]
    pkgver = shlex.quote(ud.package + "@" + ud.version)
    cmd = ud.basecmd + " view %s" % pkgver
    env = NpmEnvironment(d)
    check_network_access(d, cmd, ud.registry)
    output = env.run(cmd, args=view_args)
    if not output:
        raise FetchError("Unavailable package %s" % pkgver, ud.url)
    try:
        parsed = json.loads(output)
        err = parsed.get("error")
        if err is not None:
            raise FetchError(err.get("summary"), ud.url)
        if ud.version == "latest":
            bb.warn("The npm package %s is using the latest "
                    "version available. This could lead to "
                    "non-reproducible builds." % pkgver)
        elif parsed.get("version") != ud.version:
            raise ParameterError("Invalid 'version' parameter", ud.url)
        return parsed
    except Exception as e:
        # Any failure while interpreting the view (including the raises
        # above) is reported as an invalid view, as before.
        raise FetchError("Invalid view from npm: %s" % str(e), ud.url)
def urldata_init(self, ud, d):
    """
    Init ClearCase specific variables within url data.

    Validates the protocol, the mandatory 'vob' parameter and SRCREV,
    then derives the server URL, view/config-spec names and the local
    tarball path.

    Raises:
        ParameterError: on an unsupported protocol.
        MissingParameterError: when 'vob' is not given.
        FetchError: when SRCREV is left at "INVALID".
    """
    ud.proto = "https"
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    if not ud.proto in ('http', 'https'):
        raise ParameterError("Invalid protocol type", ud.url)
    ud.vob = ''
    if 'vob' in ud.parm:
        ud.vob = ud.parm['vob']
    else:
        msg = ud.url+": vob must be defined so the fetcher knows what to get."
        raise MissingParameterError('vob', msg)
    if 'module' in ud.parm:
        ud.module = ud.parm['module']
    else:
        ud.module = ""
    ud.basecmd = d.getVar("FETCHCMD_ccrc") or "/usr/bin/env cleartool || rcleartool"
    if d.getVar("SRCREV") == "INVALID":
        raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
    # SRCREV is the ClearCase label; fetched unexpanded.
    ud.label = d.getVar("SRCREV", False)
    ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC")
    ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)
    ud.identifier = "clearcase-%s%s-%s" % (
        ud.vob.replace("/", ""),
        ud.module.replace("/", "."),
        ud.label.replace("/", "."))
    # Bug fix: d.getVar("DATETIME", d, True) passed three arguments to the
    # new-style getVar(var, expand=True) used everywhere else in this
    # method, which raises TypeError. Expansion is the default.
    ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME"))
    ud.csname = "%s-config-spec" % (ud.identifier)
    ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
    ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
    ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
    ud.localfile = "%s.tar.gz" % (ud.identifier)
    self.debug("host            = %s" % ud.host)
    self.debug("path            = %s" % ud.path)
    self.debug("server          = %s" % ud.server)
    self.debug("proto           = %s" % ud.proto)
    self.debug("type            = %s" % ud.type)
    self.debug("vob             = %s" % ud.vob)
    self.debug("module          = %s" % ud.module)
    self.debug("basecmd         = %s" % ud.basecmd)
    self.debug("label           = %s" % ud.label)
    self.debug("ccasedir        = %s" % ud.ccasedir)
    self.debug("viewdir         = %s" % ud.viewdir)
    self.debug("viewname        = %s" % ud.viewname)
    self.debug("configspecfile  = %s" % ud.configspecfile)
    self.debug("localfile       = %s" % ud.localfile)
    # Final localfile is the absolute path under DL_DIR.
    ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)
def urldata_init(self, urldata, d):
    """
    Init SSH specific variables within url data.

    Rejects git-over-ssh URLs (those must use the git fetcher with
    protocol=ssh) and computes the local download path from the remote
    file name.

    Raises:
        ParameterError: on a git protocol parameter or an URL that does
            not match the expected ssh URL pattern.
    """
    if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
        raise ParameterError(
            "Invalid protocol - if you wish to fetch from a git " +
            "repository using ssh, you need to use " +
            "git:// prefix with protocol=ssh", urldata.url)
    m = __pattern__.match(urldata.url)
    # Robustness fix: fail with a clear fetcher error instead of an
    # AttributeError when the URL does not match the expected form.
    if m is None:
        raise ParameterError("Invalid SSH URL", urldata.url)
    path = m.group('path')
    # (unused 'host' group lookup removed)
    urldata.localpath = os.path.join(
        d.getVar('DL_DIR'), os.path.basename(os.path.normpath(path)))
def urldata_init(self, ud, d):
    """Init npm specific variables within url data"""
    ud.registry = None
    # Mandatory 'package' parameter.
    ud.package = ud.parm.get("package")
    if not ud.package:
        raise MissingParameterError("Parameter 'package' required", ud.url)
    # Mandatory 'version' parameter: a semver string or the literal "latest".
    ud.version = ud.parm.get("version")
    if not ud.version:
        raise MissingParameterError("Parameter 'version' required", ud.url)
    if not (is_semver(ud.version) or ud.version == "latest"):
        raise ParameterError("Invalid 'version' parameter", ud.url)
    # The registry is the url itself with the scheme swapped to http and
    # any ';param=value' suffix dropped.
    ud.registry = re.sub(r"^npm://", "http://", ud.url.split(";")[0])
    # Local file name: an explicit 'downloadfilename' parameter wins,
    # otherwise it is derived from the package name and version.
    if "downloadfilename" in ud.parm:
        ud.localfile = npm_localfile(d.expand(ud.parm["downloadfilename"]))
    else:
        ud.localfile = npm_localfile(ud.package, ud.version)
    # Base 'npm' command (overridable via FETCHCMD_npm).
    ud.basecmd = d.getVar("FETCHCMD_npm") or "npm"
    # This fetcher resolves a URI from a npm package name and version and
    # then forwards it to a proxy fetcher. A resolve file containing the
    # resolved URI is created to avoid unwanted network access (if the file
    # already exists). The management of the donestamp file, the lockfile
    # and the checksums are forwarded to the proxy fetcher.
    ud.proxy = None
    ud.needdonestamp = False
    ud.resolvefile = self.localpath(ud, d) + ".resolved"
def _resolve_dependency(name, params, deptree):
    """Translate one shrinkwrap dependency into a fetchable entry.

    NOTE(review): relies on `ud` and `d` from the enclosing scope (this is
    a closure inside the npmsw urldata_init). Appends a dict describing
    the dependency (url, localpath, extrapaths, destsuffix, unpack) to
    ud.deps; returns early (appending nothing) only for duplicate
    registry dependencies that carry no 'resolved' url.
    """
    url = None
    localpath = None
    extrapaths = []
    # Destination: node_modules/<dep> nested for each ancestor in deptree.
    destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
    destsuffix = os.path.join(*destsubdirs)
    unpack = True
    integrity = params.get("integrity", None)
    resolved = params.get("resolved", None)
    version = params.get("version", None)
    # Handle registry sources: a plain semver version plus an integrity
    # checksum means a registry tarball.
    if is_semver(version) and integrity:
        # Handle duplicate dependencies without url
        if not resolved:
            return
        localfile = npm_localfile(name, version)
        uri = URI(resolved)
        uri.params["downloadfilename"] = localfile
        # Map the npm 'integrity' field onto a fetcher checksum parameter.
        checksum_name, checksum_expected = npm_integrity(integrity)
        uri.params[checksum_name] = checksum_expected
        url = str(uri)
        localpath = os.path.join(d.getVar("DL_DIR"), localfile)
        # Create a resolve file to mimic the npm fetcher and allow
        # re-usability of the downloaded file.
        resolvefile = localpath + ".resolved"
        bb.utils.mkdirhier(os.path.dirname(resolvefile))
        with open(resolvefile, "w") as f:
            f.write(url)
        extrapaths.append(resolvefile)
    # Handle http tarball sources
    # NOTE(review): assumes 'version' is a string here; a dependency with
    # no version would raise AttributeError on startswith — confirm the
    # shrinkwrap walker always supplies one.
    elif version.startswith("http") and integrity:
        localfile = npm_localfile(os.path.basename(version))
        uri = URI(version)
        uri.params["downloadfilename"] = localfile
        checksum_name, checksum_expected = npm_integrity(integrity)
        uri.params[checksum_name] = checksum_expected
        url = str(uri)
        localpath = os.path.join(d.getVar("DL_DIR"), localfile)
    # Handle git sources
    elif version.startswith("git"):
        # Normalize the github: shorthand to a full git+https url first.
        if version.startswith("github:"):
            version = "git+https://github.com/" + version[len("github:"):]
        regex = re.compile(r"""
            ^
            git\+
            (?P<protocol>[a-z]+)
            ://
            (?P<url>[^#]+)
            \#
            (?P<rev>[0-9a-f]+)
            $
            """, re.VERBOSE)
        match = regex.match(version)
        if not match:
            raise ParameterError("Invalid git url: %s" % version, ud.url)
        groups = match.groupdict()
        uri = URI("git://" + str(groups["url"]))
        uri.params["protocol"] = str(groups["protocol"])
        uri.params["rev"] = str(groups["rev"])
        uri.params["destsuffix"] = destsuffix
        url = str(uri)
    # Handle local tarball and link sources
    elif version.startswith("file"):
        # Strip the leading "file:" scheme (5 characters).
        localpath = version[5:]
        # Only .tgz archives are unpacked; anything else (e.g. a linked
        # directory) is used in place.
        if not version.endswith(".tgz"):
            unpack = False
    else:
        raise ParameterError("Unsupported dependency: %s" % name, ud.url)
    ud.deps.append({
        "url": url,
        "localpath": localpath,
        "extrapaths": extrapaths,
        "destsuffix": destsuffix,
        "unpack": unpack,
    })
def urldata_init(self, ud, d):
    """Init npmsw specific variables within url data.

    Parses the shrinkwrap file referenced by the npmsw:// url, resolves
    every dependency into a fetchable entry in ud.deps, and builds a
    proxy Fetch over the resolved urls.

    Raises:
        ParameterError: on an unreadable/invalid shrinkwrap file or an
            unsupported dependency form.
    """
    # Get the 'shrinkwrap' parameter: the npmsw:// url is the path to the
    # shrinkwrap file itself (any ';param=value' suffix dropped).
    ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])
    # Get the 'dev' parameter: whether to include dev dependencies.
    ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)
    # Resolve the dependencies
    ud.deps = []

    def _resolve_dependency(name, params, deptree):
        # Closure over `ud` and `d`; appends one entry per dependency to
        # ud.deps (or nothing for duplicate registry deps without a url).
        url = None
        localpath = None
        extrapaths = []
        # Destination: node_modules/<dep> nested for each ancestor in deptree.
        destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
        destsuffix = os.path.join(*destsubdirs)
        unpack = True
        integrity = params.get("integrity", None)
        resolved = params.get("resolved", None)
        version = params.get("version", None)
        # Handle registry sources (semver + integrity checksum).
        if is_semver(version) and integrity:
            # Handle duplicate dependencies without url
            if not resolved:
                return
            localfile = npm_localfile(name, version)
            uri = URI(resolved)
            uri.params["downloadfilename"] = localfile
            # Map the npm 'integrity' field onto a fetcher checksum parameter.
            checksum_name, checksum_expected = npm_integrity(integrity)
            uri.params[checksum_name] = checksum_expected
            url = str(uri)
            localpath = os.path.join(d.getVar("DL_DIR"), localfile)
            # Create a resolve file to mimic the npm fetcher and allow
            # re-usability of the downloaded file.
            resolvefile = localpath + ".resolved"
            bb.utils.mkdirhier(os.path.dirname(resolvefile))
            with open(resolvefile, "w") as f:
                f.write(url)
            extrapaths.append(resolvefile)
        # Handle http tarball sources
        # NOTE(review): assumes 'version' is a string from here on — a
        # missing version would raise AttributeError; confirm the
        # shrinkwrap walker always supplies one.
        elif version.startswith("http") and integrity:
            localfile = npm_localfile(os.path.basename(version))
            uri = URI(version)
            uri.params["downloadfilename"] = localfile
            checksum_name, checksum_expected = npm_integrity(integrity)
            uri.params[checksum_name] = checksum_expected
            url = str(uri)
            localpath = os.path.join(d.getVar("DL_DIR"), localfile)
        # Handle git sources
        elif version.startswith("git"):
            # Normalize the github: shorthand to a full git+https url first.
            if version.startswith("github:"):
                version = "git+https://github.com/" + version[len("github:"):]
            regex = re.compile(r"""
                ^
                git\+
                (?P<protocol>[a-z]+)
                ://
                (?P<url>[^#]+)
                \#
                (?P<rev>[0-9a-f]+)
                $
                """, re.VERBOSE)
            match = regex.match(version)
            if not match:
                raise ParameterError("Invalid git url: %s" % version, ud.url)
            groups = match.groupdict()
            uri = URI("git://" + str(groups["url"]))
            uri.params["protocol"] = str(groups["protocol"])
            uri.params["rev"] = str(groups["rev"])
            uri.params["destsuffix"] = destsuffix
            url = str(uri)
        # Handle local tarball and link sources
        elif version.startswith("file"):
            # Strip the leading "file:" scheme (5 characters).
            localpath = version[5:]
            # Only .tgz archives are unpacked; others are used in place.
            if not version.endswith(".tgz"):
                unpack = False
        else:
            raise ParameterError("Unsupported dependency: %s" % name, ud.url)
        ud.deps.append({
            "url": url,
            "localpath": localpath,
            "extrapaths": extrapaths,
            "destsuffix": destsuffix,
            "unpack": unpack,
        })

    try:
        with open(ud.shrinkwrap_file, "r") as f:
            shrinkwrap = json.load(f)
    except Exception as e:
        raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)
    foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)
    # Avoid conflicts between the environment data and:
    # - the proxy url revision
    # - the proxy url checksum
    data = bb.data.createCopy(d)
    data.delVar("SRCREV")
    data.delVarFlags("SRC_URI")
    # This fetcher resolves multiple URIs from a shrinkwrap file and then
    # forwards it to a proxy fetcher. The management of the donestamp file,
    # the lockfile and the checksums are forwarded to the proxy fetcher.
    ud.proxy = Fetch([dep["url"] for dep in ud.deps if dep["url"]], data)
    ud.needdonestamp = False